Max level shown:
1 Downloading crates ...
2 Downloaded similar v2.2.1
3 Downloaded tokio-test v0.4.2
4 Downloaded quick-error v1.2.3
5 Downloaded newline-converter v0.3.0
6 Downloaded wait-timeout v0.2.0
7 Downloaded unarray v0.1.4
8 Downloaded tokio-stream v0.1.8
9 Downloaded structmeta v0.2.0
10 Downloaded httptest v0.15.4
11 Downloaded test-strategy v0.3.1
12 Downloaded subprocess v0.2.9
13 Downloaded convert_case v0.5.0
14 Downloaded bit-vec v0.6.3
15 Downloaded bit-set v0.5.3
16 Downloaded structmeta-derive v0.2.0
17 Downloaded regex-automata v0.1.10
18 Downloaded expectorate v1.0.7
19 Downloaded proptest v1.2.0
20 Downloaded bstr v0.2.17
21 Downloaded rand_xorshift v0.3.0
22 Downloaded rusty-fork v0.3.0
23 Compiling unicode-ident v1.0.11
24 Compiling cfg-if v1.0.0
25 Fresh autocfg v1.1.0
26 Fresh version_check v0.9.4
27 Running `rustc --crate-name unicode_ident --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicode-ident-1.0.11/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=81e7752fff89e70f -C extra-filename=-81e7752fff89e70f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
28 Running `rustc --crate-name cfg_if --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/cfg-if-1.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=a93276574f0edf39 -C extra-filename=-a93276574f0edf39 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
29 Compiling libc v0.2.147
30 Compiling once_cell v1.14.0
31 Running `rustc --crate-name libc /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libc-0.2.147/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="extra_traits"' --cfg 'feature="std"' -C metadata=a748caf4ceff51bd -C extra-filename=-a748caf4ceff51bd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg freebsd11 --cfg libc_priv_mod_use --cfg libc_union --cfg libc_const_size_of --cfg libc_align --cfg libc_int128 --cfg libc_core_cvoid --cfg libc_packedN --cfg libc_cfg_target_vendor --cfg libc_non_exhaustive --cfg libc_long_array --cfg libc_ptr_addr_of --cfg libc_underscore_const_names --cfg libc_const_extern_fn`
32 Running `rustc --crate-name once_cell --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/once_cell-1.14.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="race"' --cfg 'feature="std"' --cfg 'feature="unstable"' -C metadata=fdb5f9e769d1e589 -C extra-filename=-fdb5f9e769d1e589 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
33 Compiling itoa v1.0.1
34 Running `rustc --crate-name itoa --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/itoa-1.0.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f0781104e344570e -C extra-filename=-f0781104e344570e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
35 Compiling memchr v2.5.0
36 Compiling libm v0.2.6
37 Running `rustc --crate-name memchr --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/memchr-2.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=9611c546f9b73cca -C extra-filename=-9611c546f9b73cca --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg memchr_runtime_simd --cfg memchr_runtime_sse2 --cfg memchr_runtime_sse42 --cfg memchr_runtime_avx`
38 Compiling scopeguard v1.1.0
39 Running `rustc --crate-name libm --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libm-0.2.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=e5a688f05412e317 -C extra-filename=-e5a688f05412e317 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
40 Running `rustc --crate-name scopeguard /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/scopeguard-1.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=00b7ece4eb7b8e7e -C extra-filename=-00b7ece4eb7b8e7e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
41 Compiling log v0.4.14
42 Running `rustc --crate-name log /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/log-0.4.14/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=930e200cffaa7930 -C extra-filename=-930e200cffaa7930 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --cap-lints allow --cfg atomic_cas --cfg has_atomics`
43 Compiling proc-macro2 v1.0.63
44 Running `rustc --crate-name proc_macro2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/proc-macro2-1.0.63/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="proc-macro"' -C metadata=a0e1129b171da08c -C extra-filename=-a0e1129b171da08c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern unicode_ident=/work/oxidecomputer/crucible/target/debug/deps/libunicode_ident-81e7752fff89e70f.rmeta --cap-lints allow --cfg wrap_proc_macro`
45 Compiling pin-project-lite v0.2.13
46 Running `rustc --crate-name pin_project_lite --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pin-project-lite-0.2.13/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=c035e320730303c2 -C extra-filename=-c035e320730303c2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
47 Compiling smallvec v1.10.0
48 Running `rustc --crate-name smallvec --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/smallvec-1.10.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=397f26bd8c84e528 -C extra-filename=-397f26bd8c84e528 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
49 Compiling lock_api v0.4.6
50 Running `rustc --crate-name lock_api --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/lock_api-0.4.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4b01d37c549347e9 -C extra-filename=-4b01d37c549347e9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern scopeguard=/work/oxidecomputer/crucible/target/debug/deps/libscopeguard-00b7ece4eb7b8e7e.rmeta --cap-lints allow`
51 Compiling ryu v1.0.9
52 Running `rustc --crate-name ryu --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ryu-1.0.9/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=062e5ac4087417b3 -C extra-filename=-062e5ac4087417b3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
53 Compiling futures-core v0.3.28
54 Running `rustc --crate-name futures_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-core-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=46c6e3a1b3966417 -C extra-filename=-46c6e3a1b3966417 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
55 Compiling pin-utils v0.1.0
56 Running `rustc --crate-name pin_utils --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pin-utils-0.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bcfb754cd1ab8c67 -C extra-filename=-bcfb754cd1ab8c67 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
57 Fresh pkg-config v0.3.24
58 Compiling bitflags v1.3.2
59 Running `rustc --crate-name bitflags --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bitflags-1.3.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=b5bc15fb96af61fc -C extra-filename=-b5bc15fb96af61fc --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
60 Compiling fnv v1.0.7
61 Running `rustc --crate-name fnv /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/fnv-1.0.7/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=5641130f60a8056b -C extra-filename=-5641130f60a8056b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
62 Compiling futures-sink v0.3.28
63 Running `rustc --crate-name futures_sink --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-sink-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=a6d6ed8a846c5f8a -C extra-filename=-a6d6ed8a846c5f8a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
64 Compiling quote v1.0.29
65 Running `rustc --crate-name quote --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/quote-1.0.29/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="proc-macro"' -C metadata=2ffbaa80dd156621 -C extra-filename=-2ffbaa80dd156621 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --cap-lints allow`
66 Compiling num-traits v0.2.16
67 Running `rustc --crate-name num_traits --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-traits-0.2.16/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="libm"' --cfg 'feature="std"' -C metadata=8e50de91aba3f8f9 -C extra-filename=-8e50de91aba3f8f9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libm=/work/oxidecomputer/crucible/target/debug/deps/liblibm-e5a688f05412e317.rmeta --cap-lints allow --cfg has_to_int_unchecked --cfg has_reverse_bits --cfg has_leading_trailing_ones --cfg has_div_euclid --cfg has_copysign --cfg has_is_subnormal --cfg has_int_to_from_bytes --cfg has_float_to_from_bytes`
68 Compiling futures-task v0.3.28
69 Running `rustc --crate-name futures_task --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-task-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="std"' -C metadata=12b58f257ddc96a4 -C extra-filename=-12b58f257ddc96a4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
70 Compiling futures-channel v0.3.28
71 Running `rustc --crate-name futures_channel --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-channel-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="futures-sink"' --cfg 'feature="sink"' --cfg 'feature="std"' -C metadata=34a7a018f58dc5a7 -C extra-filename=-34a7a018f58dc5a7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_sink=/work/oxidecomputer/crucible/target/debug/deps/libfutures_sink-a6d6ed8a846c5f8a.rmeta --cap-lints allow`
72 Compiling syn v2.0.23
73 Running `rustc --crate-name syn --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/syn-2.0.23/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="clone-impls"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="extra-traits"' --cfg 'feature="full"' --cfg 'feature="parsing"' --cfg 'feature="printing"' --cfg 'feature="proc-macro"' --cfg 'feature="quote"' --cfg 'feature="visit"' --cfg 'feature="visit-mut"' -C metadata=baedf68a9175a6da -C extra-filename=-baedf68a9175a6da --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern unicode_ident=/work/oxidecomputer/crucible/target/debug/deps/libunicode_ident-81e7752fff89e70f.rmeta --cap-lints allow`
74 Compiling getrandom v0.2.5
75 Running `rustc --crate-name getrandom --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/getrandom-0.2.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=567199de146d617e -C extra-filename=-567199de146d617e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
76 Compiling syn v1.0.107
77 Running `rustc --crate-name syn --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/syn-1.0.107/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="clone-impls"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="extra-traits"' --cfg 'feature="full"' --cfg 'feature="parsing"' --cfg 'feature="printing"' --cfg 'feature="proc-macro"' --cfg 'feature="quote"' --cfg 'feature="visit"' --cfg 'feature="visit-mut"' -C metadata=837f9a049f77ca38 -C extra-filename=-837f9a049f77ca38 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern unicode_ident=/work/oxidecomputer/crucible/target/debug/deps/libunicode_ident-81e7752fff89e70f.rmeta --cap-lints allow --cfg syn_disable_nightly_tests`
78 Compiling jobserver v0.1.25
79 Running `rustc --crate-name jobserver --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/jobserver-0.1.25/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=0ee11fba78dd3235 -C extra-filename=-0ee11fba78dd3235 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
80 Compiling num_cpus v1.13.1
81 Running `rustc --crate-name num_cpus /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num_cpus-1.13.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=67a451bebfcc5086 -C extra-filename=-67a451bebfcc5086 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
82 Compiling ahash v0.7.6
83 Running `rustc --crate-name ahash --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ahash-0.7.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=c781b32f1fcd8d92 -C extra-filename=-c781b32f1fcd8d92 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern getrandom=/work/oxidecomputer/crucible/target/debug/deps/libgetrandom-567199de146d617e.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --cap-lints allow --cfg 'feature="runtime-rng"' --cfg 'feature="folded_multiply"'`
84 Compiling parking_lot_core v0.9.1
85 Running `rustc --crate-name parking_lot_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/parking_lot_core-0.9.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f45d0642d52c20c1 -C extra-filename=-f45d0642d52c20c1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern smallvec=/work/oxidecomputer/crucible/target/debug/deps/libsmallvec-397f26bd8c84e528.rmeta --cap-lints allow`
86 Compiling cc v1.0.73
87 Running `rustc --crate-name cc --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/cc-1.0.73/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="jobserver"' --cfg 'feature="parallel"' -C metadata=2976d4b8f46fa671 -C extra-filename=-2976d4b8f46fa671 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern jobserver=/work/oxidecomputer/crucible/target/debug/deps/libjobserver-0ee11fba78dd3235.rmeta --cap-lints allow`
88 Compiling signal-hook-registry v1.4.0
89 Running `rustc --crate-name signal_hook_registry /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/signal-hook-registry-1.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4d955479f235827e -C extra-filename=-4d955479f235827e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
90 Compiling parking_lot v0.12.0
91 Running `rustc --crate-name parking_lot --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/parking_lot-0.12.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="send_guard"' -C metadata=970d5c0acece447c -C extra-filename=-970d5c0acece447c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern lock_api=/work/oxidecomputer/crucible/target/debug/deps/liblock_api-4b01d37c549347e9.rmeta --extern parking_lot_core=/work/oxidecomputer/crucible/target/debug/deps/libparking_lot_core-f45d0642d52c20c1.rmeta --cap-lints allow`
92 Compiling hashbrown v0.12.3
93 Running `rustc --crate-name hashbrown --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hashbrown-0.12.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="ahash"' --cfg 'feature="default"' --cfg 'feature="inline-more"' --cfg 'feature="raw"' -C metadata=3ad6614047c487f9 -C extra-filename=-3ad6614047c487f9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern ahash=/work/oxidecomputer/crucible/target/debug/deps/libahash-c781b32f1fcd8d92.rmeta --cap-lints allow`
94 Compiling rand_core v0.6.4
95 Running `rustc --crate-name rand_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_core-0.6.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="getrandom"' --cfg 'feature="std"' -C metadata=e2870cc0e01c33c9 -C extra-filename=-e2870cc0e01c33c9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern getrandom=/work/oxidecomputer/crucible/target/debug/deps/libgetrandom-567199de146d617e.rmeta --cap-lints allow`
96 Compiling mio v0.8.8
97 Running `rustc --crate-name mio --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/mio-0.8.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="log"' --cfg 'feature="net"' --cfg 'feature="os-ext"' --cfg 'feature="os-poll"' -C metadata=27a8136cf12de2bb -C extra-filename=-27a8136cf12de2bb --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --cap-lints allow`
98 Compiling percent-encoding v2.3.0
99 Running `rustc --crate-name percent_encoding --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/percent-encoding-2.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=0000aebce3d30803 -C extra-filename=-0000aebce3d30803 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
100 Compiling futures-io v0.3.28
101 Running `rustc --crate-name futures_io --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-io-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=bcbbef0c8c581d67 -C extra-filename=-bcbbef0c8c581d67 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
102 Compiling slab v0.4.5
103 Running `rustc --crate-name slab --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slab-0.4.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=5b7c79e345d6363e -C extra-filename=-5b7c79e345d6363e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
104 Compiling socket2 v0.5.3
105 Running `rustc --crate-name socket2 --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/socket2-0.5.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="all"' -C metadata=b464b617227db85e -C extra-filename=-b464b617227db85e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
106 Compiling time v0.1.44
107 Compiling iana-time-zone v0.1.47
108 Running `rustc --crate-name time /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/time-0.1.44/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=fcbf6ea26d482f3a -C extra-filename=-fcbf6ea26d482f3a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
109 Running `rustc --crate-name iana_time_zone --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/iana-time-zone-0.1.47/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="fallback"' -C metadata=3051bc3a975f54c9 -C extra-filename=-3051bc3a975f54c9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
110 Compiling tinyvec_macros v0.1.0
111 Running `rustc --crate-name tinyvec_macros --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tinyvec_macros-0.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=71472bb1a287b427 -C extra-filename=-71472bb1a287b427 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
112 Compiling tinyvec v1.5.1
113 Running `rustc --crate-name tinyvec --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tinyvec-1.5.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="tinyvec_macros"' -C metadata=592495f429dcf8bd -C extra-filename=-592495f429dcf8bd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern tinyvec_macros=/work/oxidecomputer/crucible/target/debug/deps/libtinyvec_macros-71472bb1a287b427.rmeta --cap-lints allow`
114 Compiling unicode-bidi v0.3.7
115 Running `rustc --crate-name unicode_bidi --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicode-bidi-0.3.7/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=2c825f5beb05a037 -C extra-filename=-2c825f5beb05a037 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
116 Compiling tracing-core v0.1.30
117 Running `rustc --crate-name tracing_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tracing-core-0.1.30/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="once_cell"' --cfg 'feature="std"' --cfg 'feature="valuable"' -C metadata=adac59f754126e83 -C extra-filename=-adac59f754126e83 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --cap-lints allow`
118 Compiling ring v0.16.20
119 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ring-0.16.20/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="dev_urandom_fallback"' --cfg 'feature="once_cell"' -C metadata=9674c8d76dda18b5 -C extra-filename=-9674c8d76dda18b5 --out-dir /work/oxidecomputer/crucible/target/debug/build/ring-9674c8d76dda18b5 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cc=/work/oxidecomputer/crucible/target/debug/deps/libcc-2976d4b8f46fa671.rlib --cap-lints allow`
120 Compiling form_urlencoded v1.2.0
121 Running `rustc --crate-name form_urlencoded --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/form_urlencoded-1.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=ef731295a29c9150 -C extra-filename=-ef731295a29c9150 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --cap-lints allow`
122 Compiling dyn-clone v1.0.5
123 Running `rustc --crate-name dyn_clone --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dyn-clone-1.0.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=6ca330b50294836a -C extra-filename=-6ca330b50294836a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
124 Compiling untrusted v0.7.1
125 Running `rustc --crate-name untrusted --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/untrusted-0.7.1/src/untrusted.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4b93784238d33e58 -C extra-filename=-4b93784238d33e58 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
126 Compiling matches v0.1.9
127 Running `rustc --crate-name matches /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/matches-0.1.9/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=7dd63e8ea065bea3 -C extra-filename=-7dd63e8ea065bea3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
128 Compiling spin v0.5.2
129 Running `rustc --crate-name spin /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/spin-0.5.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bfb6115ad3135235 -C extra-filename=-bfb6115ad3135235 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
130 Compiling base64 v0.21.3
131 Running `rustc --crate-name base64 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/base64-0.21.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=c139bdd129e780ac -C extra-filename=-c139bdd129e780ac --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
132 Compiling unicode-normalization v0.1.19
133 Running `rustc --crate-name unicode_normalization --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicode-normalization-0.1.19/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=3df8261a03d4248e -C extra-filename=-3df8261a03d4248e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern tinyvec=/work/oxidecomputer/crucible/target/debug/deps/libtinyvec-592495f429dcf8bd.rmeta --cap-lints allow`
134 Compiling lazy_static v1.4.0
135 Running `rustc --crate-name lazy_static /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/lazy_static-1.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=9c1a21fa7aad0259 -C extra-filename=-9c1a21fa7aad0259 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
136 Compiling heck v0.4.1
137 Running `rustc --crate-name heck --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/heck-0.4.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=29af75c938b110f7 -C extra-filename=-29af75c938b110f7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
138 Running `/work/oxidecomputer/crucible/target/debug/build/ring-9674c8d76dda18b5/build-script-build`
139 Compiling ppv-lite86 v0.2.16
140 Running `rustc --crate-name ppv_lite86 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ppv-lite86-0.2.16/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="simd"' --cfg 'feature="std"' -C metadata=a7fd0e8d54744928 -C extra-filename=-a7fd0e8d54744928 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
141 Compiling anyhow v1.0.71
142 Running `rustc --crate-name anyhow --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/anyhow-1.0.71/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=c23d7ea2a714731e -C extra-filename=-c23d7ea2a714731e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
143 Compiling rustls-pemfile v1.0.3
144 Running `rustc --crate-name rustls_pemfile --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustls-pemfile-1.0.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e52b2a6debfcae48 -C extra-filename=-e52b2a6debfcae48 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rmeta --cap-lints allow`
145 Compiling openssl-sys v0.9.90
146 Running `rustc --crate-name build_script_main --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openssl-sys-0.9.90/build/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=b13a834c02d58f09 -C extra-filename=-b13a834c02d58f09 --out-dir /work/oxidecomputer/crucible/target/debug/build/openssl-sys-b13a834c02d58f09 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cc=/work/oxidecomputer/crucible/target/debug/deps/libcc-2976d4b8f46fa671.rlib --extern pkg_config=/work/oxidecomputer/crucible/target/debug/deps/libpkg_config-e05c47386f4bdcc0.rlib --cap-lints allow`
147 Compiling rand_chacha v0.3.1
148 Running `rustc --crate-name rand_chacha --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_chacha-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=add466c063ef8725 -C extra-filename=-add466c063ef8725 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern ppv_lite86=/work/oxidecomputer/crucible/target/debug/deps/libppv_lite86-a7fd0e8d54744928.rmeta --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-e2870cc0e01c33c9.rmeta --cap-lints allow`
149 Compiling socket2 v0.4.9
150 Running `rustc --crate-name socket2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/socket2-0.4.9/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="all"' -C metadata=3c3e3607c1c6d64e -C extra-filename=-3c3e3607c1c6d64e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
151 Compiling idna v0.2.3
152 Running `rustc --crate-name idna --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/idna-0.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=67f12269d91917c9 -C extra-filename=-67f12269d91917c9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern matches=/work/oxidecomputer/crucible/target/debug/deps/libmatches-7dd63e8ea065bea3.rmeta --extern unicode_bidi=/work/oxidecomputer/crucible/target/debug/deps/libunicode_bidi-2c825f5beb05a037.rmeta --extern unicode_normalization=/work/oxidecomputer/crucible/target/debug/deps/libunicode_normalization-3df8261a03d4248e.rmeta --cap-lints allow`
153 Compiling rand v0.8.5
154 Running `rustc --crate-name rand --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand-0.8.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="getrandom"' --cfg 'feature="libc"' --cfg 'feature="min_const_gen"' --cfg 'feature="rand_chacha"' --cfg 'feature="small_rng"' --cfg 'feature="std"' --cfg 'feature="std_rng"' -C metadata=1f91a9ea4aed49ee -C extra-filename=-1f91a9ea4aed49ee --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rmeta --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-e2870cc0e01c33c9.rmeta --cap-lints allow`
155 Running `/work/oxidecomputer/crucible/target/debug/build/openssl-sys-b13a834c02d58f09/build-script-main`
156 Compiling try-lock v0.2.3
157 Running `rustc --crate-name try_lock /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/try-lock-0.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=02baba71598f88d3 -C extra-filename=-02baba71598f88d3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
158 Compiling httparse v1.8.0
159 Running `rustc --crate-name httparse --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/httparse-1.8.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=59406412a39ce707 -C extra-filename=-59406412a39ce707 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg httparse_simd`
160 Compiling want v0.3.0
161 Running `rustc --crate-name want --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/want-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=5737a0d118420ef7 -C extra-filename=-5737a0d118420ef7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern try_lock=/work/oxidecomputer/crucible/target/debug/deps/libtry_lock-02baba71598f88d3.rmeta --cap-lints allow`
162 Compiling httpdate v1.0.2
163 Running `rustc --crate-name httpdate --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/httpdate-1.0.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e3ef82a990113a54 -C extra-filename=-e3ef82a990113a54 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
164 Compiling tower-service v0.3.1
165 Running `rustc --crate-name tower_service --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tower-service-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=51da71f2ad5117ee -C extra-filename=-51da71f2ad5117ee --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
166 Compiling url v2.2.2
167 Running `rustc --crate-name url --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/url-2.2.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ff56943ab9066fdc -C extra-filename=-ff56943ab9066fdc --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern form_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libform_urlencoded-ef731295a29c9150.rmeta --extern idna=/work/oxidecomputer/crucible/target/debug/deps/libidna-67f12269d91917c9.rmeta --extern matches=/work/oxidecomputer/crucible/target/debug/deps/libmatches-7dd63e8ea065bea3.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --cap-lints allow`
168 Compiling foreign-types-shared v0.1.1
169 Running `rustc --crate-name foreign_types_shared /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/foreign-types-shared-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=623b4e0ee39a8057 -C extra-filename=-623b4e0ee39a8057 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
170 Compiling foreign-types v0.3.2
171 Running `rustc --crate-name foreign_types /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/foreign-types-0.3.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=07949a1c938aca9c -C extra-filename=-07949a1c938aca9c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern foreign_types_shared=/work/oxidecomputer/crucible/target/debug/deps/libforeign_types_shared-623b4e0ee39a8057.rmeta --cap-lints allow`
172 Compiling openssl v0.10.55
173 Running `/work/oxidecomputer/crucible/target/debug/build/openssl-754c33330368d7dd/build-script-build`
174 Running `rustc --crate-name openssl_sys --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openssl-sys-0.9.90/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=fe2ced2b5ac7bf5a -C extra-filename=-fe2ced2b5ac7bf5a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow -L native=/usr/ssl-3/lib/amd64 -l ssl -l crypto --cfg openssl --cfg 'osslconf="OPENSSL_NO_SSL3_METHOD"' --cfg ossl300 --cfg ossl101 --cfg ossl102 --cfg ossl102f --cfg ossl102h --cfg ossl110 --cfg ossl110f --cfg ossl110g --cfg ossl110h --cfg ossl111 --cfg ossl111b --cfg ossl111c --cfg ossl111d`
175 Compiling native-tls v0.2.11
176 Running `/work/oxidecomputer/crucible/target/debug/build/native-tls-52d6aeab7008e848/build-script-build`
177 Compiling ahash v0.8.3
178 Running `rustc --crate-name ahash --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ahash-0.8.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0c4a58ad1daaf354 -C extra-filename=-0c4a58ad1daaf354 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --cap-lints allow --cfg 'feature="folded_multiply"'`
179 Compiling typenum v1.15.0
180 Running `rustc --crate-name typenum --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/typenum-1.15.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=21543e9c4f362850 -C extra-filename=-21543e9c4f362850 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
181 Compiling serde_derive_internals v0.26.0
182 Running `rustc --crate-name serde_derive_internals /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_derive_internals-0.26.0/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=6aea929a774cf30f -C extra-filename=-6aea929a774cf30f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rmeta --cap-lints allow`
183 Compiling crossbeam-utils v0.8.8
184 Running `rustc --crate-name crossbeam_utils --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossbeam-utils-0.8.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="lazy_static"' --cfg 'feature="std"' -C metadata=a1fb255bfa31483a -C extra-filename=-a1fb255bfa31483a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --cap-lints allow`
185 Compiling unicode-width v0.1.9
186 Running `rustc --crate-name unicode_width /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicode-width-0.1.9/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=87c5262a4c4bb0e9 -C extra-filename=-87c5262a4c4bb0e9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
187 Compiling openssl-probe v0.1.5
188 Running `rustc --crate-name openssl_probe /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openssl-probe-0.1.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ef939aeef8872804 -C extra-filename=-ef939aeef8872804 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
189 Compiling hashbrown v0.13.2
190 Running `rustc --crate-name hashbrown --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hashbrown-0.13.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="ahash"' --cfg 'feature="default"' --cfg 'feature="inline-more"' -C metadata=d4fb045aef0e24c1 -C extra-filename=-d4fb045aef0e24c1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern ahash=/work/oxidecomputer/crucible/target/debug/deps/libahash-0c4a58ad1daaf354.rmeta --cap-lints allow`
191 Compiling generic-array v0.14.5
192 Running `rustc --crate-name generic_array /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/generic-array-0.14.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="more_lengths"' -C metadata=bc31b01978a602e7 -C extra-filename=-bc31b01978a602e7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern typenum=/work/oxidecomputer/crucible/target/debug/deps/libtypenum-21543e9c4f362850.rmeta --cap-lints allow --cfg relaxed_coherence`
193 Compiling aho-corasick v1.0.1
194 Running `rustc --crate-name aho_corasick --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/aho-corasick-1.0.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="perf-literal"' --cfg 'feature="std"' -C metadata=afc99e972f7e39a1 -C extra-filename=-afc99e972f7e39a1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --cap-lints allow`
195 Compiling mime v0.3.16
196 Running `rustc --crate-name mime /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/mime-0.3.16/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ac14a9115eddd3c2 -C extra-filename=-ac14a9115eddd3c2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
197 Compiling byteorder v1.4.3
198 Running `rustc --crate-name byteorder --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/byteorder-1.4.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=1306999913c8e1b3 -C extra-filename=-1306999913c8e1b3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
199 Compiling regex-syntax v0.7.2
200 Running `rustc --crate-name regex_syntax --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/regex-syntax-0.7.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' --cfg 'feature="unicode"' --cfg 'feature="unicode-age"' --cfg 'feature="unicode-bool"' --cfg 'feature="unicode-case"' --cfg 'feature="unicode-gencat"' --cfg 'feature="unicode-perl"' --cfg 'feature="unicode-script"' --cfg 'feature="unicode-segment"' -C metadata=c9e6cb4ff3b1ed69 -C extra-filename=-c9e6cb4ff3b1ed69 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
201 Compiling regress v0.6.0
202 Running `rustc --crate-name regress --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/regress-0.6.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="backend-pikevm"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=10da65958da1c830 -C extra-filename=-10da65958da1c830 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern hashbrown=/work/oxidecomputer/crucible/target/debug/deps/libhashbrown-d4fb045aef0e24c1.rmeta --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --cap-lints allow`
203 Compiling encoding_rs v0.8.30
204 Running `rustc --crate-name encoding_rs --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/encoding_rs-0.8.30/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' -C metadata=3255048793b3f7a6 -C extra-filename=-3255048793b3f7a6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --cap-lints allow`
205 Compiling crossbeam-channel v0.5.3
206 Running `rustc --crate-name crossbeam_channel --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossbeam-channel-0.5.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="crossbeam-utils"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=19ff6fa486e51c63 -C extra-filename=-19ff6fa486e51c63 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern crossbeam_utils=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_utils-a1fb255bfa31483a.rmeta --cap-lints allow`
207 Compiling ipnet v2.4.0
208 Running `rustc --crate-name ipnet /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ipnet-2.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=8b250db103a32779 -C extra-filename=-8b250db103a32779 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
209 Compiling webpki-roots v0.25.2
210 Running `rustc --crate-name webpki_roots --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/webpki-roots-0.25.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=31272bd9a7615638 -C extra-filename=-31272bd9a7615638 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
211 Compiling crypto-common v0.1.6
212 Running `rustc --crate-name crypto_common --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crypto-common-0.1.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="getrandom"' --cfg 'feature="rand_core"' --cfg 'feature="std"' -C metadata=0953bfc5dcef84b9 -C extra-filename=-0953bfc5dcef84b9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern generic_array=/work/oxidecomputer/crucible/target/debug/deps/libgeneric_array-bc31b01978a602e7.rmeta --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-e2870cc0e01c33c9.rmeta --extern typenum=/work/oxidecomputer/crucible/target/debug/deps/libtypenum-21543e9c4f362850.rmeta --cap-lints allow`
213 Compiling io-lifetimes v1.0.3
214 Running `rustc --crate-name io_lifetimes --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/io-lifetimes-1.0.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="close"' --cfg 'feature="libc"' --cfg 'feature="windows-sys"' -C metadata=df7ee936a2a5cbac -C extra-filename=-df7ee936a2a5cbac --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow --cfg io_safety_is_in_std --cfg panic_in_const_fn`
215 Compiling getopts v0.2.21
216 Running `rustc --crate-name getopts /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/getopts-0.2.21/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=93a8419d37acce69 -C extra-filename=-93a8419d37acce69 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern unicode_width=/work/oxidecomputer/crucible/target/debug/deps/libunicode_width-87c5262a4c4bb0e9.rmeta --cap-lints allow`
217 Fresh unsafe-libyaml v0.2.5
218 Compiling ucd-trie v0.1.3
219 Running `rustc --crate-name ucd_trie --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ucd-trie-0.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=a4e8188b5963a3f1 -C extra-filename=-a4e8188b5963a3f1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
220 Compiling pest v2.1.3
221 Running `rustc --crate-name pest /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pest-2.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e1aabf8a6c7bc1ff -C extra-filename=-e1aabf8a6c7bc1ff --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern ucd_trie=/work/oxidecomputer/crucible/target/debug/deps/libucd_trie-a4e8188b5963a3f1.rmeta --cap-lints allow`
222 Fresh maplit v1.0.2
223 Fresh unicode-xid v0.2.2
224 Compiling synstructure v0.12.6
225 Running `rustc --crate-name synstructure --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/synstructure-0.12.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="proc-macro"' -C metadata=48a8bcebf08faced -C extra-filename=-48a8bcebf08faced --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rmeta --extern unicode_xid=/work/oxidecomputer/crucible/target/debug/deps/libunicode_xid-911d92403f8fb404.rmeta --cap-lints allow`
226 Compiling regex v1.8.3
227 Running `rustc --crate-name regex --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/regex-1.8.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="aho-corasick"' --cfg 'feature="default"' --cfg 'feature="memchr"' --cfg 'feature="perf"' --cfg 'feature="perf-cache"' --cfg 'feature="perf-dfa"' --cfg 'feature="perf-inline"' --cfg 'feature="perf-literal"' --cfg 'feature="std"' --cfg 'feature="unicode"' --cfg 'feature="unicode-age"' --cfg 'feature="unicode-bool"' --cfg 'feature="unicode-case"' --cfg 'feature="unicode-gencat"' --cfg 'feature="unicode-perl"' --cfg 'feature="unicode-script"' --cfg 'feature="unicode-segment"' -C metadata=f9e3a4eb3da387ce -C extra-filename=-f9e3a4eb3da387ce --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern aho_corasick=/work/oxidecomputer/crucible/target/debug/deps/libaho_corasick-afc99e972f7e39a1.rmeta --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --extern regex_syntax=/work/oxidecomputer/crucible/target/debug/deps/libregex_syntax-c9e6cb4ff3b1ed69.rmeta --cap-lints allow`
228 Compiling pest_meta v2.1.3
229 Running `rustc --crate-name pest_meta /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pest_meta-2.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=74e8955a7d734149 -C extra-filename=-74e8955a7d734149 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern maplit=/work/oxidecomputer/crucible/target/debug/deps/libmaplit-695e3e42e8316fa9.rmeta --extern pest=/work/oxidecomputer/crucible/target/debug/deps/libpest-e1aabf8a6c7bc1ff.rmeta --cap-lints allow`
230 Compiling time-core v0.1.1
231 Running `rustc --crate-name time_core --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/time-core-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=fd216cb66fe61cb9 -C extra-filename=-fd216cb66fe61cb9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
232 Compiling hashbrown v0.14.0
233 Running `rustc --crate-name hashbrown --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hashbrown-0.14.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="raw"' -C metadata=615db2aaa1e4d335 -C extra-filename=-615db2aaa1e4d335 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
234 Compiling subtle v2.4.1
235 Running `rustc --crate-name subtle /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/subtle-2.4.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ee0c8d46ce57336b -C extra-filename=-ee0c8d46ce57336b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
236 Compiling equivalent v1.0.0
237 Running `rustc --crate-name equivalent /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/equivalent-1.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=7a40718821784752 -C extra-filename=-7a40718821784752 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
238 Compiling time-macros v0.2.10
239 Running `rustc --crate-name time_macros --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/time-macros-0.2.10/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no --cfg 'feature="formatting"' --cfg 'feature="parsing"' -C metadata=e07155b8a4270ad7 -C extra-filename=-e07155b8a4270ad7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern time_core=/work/oxidecomputer/crucible/target/debug/deps/libtime_core-fd216cb66fe61cb9.rlib --extern proc_macro --cap-lints allow`
240 Compiling pest_generator v2.1.3
241 Running `rustc --crate-name pest_generator /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pest_generator-2.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=01755500ddb0705d -C extra-filename=-01755500ddb0705d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern pest=/work/oxidecomputer/crucible/target/debug/deps/libpest-e1aabf8a6c7bc1ff.rmeta --extern pest_meta=/work/oxidecomputer/crucible/target/debug/deps/libpest_meta-74e8955a7d734149.rmeta --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rmeta --cap-lints allow`
242 Compiling indexmap v2.0.0
243 Running `rustc --crate-name indexmap --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/indexmap-2.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=32a62b7926f710bd -C extra-filename=-32a62b7926f710bd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern equivalent=/work/oxidecomputer/crucible/target/debug/deps/libequivalent-7a40718821784752.rmeta --extern hashbrown=/work/oxidecomputer/crucible/target/debug/deps/libhashbrown-615db2aaa1e4d335.rmeta --cap-lints allow`
244 Compiling serde_derive v1.0.167
245 Running `rustc --crate-name serde_derive /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_derive-1.0.167/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no --cfg 'feature="default"' -C metadata=3441a1f9756a6d5b -C extra-filename=-3441a1f9756a6d5b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
246 Compiling thiserror-impl v1.0.40
247 Running `rustc --crate-name thiserror_impl --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/thiserror-impl-1.0.40/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=bf767c319ff2d238 -C extra-filename=-bf767c319ff2d238 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
248 Compiling futures-macro v0.3.28
249 Running `rustc --crate-name futures_macro --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-macro-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=81cbf12cf17c7f91 -C extra-filename=-81cbf12cf17c7f91 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
250 Compiling tokio-macros v2.1.0
251 Running `rustc --crate-name tokio_macros --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-macros-2.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=129d27199cb07668 -C extra-filename=-129d27199cb07668 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
252 Compiling schemars_derive v0.8.12
253 Running `rustc --crate-name schemars_derive --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/schemars_derive-0.8.12/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=ca659e99c5534de4 -C extra-filename=-ca659e99c5534de4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern serde_derive_internals=/work/oxidecomputer/crucible/target/debug/deps/libserde_derive_internals-6aea929a774cf30f.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern proc_macro --cap-lints allow`
254 Compiling tracing-attributes v0.1.23
255 Running `rustc --crate-name tracing_attributes --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tracing-attributes-0.1.23/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=af60b1f7cb0d953c -C extra-filename=-af60b1f7cb0d953c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern proc_macro --cap-lints allow`
256 Compiling openssl-macros v0.1.0
257 Running `rustc --crate-name openssl_macros --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openssl-macros-0.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=57d0261583d18db5 -C extra-filename=-57d0261583d18db5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern proc_macro --cap-lints allow`
258 Compiling zerocopy-derive v0.2.0
259 Running `rustc --crate-name zerocopy_derive --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/zerocopy-derive-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=f3f98c5abdba899d -C extra-filename=-f3f98c5abdba899d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern synstructure=/work/oxidecomputer/crucible/target/debug/deps/libsynstructure-48a8bcebf08faced.rlib --extern proc_macro --cap-lints allow`
260 Running `rustc --crate-name openssl --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openssl-0.10.55/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=4a7553c915defdd5 -C extra-filename=-4a7553c915defdd5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern foreign_types=/work/oxidecomputer/crucible/target/debug/deps/libforeign_types-07949a1c938aca9c.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern openssl_macros=/work/oxidecomputer/crucible/target/debug/deps/libopenssl_macros-57d0261583d18db5.so --extern ffi=/work/oxidecomputer/crucible/target/debug/deps/libopenssl_sys-fe2ced2b5ac7bf5a.rmeta --cap-lints allow -L native=/usr/ssl-3/lib/amd64 --cfg 'osslconf="OPENSSL_NO_SSL3_METHOD"' --cfg ossl101 --cfg ossl102 --cfg ossl110 --cfg ossl110g --cfg ossl110h --cfg ossl111 --cfg ossl300`
261 Compiling futures-util v0.3.28
262 Running `rustc --crate-name futures_util --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-util-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="async-await"' --cfg 'feature="async-await-macro"' --cfg 'feature="channel"' --cfg 'feature="default"' --cfg 'feature="futures-channel"' --cfg 'feature="futures-io"' --cfg 'feature="futures-macro"' --cfg 'feature="futures-sink"' --cfg 'feature="io"' --cfg 'feature="memchr"' --cfg 'feature="sink"' --cfg 'feature="slab"' --cfg 'feature="std"' -C metadata=b4da5d5433271d56 -C extra-filename=-b4da5d5433271d56 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_io=/work/oxidecomputer/crucible/target/debug/deps/libfutures_io-bcbbef0c8c581d67.rmeta --extern futures_macro=/work/oxidecomputer/crucible/target/debug/deps/libfutures_macro-81cbf12cf17c7f91.so --extern futures_sink=/work/oxidecomputer/crucible/target/debug/deps/libfutures_sink-a6d6ed8a846c5f8a.rmeta --extern futures_task=/work/oxidecomputer/crucible/target/debug/deps/libfutures_task-12b58f257ddc96a4.rmeta --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern pin_utils=/work/oxidecomputer/crucible/target/debug/deps/libpin_utils-bcfb754cd1ab8c67.rmeta --extern slab=/work/oxidecomputer/crucible/target/debug/deps/libslab-5b7c79e345d6363e.rmeta --cap-lints allow`
263 Compiling thiserror v1.0.40
264 Running `rustc --crate-name thiserror --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/thiserror-1.0.40/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=07eca56f531a0e5d -C extra-filename=-07eca56f531a0e5d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern thiserror_impl=/work/oxidecomputer/crucible/target/debug/deps/libthiserror_impl-bf767c319ff2d238.so --cap-lints allow`
265 Compiling num_threads v0.1.5
266 Running `rustc --crate-name num_threads /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num_threads-0.1.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4e84d104f1db9110 -C extra-filename=-4e84d104f1db9110 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
267 Compiling winnow v0.4.6
268 Running `rustc --crate-name winnow --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/winnow-0.4.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=e34c187c773d92ef -C extra-filename=-e34c187c773d92ef --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
269 Compiling time v0.3.23
270 Running `rustc --crate-name time --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/time-0.3.23/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="formatting"' --cfg 'feature="local-offset"' --cfg 'feature="macros"' --cfg 'feature="parsing"' --cfg 'feature="std"' -C metadata=9b604407a0d52f86 -C extra-filename=-9b604407a0d52f86 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern num_threads=/work/oxidecomputer/crucible/target/debug/deps/libnum_threads-4e84d104f1db9110.rmeta --extern time_core=/work/oxidecomputer/crucible/target/debug/deps/libtime_core-fd216cb66fe61cb9.rmeta --extern time_macros=/work/oxidecomputer/crucible/target/debug/deps/libtime_macros-e07155b8a4270ad7.so --cap-lints allow`
271 Compiling pest_derive v2.1.0
272 Running `rustc --crate-name pest_derive /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/pest_derive-2.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=8b18b291ae7a4e87 -C extra-filename=-8b18b291ae7a4e87 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern pest=/work/oxidecomputer/crucible/target/debug/deps/libpest-e1aabf8a6c7bc1ff.rlib --extern pest_generator=/work/oxidecomputer/crucible/target/debug/deps/libpest_generator-01755500ddb0705d.rlib --extern proc_macro --cap-lints allow`
273 Compiling zerocopy v0.3.0
274 Running `rustc --crate-name zerocopy --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/zerocopy-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0b893e01c09e6e03 -C extra-filename=-0b893e01c09e6e03 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --extern zerocopy_derive=/work/oxidecomputer/crucible/target/debug/deps/libzerocopy_derive-f3f98c5abdba899d.so --cap-lints allow`
275 Compiling tracing v0.1.37
276 Running `rustc --crate-name tracing --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tracing-0.1.37/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="attributes"' --cfg 'feature="default"' --cfg 'feature="std"' --cfg 'feature="tracing-attributes"' -C metadata=1de351a7f2b0ab55 -C extra-filename=-1de351a7f2b0ab55 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern tracing_attributes=/work/oxidecomputer/crucible/target/debug/deps/libtracing_attributes-af60b1f7cb0d953c.so --extern tracing_core=/work/oxidecomputer/crucible/target/debug/deps/libtracing_core-adac59f754126e83.rmeta --cap-lints allow`
277 Compiling slog v2.7.0
278 Running `rustc --crate-name slog /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-2.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="dynamic-keys"' --cfg 'feature="max_level_trace"' --cfg 'feature="release_max_level_debug"' --cfg 'feature="std"' -C metadata=84fd25666c3c26ee -C extra-filename=-84fd25666c3c26ee --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg integer128`
279 Fresh rustversion v1.0.14
280 Compiling errno v0.3.1
281 Running `rustc --crate-name errno --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/errno-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e243723ea5172a32 -C extra-filename=-e243723ea5172a32 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
282 Compiling dtrace-parser v0.1.14
283 Running `rustc --crate-name dtrace_parser --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dtrace-parser-0.1.14/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=647a421ab06e4ff3 -C extra-filename=-647a421ab06e4ff3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern pest=/work/oxidecomputer/crucible/target/debug/deps/libpest-e1aabf8a6c7bc1ff.rmeta --extern pest_derive=/work/oxidecomputer/crucible/target/debug/deps/libpest_derive-8b18b291ae7a4e87.so --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --cap-lints allow`
284 Compiling rustix v0.37.7
285 Running `rustc --crate-name rustix --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustix-0.37.7/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="fs"' --cfg 'feature="io-lifetimes"' --cfg 'feature="libc"' --cfg 'feature="std"' --cfg 'feature="use-libc-auxv"' -C metadata=4f0213bb214bbfd6 -C extra-filename=-4f0213bb214bbfd6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern libc_errno=/work/oxidecomputer/crucible/target/debug/deps/liberrno-e243723ea5172a32.rmeta --extern io_lifetimes=/work/oxidecomputer/crucible/target/debug/deps/libio_lifetimes-df7ee936a2a5cbac.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow --cfg libc --cfg solarish`
286 Compiling dof v0.1.5
287 Running `rustc --crate-name dof --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dof-0.1.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=48f06b38719b0295 -C extra-filename=-48f06b38719b0295 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern zerocopy=/work/oxidecomputer/crucible/target/debug/deps/libzerocopy-0b893e01c09e6e03.rmeta --cap-lints allow`
288 Compiling thread-id v4.0.0
289 Running `rustc --crate-name thread_id /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/thread-id-4.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=eaa0d9ff93152533 -C extra-filename=-eaa0d9ff93152533 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
290 Compiling fastrand v1.7.0
291 Running `rustc --crate-name fastrand --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/fastrand-1.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=d1e60a5c45d284ad -C extra-filename=-d1e60a5c45d284ad --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
292 Compiling async-trait v0.1.73
293 Running `rustc --crate-name async_trait --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/async-trait-0.1.73/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=a300e84178ee0ad1 -C extra-filename=-a300e84178ee0ad1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
294 Compiling block-buffer v0.10.2
295 Running `rustc --crate-name block_buffer --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/block-buffer-0.10.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0de17b0eee43f62e -C extra-filename=-0de17b0eee43f62e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern generic_array=/work/oxidecomputer/crucible/target/debug/deps/libgeneric_array-bc31b01978a602e7.rmeta --cap-lints allow`
296 Compiling thread_local v1.1.4
297 Running `rustc --crate-name thread_local --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/thread_local-1.1.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=771d112d9219dc9f -C extra-filename=-771d112d9219dc9f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --cap-lints allow`
298 Compiling serde v1.0.167
299 Running `rustc --crate-name serde /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde-1.0.167/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="rc"' --cfg 'feature="serde_derive"' --cfg 'feature="std"' -C metadata=2779165e31567af2 -C extra-filename=-2779165e31567af2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde_derive=/work/oxidecomputer/crucible/target/debug/deps/libserde_derive-3441a1f9756a6d5b.so --cap-lints allow`
300 Compiling tempfile v3.5.0
301 Running `rustc --crate-name tempfile --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tempfile-3.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=05cb6a5f7e86cb66 -C extra-filename=-05cb6a5f7e86cb66 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern fastrand=/work/oxidecomputer/crucible/target/debug/deps/libfastrand-d1e60a5c45d284ad.rmeta --extern rustix=/work/oxidecomputer/crucible/target/debug/deps/librustix-4f0213bb214bbfd6.rmeta --cap-lints allow`
302 Running `rustc --crate-name native_tls /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/native-tls-0.2.11/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=320c05ab5bbd33c9 -C extra-filename=-320c05ab5bbd33c9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern openssl=/work/oxidecomputer/crucible/target/debug/deps/libopenssl-4a7553c915defdd5.rmeta --extern openssl_probe=/work/oxidecomputer/crucible/target/debug/deps/libopenssl_probe-ef939aeef8872804.rmeta --extern openssl_sys=/work/oxidecomputer/crucible/target/debug/deps/libopenssl_sys-fe2ced2b5ac7bf5a.rmeta --cap-lints allow -L native=/usr/ssl-3/lib/amd64 --cfg have_min_max_version`
303 Compiling futures-executor v0.3.28
304 Running `rustc --crate-name futures_executor --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-executor-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=61f53162107ffb32 -C extra-filename=-61f53162107ffb32 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_task=/work/oxidecomputer/crucible/target/debug/deps/libfutures_task-12b58f257ddc96a4.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --cap-lints allow`
305 Compiling strsim v0.10.0
306 Running `rustc --crate-name strsim /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/strsim-0.10.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=3314ed6444b5bb69 -C extra-filename=-3314ed6444b5bb69 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
307 Compiling digest v0.10.6
308 Running `rustc --crate-name digest --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/digest-0.10.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="block-buffer"' --cfg 'feature="core-api"' --cfg 'feature="default"' --cfg 'feature="mac"' --cfg 'feature="std"' --cfg 'feature="subtle"' -C metadata=d0d34473efe105d1 -C extra-filename=-d0d34473efe105d1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern block_buffer=/work/oxidecomputer/crucible/target/debug/deps/libblock_buffer-0de17b0eee43f62e.rmeta --extern crypto_common=/work/oxidecomputer/crucible/target/debug/deps/libcrypto_common-0953bfc5dcef84b9.rmeta --extern subtle=/work/oxidecomputer/crucible/target/debug/deps/libsubtle-ee0c8d46ce57336b.rmeta --cap-lints allow`
309 Compiling dirs-sys-next v0.1.2
310 Running `rustc --crate-name dirs_sys_next --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dirs-sys-next-0.1.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4419c59e785c767e -C extra-filename=-4419c59e785c767e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
311 Compiling libz-sys v1.1.8
312 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libz-sys-1.1.8/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="libc"' -C metadata=664a34d733156495 -C extra-filename=-664a34d733156495 --out-dir /work/oxidecomputer/crucible/target/debug/build/libz-sys-664a34d733156495 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cc=/work/oxidecomputer/crucible/target/debug/deps/libcc-2976d4b8f46fa671.rlib --extern pkg_config=/work/oxidecomputer/crucible/target/debug/deps/libpkg_config-e05c47386f4bdcc0.rlib --cap-lints allow`
313 Compiling dirs-next v2.0.0
314 Running `rustc --crate-name dirs_next --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dirs-next-2.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=369b0b19f2fe8c2c -C extra-filename=-369b0b19f2fe8c2c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern dirs_sys_next=/work/oxidecomputer/crucible/target/debug/deps/libdirs_sys_next-4419c59e785c767e.rmeta --cap-lints allow`
315 Compiling match_cfg v0.1.0
316 Running `rustc --crate-name match_cfg /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/match_cfg-0.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="use_core"' -C metadata=a8964d97fe20de7b -C extra-filename=-a8964d97fe20de7b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
317 Compiling siphasher v0.3.10
318 Running `rustc --crate-name siphasher --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/siphasher-0.3.10/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=2eb27360f66646f3 -C extra-filename=-2eb27360f66646f3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
319 Compiling fallible-iterator v0.2.0
320 Running `rustc --crate-name fallible_iterator --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/fallible-iterator-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=2f475e2aff163c98 -C extra-filename=-2f475e2aff163c98 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
321 Running `/work/oxidecomputer/crucible/target/debug/build/libz-sys-664a34d733156495/build-script-build`
322 Compiling hostname v0.3.1
323 Running `rustc --crate-name hostname /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hostname-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=4a0f8b1a56e5681a -C extra-filename=-4a0f8b1a56e5681a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern match_cfg=/work/oxidecomputer/crucible/target/debug/deps/libmatch_cfg-a8964d97fe20de7b.rmeta --cap-lints allow`
324 Compiling term v0.7.0
325 Running `rustc --crate-name term --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/term-0.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=9ecdb763f4f627bf -C extra-filename=-9ecdb763f4f627bf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern dirs_next=/work/oxidecomputer/crucible/target/debug/deps/libdirs_next-369b0b19f2fe8c2c.rmeta --cap-lints allow`
326 Compiling futures v0.3.28
327 Running `rustc --crate-name futures --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/futures-0.3.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="async-await"' --cfg 'feature="default"' --cfg 'feature="executor"' --cfg 'feature="futures-executor"' --cfg 'feature="std"' -C metadata=df761c89bfa71e54 -C extra-filename=-df761c89bfa71e54 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_executor=/work/oxidecomputer/crucible/target/debug/deps/libfutures_executor-61f53162107ffb32.rmeta --extern futures_io=/work/oxidecomputer/crucible/target/debug/deps/libfutures_io-bcbbef0c8c581d67.rmeta --extern futures_sink=/work/oxidecomputer/crucible/target/debug/deps/libfutures_sink-a6d6ed8a846c5f8a.rmeta --extern futures_task=/work/oxidecomputer/crucible/target/debug/deps/libfutures_task-12b58f257ddc96a4.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --cap-lints allow`
328 Compiling atty v0.2.14
329 Running `rustc --crate-name atty /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/atty-0.2.14/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bfb6a2cdc762f7c4 -C extra-filename=-bfb6a2cdc762f7c4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
330 Fresh autocfg v0.1.8
331 Compiling rand_core v0.4.2
332 Running `rustc --crate-name rand_core /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_core-0.4.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="std"' -C metadata=cf5252fd1c1f594a -C extra-filename=-cf5252fd1c1f594a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
333 Compiling take_mut v0.2.2
334 Running `rustc --crate-name take_mut /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/take_mut-0.2.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0ebd75564ded4875 -C extra-filename=-0ebd75564ded4875 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
335 Compiling cpufeatures v0.2.1
336 Running `rustc --crate-name cpufeatures --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/cpufeatures-0.2.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=6e25ef20a8fa218f -C extra-filename=-6e25ef20a8fa218f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
337 Compiling slog-async v2.8.0
338 Running `rustc --crate-name slog_async /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-async-2.8.0/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=03e743f036119074 -C extra-filename=-03e743f036119074 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crossbeam_channel=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_channel-19ff6fa486e51c63.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern take_mut=/work/oxidecomputer/crucible/target/debug/deps/libtake_mut-0ebd75564ded4875.rmeta --extern thread_local=/work/oxidecomputer/crucible/target/debug/deps/libthread_local-771d112d9219dc9f.rmeta --cap-lints allow --cfg integer128`
339 Compiling slog-term v2.9.0
340 Running `rustc --crate-name slog_term --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-term-2.9.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=31c687431f6dd53c -C extra-filename=-31c687431f6dd53c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern atty=/work/oxidecomputer/crucible/target/debug/deps/libatty-bfb6a2cdc762f7c4.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern term=/work/oxidecomputer/crucible/target/debug/deps/libterm-9ecdb763f4f627bf.rmeta --extern thread_local=/work/oxidecomputer/crucible/target/debug/deps/libthread_local-771d112d9219dc9f.rmeta --extern time=/work/oxidecomputer/crucible/target/debug/deps/libtime-9b604407a0d52f86.rmeta --cap-lints allow`
341 Compiling libgit2-sys v0.15.2+1.6.4
342 Running `rustc --crate-name build_script_build --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libgit2-sys-0.15.2+1.6.4/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=69da610a4d2f950f -C extra-filename=-69da610a4d2f950f --out-dir /work/oxidecomputer/crucible/target/debug/build/libgit2-sys-69da610a4d2f950f -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cc=/work/oxidecomputer/crucible/target/debug/deps/libcc-2976d4b8f46fa671.rlib --extern pkg_config=/work/oxidecomputer/crucible/target/debug/deps/libpkg_config-e05c47386f4bdcc0.rlib --cap-lints allow`
343 Running `rustc --crate-name libz_sys --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libz-sys-1.1.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="libc"' -C metadata=a3111f279c2174e3 -C extra-filename=-a3111f279c2174e3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow -L native=/usr/lib/amd64 -l z`
344 Compiling errno v0.2.8
345 Running `rustc --crate-name errno /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/errno-0.2.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=af3769d2acbbbf20 -C extra-filename=-af3769d2acbbbf20 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
346 Compiling utf8parse v0.2.1
347 Running `rustc --crate-name utf8parse --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/utf8parse-0.2.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=7be01c1e08c14495 -C extra-filename=-7be01c1e08c14495 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
348 Fresh vcpkg v0.2.15
349 Compiling rand_core v0.3.1
350 Running `rustc --crate-name rand_core /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_core-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=588d7d54223a4bae -C extra-filename=-588d7d54223a4bae --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-cf5252fd1c1f594a.rmeta --cap-lints allow`
351 Fresh ident_case v1.0.1
352 Compiling static_assertions v1.1.0
353 Running `rustc --crate-name static_assertions /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/static_assertions-1.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=457e7de98121159f -C extra-filename=-457e7de98121159f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
354 Compiling darling_core v0.20.1
355 Running `rustc --crate-name darling_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/darling_core-0.20.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="strsim"' --cfg 'feature="suggestions"' -C metadata=9504cdbd254aa6a2 -C extra-filename=-9504cdbd254aa6a2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern ident_case=/work/oxidecomputer/crucible/target/debug/deps/libident_case-90a2253e7db4d178.rmeta --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern strsim=/work/oxidecomputer/crucible/target/debug/deps/libstrsim-3314ed6444b5bb69.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rmeta --cap-lints allow`
356 Compiling memoffset v0.7.1
357 Running `rustc --crate-name memoffset /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/memoffset-0.7.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=7efa3f7abe9851cf -C extra-filename=-7efa3f7abe9851cf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg tuple_ty --cfg allow_clippy --cfg maybe_uninit --cfg doctests --cfg raw_ref_macros`
358 Compiling num-integer v0.1.44
359 Running `rustc --crate-name num_integer /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-integer-0.1.44/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=ada2de842cf2fef0 -C extra-filename=-ada2de842cf2fef0 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow --cfg has_i128`
360 Fresh semver v0.1.20
361 Compiling bitflags v2.3.3
362 Running `rustc --crate-name bitflags --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bitflags-2.3.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=109244799287a8c3 -C extra-filename=-109244799287a8c3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
363 Running `/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-69da610a4d2f950f/build-script-build`
364 Fresh rustc_version v0.1.7
365 Compiling libsqlite3-sys v0.26.0
366 Running `rustc --crate-name libsqlite3_sys --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libsqlite3-sys-0.26.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="min_sqlite_version_3_14_0"' --cfg 'feature="pkg-config"' --cfg 'feature="vcpkg"' -C metadata=d6c034ac2a660d5f -C extra-filename=-d6c034ac2a660d5f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow -L native=/usr/lib/amd64 -l sqlite3`
367 Compiling nix v0.26.2
368 Running `rustc --crate-name nix --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/nix-0.26.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="acct"' --cfg 'feature="aio"' --cfg 'feature="default"' --cfg 'feature="dir"' --cfg 'feature="env"' --cfg 'feature="event"' --cfg 'feature="feature"' --cfg 'feature="fs"' --cfg 'feature="hostname"' --cfg 'feature="inotify"' --cfg 'feature="ioctl"' --cfg 'feature="kmod"' --cfg 'feature="memoffset"' --cfg 'feature="mman"' --cfg 'feature="mount"' --cfg 'feature="mqueue"' --cfg 'feature="net"' --cfg 'feature="personality"' --cfg 'feature="pin-utils"' --cfg 'feature="poll"' --cfg 'feature="process"' --cfg 'feature="pthread"' --cfg 'feature="ptrace"' --cfg 'feature="quota"' --cfg 'feature="reboot"' --cfg 'feature="resource"' --cfg 'feature="sched"' --cfg 'feature="signal"' --cfg 'feature="socket"' --cfg 'feature="term"' --cfg 'feature="time"' --cfg 'feature="ucontext"' --cfg 'feature="uio"' --cfg 'feature="user"' --cfg 'feature="zerocopy"' -C metadata=3635aff9412bf811 -C extra-filename=-3635aff9412bf811 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern memoffset=/work/oxidecomputer/crucible/target/debug/deps/libmemoffset-7efa3f7abe9851cf.rmeta --extern pin_utils=/work/oxidecomputer/crucible/target/debug/deps/libpin_utils-bcfb754cd1ab8c67.rmeta --extern static_assertions=/work/oxidecomputer/crucible/target/debug/deps/libstatic_assertions-457e7de98121159f.rmeta --cap-lints allow`
369 Compiling rustix v0.36.5
370 Running `rustc --crate-name rustix --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustix-0.36.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="io-lifetimes"' --cfg 'feature="libc"' --cfg 'feature="std"' --cfg 'feature="termios"' --cfg 'feature="use-libc-auxv"' -C metadata=06529533a7816508 -C extra-filename=-06529533a7816508 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern libc_errno=/work/oxidecomputer/crucible/target/debug/deps/liberrno-af3769d2acbbbf20.rmeta --extern io_lifetimes=/work/oxidecomputer/crucible/target/debug/deps/libio_lifetimes-df7ee936a2a5cbac.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow --cfg libc`
371 Compiling anstyle-parse v0.2.0
372 Running `rustc --crate-name anstyle_parse --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/anstyle-parse-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="utf8"' -C metadata=1cddf38a13db6213 -C extra-filename=-1cddf38a13db6213 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern utf8parse=/work/oxidecomputer/crucible/target/debug/deps/libutf8parse-7be01c1e08c14495.rmeta --cap-lints allow`
373 Compiling async-stream-impl v0.3.5
374 Running `rustc --crate-name async_stream_impl --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/async-stream-impl-0.3.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=499322f076b313bc -C extra-filename=-499322f076b313bc --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
375 Compiling hashlink v0.8.0
376 Running `rustc --crate-name hashlink --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hashlink-0.8.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=20d837d226474f7f -C extra-filename=-20d837d226474f7f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern hashbrown=/work/oxidecomputer/crucible/target/debug/deps/libhashbrown-3ad6614047c487f9.rmeta --cap-lints allow`
377 Compiling fallible-streaming-iterator v0.1.9
378 Running `rustc --crate-name fallible_streaming_iterator /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/fallible-streaming-iterator-0.1.9/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=95608b7ccd5e88f6 -C extra-filename=-95608b7ccd5e88f6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
379 Compiling anstyle-query v1.0.0
380 Running `rustc --crate-name anstyle_query --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/anstyle-query-1.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f6b83c0a54455ea0 -C extra-filename=-f6b83c0a54455ea0 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
381 Compiling anstyle v1.0.0
382 Running `rustc --crate-name anstyle --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/anstyle-1.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=b22d2b8ea1e30552 -C extra-filename=-b22d2b8ea1e30552 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
383 Compiling colorchoice v1.0.0
384 Running `rustc --crate-name colorchoice --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/colorchoice-1.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=88cb686aa85d1c3b -C extra-filename=-88cb686aa85d1c3b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
385 Compiling rusqlite v0.29.0
386 Running `rustc --crate-name rusqlite --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rusqlite-0.29.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e4d2316a88b06837 -C extra-filename=-e4d2316a88b06837 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-109244799287a8c3.rmeta --extern fallible_iterator=/work/oxidecomputer/crucible/target/debug/deps/libfallible_iterator-2f475e2aff163c98.rmeta --extern fallible_streaming_iterator=/work/oxidecomputer/crucible/target/debug/deps/libfallible_streaming_iterator-95608b7ccd5e88f6.rmeta --extern hashlink=/work/oxidecomputer/crucible/target/debug/deps/libhashlink-20d837d226474f7f.rmeta --extern libsqlite3_sys=/work/oxidecomputer/crucible/target/debug/deps/liblibsqlite3_sys-d6c034ac2a660d5f.rmeta --extern smallvec=/work/oxidecomputer/crucible/target/debug/deps/libsmallvec-397f26bd8c84e528.rmeta --cap-lints allow -L native=/usr/lib/amd64`
387 Compiling terminal_size v0.2.5
388 Running `rustc --crate-name terminal_size --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/terminal_size-0.2.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0b293d2f7e4e73b7 -C extra-filename=-0b293d2f7e4e73b7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rustix=/work/oxidecomputer/crucible/target/debug/deps/librustix-06529533a7816508.rmeta --cap-lints allow`
389 Compiling anstream v0.5.0
390 Running `rustc --crate-name anstream --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/anstream-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="auto"' --cfg 'feature="default"' --cfg 'feature="wincon"' -C metadata=ca70a88d33d17944 -C extra-filename=-ca70a88d33d17944 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anstyle=/work/oxidecomputer/crucible/target/debug/deps/libanstyle-b22d2b8ea1e30552.rmeta --extern anstyle_parse=/work/oxidecomputer/crucible/target/debug/deps/libanstyle_parse-1cddf38a13db6213.rmeta --extern anstyle_query=/work/oxidecomputer/crucible/target/debug/deps/libanstyle_query-f6b83c0a54455ea0.rmeta --extern colorchoice=/work/oxidecomputer/crucible/target/debug/deps/libcolorchoice-88cb686aa85d1c3b.rmeta --extern utf8parse=/work/oxidecomputer/crucible/target/debug/deps/libutf8parse-7be01c1e08c14495.rmeta --cap-lints allow`
391 Compiling num-bigint v0.2.6
392 Running `rustc --crate-name num_bigint /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-bigint-0.2.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=fb487451ba2d3918 -C extra-filename=-fb487451ba2d3918 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_integer=/work/oxidecomputer/crucible/target/debug/deps/libnum_integer-ada2de842cf2fef0.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow --cfg has_i128`
393 Compiling twox-hash v1.6.3
394 Running `rustc --crate-name twox_hash --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/twox-hash-1.6.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="rand"' --cfg 'feature="std"' -C metadata=9f5dd4f7319ca539 -C extra-filename=-9f5dd4f7319ca539 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern static_assertions=/work/oxidecomputer/crucible/target/debug/deps/libstatic_assertions-457e7de98121159f.rmeta --cap-lints allow`
395 Compiling async-stream v0.3.5
396 Running `rustc --crate-name async_stream --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/async-stream-0.3.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0486f21173e73f9c -C extra-filename=-0486f21173e73f9c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_stream_impl=/work/oxidecomputer/crucible/target/debug/deps/libasync_stream_impl-499322f076b313bc.so --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --cap-lints allow`
397 Compiling bytes v1.4.0
398 Running `rustc --crate-name bytes --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bytes-1.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="serde"' --cfg 'feature="std"' -C metadata=64a8a55ef81e55dd -C extra-filename=-64a8a55ef81e55dd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
399 Compiling serde_json v1.0.105
400 Compiling indexmap v1.9.3
401 Running `rustc --crate-name serde_json --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_json-1.0.105/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=96027e7dd982d07a -C extra-filename=-96027e7dd982d07a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern ryu=/work/oxidecomputer/crucible/target/debug/deps/libryu-062e5ac4087417b3.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow --cfg limb_width_64`
402 Running `rustc --crate-name indexmap --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/indexmap-1.9.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="serde"' --cfg 'feature="serde-1"' --cfg 'feature="std"' -C metadata=c2c8f74266eebb64 -C extra-filename=-c2c8f74266eebb64 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern hashbrown=/work/oxidecomputer/crucible/target/debug/deps/libhashbrown-3ad6614047c487f9.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow --cfg has_std`
403 Compiling chrono v0.4.26
404 Running `rustc --crate-name chrono --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/chrono-0.4.26/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="clock"' --cfg 'feature="default"' --cfg 'feature="iana-time-zone"' --cfg 'feature="js-sys"' --cfg 'feature="oldtime"' --cfg 'feature="serde"' --cfg 'feature="std"' --cfg 'feature="time"' --cfg 'feature="wasm-bindgen"' --cfg 'feature="wasmbind"' --cfg 'feature="winapi"' -C metadata=0402a5e17dd99cdf -C extra-filename=-0402a5e17dd99cdf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern iana_time_zone=/work/oxidecomputer/crucible/target/debug/deps/libiana_time_zone-3051bc3a975f54c9.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern time=/work/oxidecomputer/crucible/target/debug/deps/libtime-fcbf6ea26d482f3a.rmeta --cap-lints allow`
405 Compiling tokio v1.32.0
406 Running `rustc --crate-name tokio --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-1.32.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="bytes"' --cfg 'feature="default"' --cfg 'feature="fs"' --cfg 'feature="full"' --cfg 'feature="io-std"' --cfg 'feature="io-util"' --cfg 'feature="libc"' --cfg 'feature="macros"' --cfg 'feature="mio"' --cfg 'feature="net"' --cfg 'feature="num_cpus"' --cfg 'feature="parking_lot"' --cfg 'feature="process"' --cfg 'feature="rt"' --cfg 'feature="rt-multi-thread"' --cfg 'feature="signal"' --cfg 'feature="signal-hook-registry"' --cfg 'feature="socket2"' --cfg 'feature="sync"' --cfg 'feature="test-util"' --cfg 'feature="time"' --cfg 'feature="tokio-macros"' --cfg 'feature="windows-sys"' -C metadata=cf190744403b2ee1 -C extra-filename=-cf190744403b2ee1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern mio=/work/oxidecomputer/crucible/target/debug/deps/libmio-27a8136cf12de2bb.rmeta --extern num_cpus=/work/oxidecomputer/crucible/target/debug/deps/libnum_cpus-67a451bebfcc5086.rmeta --extern parking_lot=/work/oxidecomputer/crucible/target/debug/deps/libparking_lot-970d5c0acece447c.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern signal_hook_registry=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook_registry-4d955479f235827e.rmeta --extern socket2=/work/oxidecomputer/crucible/target/debug/deps/libsocket2-b464b617227db85e.rmeta --extern 
tokio_macros=/work/oxidecomputer/crucible/target/debug/deps/libtokio_macros-129d27199cb07668.so --cap-lints allow`
407 Compiling uuid v1.4.1
408 Running `rustc --crate-name uuid --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/uuid-1.4.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="getrandom"' --cfg 'feature="rng"' --cfg 'feature="serde"' --cfg 'feature="std"' --cfg 'feature="v4"' -C metadata=7cc8e87b4149b49e -C extra-filename=-7cc8e87b4149b49e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern getrandom=/work/oxidecomputer/crucible/target/debug/deps/libgetrandom-567199de146d617e.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
409 Compiling http v0.2.9
410 Running `rustc --crate-name http --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/http-0.2.9/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4526ab951518c907 -C extra-filename=-4526ab951518c907 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --cap-lints allow`
411 Compiling serde_urlencoded v0.7.1
412 Running `rustc --crate-name serde_urlencoded --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_urlencoded-0.7.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=779d3ac41d95557a -C extra-filename=-779d3ac41d95557a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern form_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libform_urlencoded-ef731295a29c9150.rmeta --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern ryu=/work/oxidecomputer/crucible/target/debug/deps/libryu-062e5ac4087417b3.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
413 Compiling serde_tokenstream v0.2.0
414 Running `rustc --crate-name serde_tokenstream --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_tokenstream-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=2a83027836bfd0fb -C extra-filename=-2a83027836bfd0fb --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rmeta --cap-lints allow`
415 Compiling schemars v0.8.12
416 Running `rustc --crate-name schemars --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/schemars-0.8.12/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="bytes"' --cfg 'feature="chrono"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="schemars_derive"' --cfg 'feature="uuid1"' -C metadata=83d20014cee5d9b5 -C extra-filename=-83d20014cee5d9b5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern dyn_clone=/work/oxidecomputer/crucible/target/debug/deps/libdyn_clone-6ca330b50294836a.rmeta --extern schemars_derive=/work/oxidecomputer/crucible/target/debug/deps/libschemars_derive-ca659e99c5534de4.so --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern uuid1=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow --cfg std_atomic64 --cfg std_atomic`
417 Compiling openapiv3 v1.0.2
418 Compiling serde_yaml v0.9.17
419 Running `rustc --crate-name openapiv3 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/openapiv3-1.0.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="skip_serializing_defaults"' -C metadata=f84072cef6d0d68c -C extra-filename=-f84072cef6d0d68c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --cap-lints allow`
420 Running `rustc --crate-name serde_yaml --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_yaml-0.9.17/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=c7db3f53bbf8134b -C extra-filename=-c7db3f53bbf8134b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern ryu=/work/oxidecomputer/crucible/target/debug/deps/libryu-062e5ac4087417b3.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern unsafe_libyaml=/work/oxidecomputer/crucible/target/debug/deps/libunsafe_libyaml-18c99c193ab0b0f5.rmeta --cap-lints allow`
421 Compiling serde_spanned v0.6.3
422 Running `rustc --crate-name serde_spanned --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_spanned-0.6.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="serde"' -C metadata=92d13640a4d5d836 -C extra-filename=-92d13640a4d5d836 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
423 Compiling toml_datetime v0.6.3
424 Running `rustc --crate-name toml_datetime --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/toml_datetime-0.6.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="serde"' -C metadata=c003080b79dfe49d -C extra-filename=-c003080b79dfe49d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
425 Compiling http-body v0.4.4
426 Running `rustc --crate-name http_body --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/http-body-0.4.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bb1d69dd918c127f -C extra-filename=-bb1d69dd918c127f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --cap-lints allow`
427 Compiling toml_edit v0.19.12
428 Running `rustc --crate-name toml_edit --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/toml_edit-0.19.12/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="serde"' -C metadata=29ed73c573391add -C extra-filename=-29ed73c573391add --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-32a62b7926f710bd.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_spanned=/work/oxidecomputer/crucible/target/debug/deps/libserde_spanned-92d13640a4d5d836.rmeta --extern toml_datetime=/work/oxidecomputer/crucible/target/debug/deps/libtoml_datetime-c003080b79dfe49d.rmeta --extern winnow=/work/oxidecomputer/crucible/target/debug/deps/libwinnow-e34c187c773d92ef.rmeta --cap-lints allow`
429 Compiling usdt-impl v0.3.5
430 Running `rustc --crate-name usdt_impl --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/usdt-impl-0.3.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="asm"' -C metadata=aa84cb563c29877d -C extra-filename=-aa84cb563c29877d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --extern dof=/work/oxidecomputer/crucible/target/debug/deps/libdof-48f06b38719b0295.rmeta --extern dtrace_parser=/work/oxidecomputer/crucible/target/debug/deps/libdtrace_parser-647a421ab06e4ff3.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern thread_id=/work/oxidecomputer/crucible/target/debug/deps/libthread_id-eaa0d9ff93152533.rmeta --cap-lints allow --cfg usdt_stable_asm --cfg usdt_backend_standard`
431 Compiling typify-impl v0.0.13 (https://github.com/oxidecomputer/typify#92bfed8b)
432 Running `rustc --crate-name typify_impl --edition=2021 /home/build/.cargo/git/checkouts/typify-288d5a84bbbe6a46/92bfed8/typify-impl/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=5385296f2ea50467 -C extra-filename=-5385296f2ea50467 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern heck=/work/oxidecomputer/crucible/target/debug/deps/libheck-29af75c938b110f7.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern regress=/work/oxidecomputer/crucible/target/debug/deps/libregress-10da65958da1c830.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern unicode_ident=/work/oxidecomputer/crucible/target/debug/deps/libunicode_ident-81e7752fff89e70f.rmeta --cap-lints allow`
433 Compiling semver v1.0.18
434 Running `rustc --crate-name semver --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/semver-1.0.18/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="serde"' --cfg 'feature="std"' -C metadata=8c1c5827befd93e7 -C extra-filename=-8c1c5827befd93e7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
435 Compiling serde_tokenstream v0.1.6
436 Running `rustc --crate-name serde_tokenstream --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_tokenstream-0.1.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=a3f756c2f035b223 -C extra-filename=-a3f756c2f035b223 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rmeta --cap-lints allow`
437 Compiling slog-json v2.6.1
438 Running `rustc --crate-name slog_json --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-json-2.6.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=d8408f8f3a6dd5b7 -C extra-filename=-d8408f8f3a6dd5b7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern time=/work/oxidecomputer/crucible/target/debug/deps/libtime-9b604407a0d52f86.rmeta --cap-lints allow`
439 Compiling usdt-macro v0.3.5
440 Running `rustc --crate-name usdt_macro --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/usdt-macro-0.3.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no --cfg 'feature="asm"' -C metadata=0ea5e72aa4bb61c5 -C extra-filename=-0ea5e72aa4bb61c5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern dtrace_parser=/work/oxidecomputer/crucible/target/debug/deps/libdtrace_parser-647a421ab06e4ff3.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern serde_tokenstream=/work/oxidecomputer/crucible/target/debug/deps/libserde_tokenstream-a3f756c2f035b223.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern usdt_impl=/work/oxidecomputer/crucible/target/debug/deps/libusdt_impl-aa84cb563c29877d.rlib --extern proc_macro --cap-lints allow`
441 Compiling usdt-attr-macro v0.3.5
442 Running `rustc --crate-name usdt_attr_macro --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/usdt-attr-macro-0.3.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no --cfg 'feature="asm"' -C metadata=1e49091236cd57b8 -C extra-filename=-1e49091236cd57b8 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern dtrace_parser=/work/oxidecomputer/crucible/target/debug/deps/libdtrace_parser-647a421ab06e4ff3.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern serde_tokenstream=/work/oxidecomputer/crucible/target/debug/deps/libserde_tokenstream-a3f756c2f035b223.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern usdt_impl=/work/oxidecomputer/crucible/target/debug/deps/libusdt_impl-aa84cb563c29877d.rlib --extern proc_macro --cap-lints allow`
443 Compiling slog-bunyan v2.4.0
444 Running `rustc --crate-name slog_bunyan --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-bunyan-2.4.0/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=dce051a6775f1d99 -C extra-filename=-dce051a6775f1d99 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern hostname=/work/oxidecomputer/crucible/target/debug/deps/libhostname-4a0f8b1a56e5681a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern slog_json=/work/oxidecomputer/crucible/target/debug/deps/libslog_json-d8408f8f3a6dd5b7.rmeta --extern time=/work/oxidecomputer/crucible/target/debug/deps/libtime-9b604407a0d52f86.rmeta --cap-lints allow`
445 Compiling rustc_version v0.4.0
446 Running `rustc --crate-name rustc_version --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustc_version-0.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=201ef6100eba532b -C extra-filename=-201ef6100eba532b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern semver=/work/oxidecomputer/crucible/target/debug/deps/libsemver-8c1c5827befd93e7.rmeta --cap-lints allow`
447 Compiling toml v0.7.6
448 Running `rustc --crate-name toml --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/toml-0.7.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="display"' --cfg 'feature="parse"' -C metadata=de0eb3fcc3b95b5c -C extra-filename=-de0eb3fcc3b95b5c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_spanned=/work/oxidecomputer/crucible/target/debug/deps/libserde_spanned-92d13640a4d5d836.rmeta --extern toml_datetime=/work/oxidecomputer/crucible/target/debug/deps/libtoml_datetime-c003080b79dfe49d.rmeta --extern toml_edit=/work/oxidecomputer/crucible/target/debug/deps/libtoml_edit-29ed73c573391add.rmeta --cap-lints allow`
449 Compiling darling_macro v0.20.1
450 Running `rustc --crate-name darling_macro --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/darling_macro-0.20.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=9054a39daee42f78 -C extra-filename=-9054a39daee42f78 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern darling_core=/work/oxidecomputer/crucible/target/debug/deps/libdarling_core-9504cdbd254aa6a2.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
451 Compiling camino v1.1.4
452 Running `rustc --crate-name camino --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/camino-1.1.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="serde"' --cfg 'feature="serde1"' -C metadata=45f0f4a2c258f934 -C extra-filename=-45f0f4a2c258f934 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow --cfg path_buf_capacity --cfg shrink_to --cfg try_reserve_2 --cfg path_buf_deref_mut`
453 Compiling hex v0.4.3
454 Running `rustc --crate-name hex --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hex-0.4.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="serde"' --cfg 'feature="std"' -C metadata=6531b11cb72de3e5 -C extra-filename=-6531b11cb72de3e5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
455 Compiling tokio-util v0.7.3
456 Running `rustc --crate-name tokio_util --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-util-0.7.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="codec"' --cfg 'feature="default"' --cfg 'feature="io"' --cfg 'feature="tracing"' -C metadata=279b3765a2b5aad1 -C extra-filename=-279b3765a2b5aad1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_sink=/work/oxidecomputer/crucible/target/debug/deps/libfutures_sink-a6d6ed8a846c5f8a.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rmeta --cap-lints allow`
457 Compiling tokio-native-tls v0.3.0
458 Running `rustc --crate-name tokio_native_tls --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-native-tls-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f56aba82a642e205 -C extra-filename=-f56aba82a642e205 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern native_tls=/work/oxidecomputer/crucible/target/debug/deps/libnative_tls-320c05ab5bbd33c9.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --cap-lints allow -L native=/usr/ssl-3/lib/amd64`
459 Compiling usdt v0.3.5
460 Running `rustc --crate-name usdt --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/usdt-0.3.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="asm"' --cfg 'feature="default"' --cfg 'feature="dtrace-parser"' -C metadata=86bb76e3b8fcea87 -C extra-filename=-86bb76e3b8fcea87 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern dtrace_parser=/work/oxidecomputer/crucible/target/debug/deps/libdtrace_parser-647a421ab06e4ff3.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern usdt_attr_macro=/work/oxidecomputer/crucible/target/debug/deps/libusdt_attr_macro-1e49091236cd57b8.so --extern usdt_impl=/work/oxidecomputer/crucible/target/debug/deps/libusdt_impl-aa84cb563c29877d.rmeta --extern usdt_macro=/work/oxidecomputer/crucible/target/debug/deps/libusdt_macro-0ea5e72aa4bb61c5.so --cap-lints allow`
461 Compiling darling v0.20.1
462 Running `rustc --crate-name darling --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/darling-0.20.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="suggestions"' -C metadata=00ecdd47ee30dd62 -C extra-filename=-00ecdd47ee30dd62 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern darling_core=/work/oxidecomputer/crucible/target/debug/deps/libdarling_core-9504cdbd254aa6a2.rmeta --extern darling_macro=/work/oxidecomputer/crucible/target/debug/deps/libdarling_macro-9054a39daee42f78.so --cap-lints allow`
463 Compiling sha2 v0.10.2
464 Running `rustc --crate-name sha2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/sha2-0.10.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=9b09b4b286e2cb62 -C extra-filename=-9b09b4b286e2cb62 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern cpufeatures=/work/oxidecomputer/crucible/target/debug/deps/libcpufeatures-6e25ef20a8fa218f.rmeta --extern digest=/work/oxidecomputer/crucible/target/debug/deps/libdigest-d0d34473efe105d1.rmeta --cap-lints allow`
465 Compiling hmac v0.12.1
466 Running `rustc --crate-name hmac --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hmac-0.12.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=dbc6bea790b908e1 -C extra-filename=-dbc6bea790b908e1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern digest=/work/oxidecomputer/crucible/target/debug/deps/libdigest-d0d34473efe105d1.rmeta --cap-lints allow`
467 Compiling slog-dtrace v0.2.3
468 Running `rustc --crate-name slog_dtrace --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/slog-dtrace-0.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=8024beacfb95325b -C extra-filename=-8024beacfb95325b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rmeta --cap-lints allow`
469 Compiling md-5 v0.10.1
470 Running `rustc --crate-name md5 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/md-5-0.10.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=161e8167d58df2d6 -C extra-filename=-161e8167d58df2d6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern digest=/work/oxidecomputer/crucible/target/debug/deps/libdigest-d0d34473efe105d1.rmeta --cap-lints allow`
471 Compiling h2 v0.3.20
472 Running `rustc --crate-name h2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/h2-0.3.20/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=3e2d8390f23dd48a -C extra-filename=-3e2d8390f23dd48a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_sink=/work/oxidecomputer/crucible/target/debug/deps/libfutures_sink-a6d6ed8a846c5f8a.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern slab=/work/oxidecomputer/crucible/target/debug/deps/libslab-5b7c79e345d6363e.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rmeta --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rmeta --cap-lints allow`
473 Compiling structmeta-derive v0.1.5
474 Running `rustc --crate-name structmeta_derive --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/structmeta-derive-0.1.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=fab85507a465b1dd -C extra-filename=-fab85507a465b1dd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern proc_macro --cap-lints allow`
475 Compiling stringprep v0.1.2
476 Running `rustc --crate-name stringprep /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/stringprep-0.1.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ca9dbc365b4e987e -C extra-filename=-ca9dbc365b4e987e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern unicode_bidi=/work/oxidecomputer/crucible/target/debug/deps/libunicode_bidi-2c825f5beb05a037.rmeta --extern unicode_normalization=/work/oxidecomputer/crucible/target/debug/deps/libunicode_normalization-3df8261a03d4248e.rmeta --cap-lints allow`
477 Compiling base64 v0.13.1
478 Running `rustc --crate-name base64 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/base64-0.13.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=480956047b7063a4 -C extra-filename=-480956047b7063a4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
479 Compiling atomic-waker v1.1.1
480 Running `rustc --crate-name atomic_waker --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/atomic-waker-1.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=2b745687caafccb6 -C extra-filename=-2b745687caafccb6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
481 Compiling clap_lex v0.5.0
482 Running `rustc --crate-name clap_lex --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/clap_lex-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=cd6fc5d5239978f4 -C extra-filename=-cd6fc5d5239978f4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
483 Compiling regex-syntax v0.6.28
484 Running `rustc --crate-name regex_syntax --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/regex-syntax-0.6.28/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="unicode"' --cfg 'feature="unicode-age"' --cfg 'feature="unicode-bool"' --cfg 'feature="unicode-case"' --cfg 'feature="unicode-gencat"' --cfg 'feature="unicode-perl"' --cfg 'feature="unicode-script"' --cfg 'feature="unicode-segment"' -C metadata=b9593ef3338880de -C extra-filename=-b9593ef3338880de --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
485 Compiling clap_builder v4.4.0
486 Running `rustc --crate-name clap_builder --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/clap_builder-4.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="color"' --cfg 'feature="env"' --cfg 'feature="error-context"' --cfg 'feature="help"' --cfg 'feature="std"' --cfg 'feature="suggestions"' --cfg 'feature="usage"' --cfg 'feature="wrap_help"' -C metadata=f6e4b2a8b78f1399 -C extra-filename=-f6e4b2a8b78f1399 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anstream=/work/oxidecomputer/crucible/target/debug/deps/libanstream-ca70a88d33d17944.rmeta --extern anstyle=/work/oxidecomputer/crucible/target/debug/deps/libanstyle-b22d2b8ea1e30552.rmeta --extern clap_lex=/work/oxidecomputer/crucible/target/debug/deps/libclap_lex-cd6fc5d5239978f4.rmeta --extern strsim=/work/oxidecomputer/crucible/target/debug/deps/libstrsim-3314ed6444b5bb69.rmeta --extern terminal_size=/work/oxidecomputer/crucible/target/debug/deps/libterminal_size-0b293d2f7e4e73b7.rmeta --cap-lints allow`
487 Compiling postgres-protocol v0.6.4
488 Running `rustc --crate-name postgres_protocol --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/postgres-protocol-0.6.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=2544e50ad0920d44 -C extra-filename=-2544e50ad0920d44 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-480956047b7063a4.rmeta --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern fallible_iterator=/work/oxidecomputer/crucible/target/debug/deps/libfallible_iterator-2f475e2aff163c98.rmeta --extern hmac=/work/oxidecomputer/crucible/target/debug/deps/libhmac-dbc6bea790b908e1.rmeta --extern md5=/work/oxidecomputer/crucible/target/debug/deps/libmd5-161e8167d58df2d6.rmeta --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rmeta --extern stringprep=/work/oxidecomputer/crucible/target/debug/deps/libstringprep-ca9dbc365b4e987e.rmeta --cap-lints allow`
489 Compiling waitgroup v0.1.2
490 Running `rustc --crate-name waitgroup --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/waitgroup-0.1.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=db859ead02bd709e -C extra-filename=-db859ead02bd709e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern atomic_waker=/work/oxidecomputer/crucible/target/debug/deps/libatomic_waker-2b745687caafccb6.rmeta --cap-lints allow`
491 Compiling structmeta v0.1.5
492 Running `rustc --crate-name structmeta --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/structmeta-0.1.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=478a2e76026e2d21 -C extra-filename=-478a2e76026e2d21 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern structmeta_derive=/work/oxidecomputer/crucible/target/debug/deps/libstructmeta_derive-fab85507a465b1dd.so --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rmeta --cap-lints allow`
493 Compiling serde_with_macros v2.3.3
494 Running `rustc --crate-name serde_with_macros --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_with_macros-2.3.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=0356ef5a55ed4c76 -C extra-filename=-0356ef5a55ed4c76 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern darling=/work/oxidecomputer/crucible/target/debug/deps/libdarling-00ecdd47ee30dd62.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
495 Compiling dropshot_endpoint v0.9.1-dev (https://github.com/oxidecomputer/dropshot?branch=main#aca6de3c)
496 Running `rustc --crate-name dropshot_endpoint --edition=2018 /home/build/.cargo/git/checkouts/dropshot-a4a923d29dccc492/aca6de3/dropshot_endpoint/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=1ff3a3dd0352c250 -C extra-filename=-1ff3a3dd0352c250 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_tokenstream=/work/oxidecomputer/crucible/target/debug/deps/libserde_tokenstream-2a83027836bfd0fb.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
497 Compiling num-rational v0.2.4
498 Running `rustc --crate-name num_rational /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-rational-0.2.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="bigint"' --cfg 'feature="num-bigint"' --cfg 'feature="std"' -C metadata=ff1997054aaa4c62 -C extra-filename=-ff1997054aaa4c62 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_bigint=/work/oxidecomputer/crucible/target/debug/deps/libnum_bigint-fb487451ba2d3918.rmeta --extern num_integer=/work/oxidecomputer/crucible/target/debug/deps/libnum_integer-ada2de842cf2fef0.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow --cfg has_i128 --cfg has_const_fn`
499 Compiling hyper v0.14.27
500 Running `rustc --crate-name hyper --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hyper-0.14.27/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="client"' --cfg 'feature="default"' --cfg 'feature="full"' --cfg 'feature="h2"' --cfg 'feature="http1"' --cfg 'feature="http2"' --cfg 'feature="runtime"' --cfg 'feature="server"' --cfg 'feature="socket2"' --cfg 'feature="stream"' --cfg 'feature="tcp"' -C metadata=dad943d3b7cc33e9 -C extra-filename=-dad943d3b7cc33e9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern h2=/work/oxidecomputer/crucible/target/debug/deps/libh2-3e2d8390f23dd48a.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern http_body=/work/oxidecomputer/crucible/target/debug/deps/libhttp_body-bb1d69dd918c127f.rmeta --extern httparse=/work/oxidecomputer/crucible/target/debug/deps/libhttparse-59406412a39ce707.rmeta --extern httpdate=/work/oxidecomputer/crucible/target/debug/deps/libhttpdate-e3ef82a990113a54.rmeta --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern socket2=/work/oxidecomputer/crucible/target/debug/deps/libsocket2-3c3e3607c1c6d64e.rmeta --extern 
tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tower_service=/work/oxidecomputer/crucible/target/debug/deps/libtower_service-51da71f2ad5117ee.rmeta --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rmeta --extern want=/work/oxidecomputer/crucible/target/debug/deps/libwant-5737a0d118420ef7.rmeta --cap-lints allow`
501 Compiling serde_path_to_error v0.1.14
502 Running `rustc --crate-name serde_path_to_error --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_path_to_error-0.1.14/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f8fffad8b554a310 -C extra-filename=-f8fffad8b554a310 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
503 Compiling rand_chacha v0.1.1
504 Running `rustc --crate-name rand_chacha /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_chacha-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=60a86e6d4f82a03e -C extra-filename=-60a86e6d4f82a03e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-588d7d54223a4bae.rmeta --cap-lints allow --cfg rustc_1_26`
505 Compiling rand_pcg v0.1.2
506 Running `rustc --crate-name rand_pcg /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_pcg-0.1.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=64095680c19f1d75 -C extra-filename=-64095680c19f1d75 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-cf5252fd1c1f594a.rmeta --cap-lints allow --cfg rustc_1_26`
507 Compiling num-complex v0.2.4
508 Running `rustc --crate-name num_complex /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-complex-0.2.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=b0ad3f1350de6722 -C extra-filename=-b0ad3f1350de6722 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow --cfg has_i128 --cfg has_const_fn`
509 Compiling num-iter v0.1.42
510 Running `rustc --crate-name num_iter /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-iter-0.1.42/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=ea5f21eb64e222e3 -C extra-filename=-ea5f21eb64e222e3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_integer=/work/oxidecomputer/crucible/target/debug/deps/libnum_integer-ada2de842cf2fef0.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow --cfg has_i128`
511 Fresh paste v1.0.14
512 Compiling rand_xorshift v0.1.1
513 Running `rustc --crate-name rand_xorshift /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_xorshift-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=516db51379fddd21 -C extra-filename=-516db51379fddd21 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-588d7d54223a4bae.rmeta --cap-lints allow`
514 Compiling rand_hc v0.1.0
515 Running `rustc --crate-name rand_hc /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_hc-0.1.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e624ce70fff78cce -C extra-filename=-e624ce70fff78cce --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-588d7d54223a4bae.rmeta --cap-lints allow`
516 Compiling rand_isaac v0.1.1
517 Running `rustc --crate-name rand_isaac /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_isaac-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=2a69a705c11dd4e8 -C extra-filename=-2a69a705c11dd4e8 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-588d7d54223a4bae.rmeta --cap-lints allow`
518 Compiling rand_jitter v0.1.4
519 Running `rustc --crate-name rand_jitter /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_jitter-0.1.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=4375ed928e3b252c -C extra-filename=-4375ed928e3b252c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-cf5252fd1c1f594a.rmeta --cap-lints allow`
520 Compiling rand_os v0.1.3
521 Running `rustc --crate-name rand_os /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_os-0.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=5662c3e15991676f -C extra-filename=-5662c3e15991676f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-cf5252fd1c1f594a.rmeta --cap-lints allow`
522 Compiling sha1 v0.10.5
523 Running `rustc --crate-name sha1 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/sha1-0.10.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=8fdbd7a715f3bef1 -C extra-filename=-8fdbd7a715f3bef1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern cpufeatures=/work/oxidecomputer/crucible/target/debug/deps/libcpufeatures-6e25ef20a8fa218f.rmeta --extern digest=/work/oxidecomputer/crucible/target/debug/deps/libdigest-d0d34473efe105d1.rmeta --cap-lints allow`
524 Compiling phf_shared v0.10.0
525 Running `rustc --crate-name phf_shared --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/phf_shared-0.10.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=ded46b90978b786e -C extra-filename=-ded46b90978b786e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern siphasher=/work/oxidecomputer/crucible/target/debug/deps/libsiphasher-2eb27360f66646f3.rmeta --cap-lints allow`
526 Compiling strum_macros v0.25.2
527 Running `rustc --crate-name strum_macros --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/strum_macros-0.25.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=bc907f623478289d -C extra-filename=-bc907f623478289d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern heck=/work/oxidecomputer/crucible/target/debug/deps/libheck-29af75c938b110f7.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern rustversion=/work/oxidecomputer/crucible/target/debug/deps/librustversion-ae2ca065d7ce77a2.so --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
528 Compiling clap_derive v4.4.0
529 Running `rustc --crate-name clap_derive --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/clap_derive-4.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no --cfg 'feature="default"' -C metadata=09787059b4668753 -C extra-filename=-09787059b4668753 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern heck=/work/oxidecomputer/crucible/target/debug/deps/libheck-29af75c938b110f7.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
530 Compiling debug-ignore v1.0.5
531 Running `rustc --crate-name debug_ignore /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/debug-ignore-1.0.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=2303f500fcbc7093 -C extra-filename=-2303f500fcbc7093 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
532 Compiling fixedbitset v0.4.1
533 Running `rustc --crate-name fixedbitset /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/fixedbitset-0.4.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=c5fa142e7e51f766 -C extra-filename=-c5fa142e7e51f766 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
534 Compiling either v1.6.1
535 Running `rustc --crate-name either /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/either-1.6.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="use_std"' -C metadata=e075e5427abad613 -C extra-filename=-e075e5427abad613 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
536 Compiling phf v0.10.1
537 Running `rustc --crate-name phf --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/phf-0.10.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=692e1d997f5b6ce1 -C extra-filename=-692e1d997f5b6ce1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern phf_shared=/work/oxidecomputer/crucible/target/debug/deps/libphf_shared-ded46b90978b786e.rmeta --cap-lints allow`
538 Compiling petgraph v0.6.3
539 Running `rustc --crate-name petgraph --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/petgraph-0.6.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="graphmap"' --cfg 'feature="matrix_graph"' --cfg 'feature="serde"' --cfg 'feature="serde-1"' --cfg 'feature="serde_derive"' --cfg 'feature="stable_graph"' -C metadata=8e9556a1882bb1d5 -C extra-filename=-8e9556a1882bb1d5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern fixedbitset=/work/oxidecomputer/crucible/target/debug/deps/libfixedbitset-c5fa142e7e51f766.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_derive=/work/oxidecomputer/crucible/target/debug/deps/libserde_derive-3441a1f9756a6d5b.so --cap-lints allow`
540 Compiling rand v0.6.5
541 Running `rustc --crate-name rand /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand-0.6.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="rand_os"' --cfg 'feature="std"' -C metadata=d54af7a747f291ab -C extra-filename=-d54af7a747f291ab --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-60a86e6d4f82a03e.rmeta --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-cf5252fd1c1f594a.rmeta --extern rand_hc=/work/oxidecomputer/crucible/target/debug/deps/librand_hc-e624ce70fff78cce.rmeta --extern rand_isaac=/work/oxidecomputer/crucible/target/debug/deps/librand_isaac-2a69a705c11dd4e8.rmeta --extern rand_jitter=/work/oxidecomputer/crucible/target/debug/deps/librand_jitter-4375ed928e3b252c.rmeta --extern rand_os=/work/oxidecomputer/crucible/target/debug/deps/librand_os-5662c3e15991676f.rmeta --extern rand_pcg=/work/oxidecomputer/crucible/target/debug/deps/librand_pcg-64095680c19f1d75.rmeta --extern rand_xorshift=/work/oxidecomputer/crucible/target/debug/deps/librand_xorshift-516db51379fddd21.rmeta --cap-lints allow --cfg rustc_1_25 --cfg rustc_1_26 --cfg rustc_1_27`
542 Compiling parse-display-derive v0.7.0
543 Running `rustc --crate-name parse_display_derive --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/parse-display-derive-0.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=95e2e5d955c96bcf -C extra-filename=-95e2e5d955c96bcf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern regex=/work/oxidecomputer/crucible/target/debug/deps/libregex-f9e3a4eb3da387ce.rlib --extern regex_syntax=/work/oxidecomputer/crucible/target/debug/deps/libregex_syntax-b9593ef3338880de.rlib --extern structmeta=/work/oxidecomputer/crucible/target/debug/deps/libstructmeta-478a2e76026e2d21.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-837f9a049f77ca38.rlib --extern proc_macro --cap-lints allow`
544 Compiling hyper-tls v0.5.0
545 Running `rustc --crate-name hyper_tls --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hyper-tls-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=2dbf57f91f681e2c -C extra-filename=-2dbf57f91f681e2c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern native_tls=/work/oxidecomputer/crucible/target/debug/deps/libnative_tls-320c05ab5bbd33c9.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_native_tls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_native_tls-f56aba82a642e205.rmeta --cap-lints allow -L native=/usr/ssl-3/lib/amd64`
546 Compiling num v0.2.1
547 Running `rustc --crate-name num /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-0.2.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="num-bigint"' --cfg 'feature="std"' -C metadata=1148753de5cf68fd -C extra-filename=-1148753de5cf68fd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_bigint=/work/oxidecomputer/crucible/target/debug/deps/libnum_bigint-fb487451ba2d3918.rmeta --extern num_complex=/work/oxidecomputer/crucible/target/debug/deps/libnum_complex-b0ad3f1350de6722.rmeta --extern num_integer=/work/oxidecomputer/crucible/target/debug/deps/libnum_integer-ada2de842cf2fef0.rmeta --extern num_iter=/work/oxidecomputer/crucible/target/debug/deps/libnum_iter-ea5f21eb64e222e3.rmeta --extern num_rational=/work/oxidecomputer/crucible/target/debug/deps/libnum_rational-ff1997054aaa4c62.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow`
548 Compiling newtype_derive v0.1.6
549 Running `rustc --crate-name newtype_derive /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/newtype_derive-0.1.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=fedf432d6b5251a4 -C extra-filename=-fedf432d6b5251a4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg op_assign`
550 Compiling serde_with v2.3.3
551 Running `rustc --crate-name serde_with --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/serde_with-2.3.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="macros"' --cfg 'feature="std"' -C metadata=4f9ddd30b380d6cf -C extra-filename=-4f9ddd30b380d6cf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_with_macros=/work/oxidecomputer/crucible/target/debug/deps/libserde_with_macros-0356ef5a55ed4c76.so --cap-lints allow`
552 Compiling strum v0.25.0
553 Running `rustc --crate-name strum --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/strum-0.25.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="std"' --cfg 'feature="strum_macros"' -C metadata=59ea3c6704348e58 -C extra-filename=-59ea3c6704348e58 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern strum_macros=/work/oxidecomputer/crucible/target/debug/deps/libstrum_macros-bc907f623478289d.so --cap-lints allow`
554 Compiling postgres-types v0.2.3
555 Running `rustc --crate-name postgres_types --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/postgres-types-0.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="chrono-04"' --cfg 'feature="uuid-1"' --cfg 'feature="with-chrono-0_4"' --cfg 'feature="with-uuid-1"' -C metadata=3d7a8ed523cd84fa -C extra-filename=-3d7a8ed523cd84fa --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern chrono_04=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern fallible_iterator=/work/oxidecomputer/crucible/target/debug/deps/libfallible_iterator-2f475e2aff163c98.rmeta --extern postgres_protocol=/work/oxidecomputer/crucible/target/debug/deps/libpostgres_protocol-2544e50ad0920d44.rmeta --extern uuid_1=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow`
556 Compiling proc-macro-crate v1.3.1
557 Running `rustc --crate-name proc_macro_crate --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/proc-macro-crate-1.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=3935aefc56866ee2 -C extra-filename=-3935aefc56866ee2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern toml_edit=/work/oxidecomputer/crucible/target/debug/deps/libtoml_edit-29ed73c573391add.rmeta --cap-lints allow`
558 Compiling instant v0.1.12
559 Running `rustc --crate-name instant --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/instant-0.1.12/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=3f52fffb2a7c0a97 -C extra-filename=-3f52fffb2a7c0a97 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --cap-lints allow`
560 Compiling backoff v0.4.0
561 Running `rustc --crate-name backoff --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/backoff-0.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="futures"' --cfg 'feature="futures-core"' --cfg 'feature="pin-project-lite"' --cfg 'feature="tokio"' --cfg 'feature="tokio_1"' -C metadata=2bc4a2fd075cf434 -C extra-filename=-2bc4a2fd075cf434 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern getrandom=/work/oxidecomputer/crucible/target/debug/deps/libgetrandom-567199de146d617e.rmeta --extern instant=/work/oxidecomputer/crucible/target/debug/deps/libinstant-3f52fffb2a7c0a97.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern tokio_1=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --cap-lints allow`
562 Compiling clap v4.4.0
563 Running `rustc --crate-name clap --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/clap-4.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="color"' --cfg 'feature="default"' --cfg 'feature="derive"' --cfg 'feature="env"' --cfg 'feature="error-context"' --cfg 'feature="help"' --cfg 'feature="std"' --cfg 'feature="suggestions"' --cfg 'feature="usage"' --cfg 'feature="wrap_help"' -C metadata=49bc17aade028e79 -C extra-filename=-49bc17aade028e79 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern clap_builder=/work/oxidecomputer/crucible/target/debug/deps/libclap_builder-f6e4b2a8b78f1399.rmeta --extern clap_derive=/work/oxidecomputer/crucible/target/debug/deps/libclap_derive-09787059b4668753.so --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --cap-lints allow`
564 Compiling tokio-postgres v0.7.6
565 Running `rustc --crate-name tokio_postgres --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-postgres-0.7.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="runtime"' --cfg 'feature="with-chrono-0_4"' --cfg 'feature="with-uuid-1"' -C metadata=5628b93feb58339b -C extra-filename=-5628b93feb58339b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern fallible_iterator=/work/oxidecomputer/crucible/target/debug/deps/libfallible_iterator-2f475e2aff163c98.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern parking_lot=/work/oxidecomputer/crucible/target/debug/deps/libparking_lot-970d5c0acece447c.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern phf=/work/oxidecomputer/crucible/target/debug/deps/libphf-692e1d997f5b6ce1.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern postgres_protocol=/work/oxidecomputer/crucible/target/debug/deps/libpostgres_protocol-2544e50ad0920d44.rmeta --extern postgres_types=/work/oxidecomputer/crucible/target/debug/deps/libpostgres_types-3d7a8ed523cd84fa.rmeta --extern socket2=/work/oxidecomputer/crucible/target/debug/deps/libsocket2-3c3e3607c1c6d64e.rmeta --extern 
tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rmeta --cap-lints allow`
566 Compiling steno v0.4.0
567 Running `rustc --crate-name steno --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/steno-0.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=d1d3ce30296926ad -C extra-filename=-d1d3ce30296926ad --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern newtype_derive=/work/oxidecomputer/crucible/target/debug/deps/libnewtype_derive-fedf432d6b5251a4.rmeta --extern petgraph=/work/oxidecomputer/crucible/target/debug/deps/libpetgraph-8e9556a1882bb1d5.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow`
568 Compiling parse-display v0.7.0
569 Running `rustc --crate-name parse_display --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/parse-display-0.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="once_cell"' --cfg 'feature="regex"' --cfg 'feature="std"' -C metadata=34a1a5d52375b70b -C extra-filename=-34a1a5d52375b70b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern parse_display_derive=/work/oxidecomputer/crucible/target/debug/deps/libparse_display_derive-95e2e5d955c96bcf.so --extern regex=/work/oxidecomputer/crucible/target/debug/deps/libregex-f9e3a4eb3da387ce.rmeta --cap-lints allow`
570 Compiling statistical v1.0.0
571 Running `rustc --crate-name statistical /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/statistical-1.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f2809857d7fe1c91 -C extra-filename=-f2809857d7fe1c91 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num=/work/oxidecomputer/crucible/target/debug/deps/libnum-1148753de5cf68fd.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-d54af7a747f291ab.rmeta --cap-lints allow`
572 Compiling serde_human_bytes v0.1.0 (http://github.com/oxidecomputer/serde_human_bytes?branch=main#0a097945)
573 Running `rustc --crate-name serde_human_bytes --edition=2018 /home/build/.cargo/git/checkouts/serde_human_bytes-25cac76e2ef2d15c/0a09794/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=7f54d0fcbf9b36f3 -C extra-filename=-7f54d0fcbf9b36f3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
574 Compiling num_enum_derive v0.7.0
575 Running `rustc --crate-name num_enum_derive --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num_enum_derive-0.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no --cfg 'feature="proc-macro-crate"' --cfg 'feature="std"' -C metadata=2bef02e01d5b06d3 -C extra-filename=-2bef02e01d5b06d3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro_crate=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro_crate-3935aefc56866ee2.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
576 Compiling camino-tempfile v1.0.2
577 Running `rustc --crate-name camino_tempfile --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/camino-tempfile-1.0.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=38b6a8d85c9dc0c0 -C extra-filename=-38b6a8d85c9dc0c0 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern camino=/work/oxidecomputer/crucible/target/debug/deps/libcamino-45f0f4a2c258f934.rmeta --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rmeta --cap-lints allow`
578 Compiling ipnetwork v0.20.0
579 Running `rustc --crate-name ipnetwork --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ipnetwork-0.20.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="schemars"' --cfg 'feature="serde"' -C metadata=0e9e550a49db2c52 -C extra-filename=-0e9e550a49db2c52 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
580 Compiling macaddr v1.0.1
581 Running `rustc --crate-name macaddr --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/macaddr-1.0.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="serde"' --cfg 'feature="serde_std"' --cfg 'feature="std"' -C metadata=98e89df75c36be48 -C extra-filename=-98e89df75c36be48 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
582 Compiling api_identity v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
583 Running `rustc --crate-name api_identity --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/api_identity/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=90d45ecc06c8f773 -C extra-filename=-90d45ecc06c8f773 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
584 Compiling base64ct v1.6.0
585 Running `rustc --crate-name base64ct --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/base64ct-1.6.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="std"' -C metadata=5aa1f74fadba3334 -C extra-filename=-5aa1f74fadba3334 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
586 Compiling bincode v1.3.3
587 Running `rustc --crate-name bincode /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bincode-1.3.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bcb925e8faac86cd -C extra-filename=-bcb925e8faac86cd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
588 Compiling password-hash v0.5.0
589 Running `rustc --crate-name password_hash --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/password-hash-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="rand_core"' --cfg 'feature="std"' -C metadata=6571709e5cce3ff3 -C extra-filename=-6571709e5cce3ff3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64ct=/work/oxidecomputer/crucible/target/debug/deps/libbase64ct-5aa1f74fadba3334.rmeta --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-e2870cc0e01c33c9.rmeta --extern subtle=/work/oxidecomputer/crucible/target/debug/deps/libsubtle-ee0c8d46ce57336b.rmeta --cap-lints allow`
590 Compiling blake2 v0.10.6
591 Running `rustc --crate-name blake2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/blake2-0.10.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=39c009971635667f -C extra-filename=-39c009971635667f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern digest=/work/oxidecomputer/crucible/target/debug/deps/libdigest-d0d34473efe105d1.rmeta --cap-lints allow`
592 Compiling inout v0.1.3
593 Running `rustc --crate-name inout --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/inout-0.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=63af7c577909eb44 -C extra-filename=-63af7c577909eb44 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern generic_array=/work/oxidecomputer/crucible/target/debug/deps/libgeneric_array-bc31b01978a602e7.rmeta --cap-lints allow`
594 Compiling cipher v0.4.3
595 Running `rustc --crate-name cipher --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/cipher-0.4.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=fe5b83e4e2b73d72 -C extra-filename=-fe5b83e4e2b73d72 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crypto_common=/work/oxidecomputer/crucible/target/debug/deps/libcrypto_common-0953bfc5dcef84b9.rmeta --extern inout=/work/oxidecomputer/crucible/target/debug/deps/libinout-63af7c577909eb44.rmeta --cap-lints allow`
596 Compiling argon2 v0.5.0
597 Running `rustc --crate-name argon2 --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/argon2-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="password-hash"' --cfg 'feature="rand"' --cfg 'feature="std"' -C metadata=8f03ab3a8d12bcba -C extra-filename=-8f03ab3a8d12bcba --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64ct=/work/oxidecomputer/crucible/target/debug/deps/libbase64ct-5aa1f74fadba3334.rmeta --extern blake2=/work/oxidecomputer/crucible/target/debug/deps/libblake2-39c009971635667f.rmeta --extern password_hash=/work/oxidecomputer/crucible/target/debug/deps/libpassword_hash-6571709e5cce3ff3.rmeta --cap-lints allow`
598 Compiling oximeter-macro-impl v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
599 Running `rustc --crate-name oximeter_macro_impl --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/oximeter/oximeter-macro-impl/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=e4cc949eda20c416 -C extra-filename=-e4cc949eda20c416 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
600 Compiling omicron-passwords v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
601 Running `rustc --crate-name omicron_passwords --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/passwords/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ac6e3a602e6ad041 -C extra-filename=-ac6e3a602e6ad041 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern argon2=/work/oxidecomputer/crucible/target/debug/deps/libargon2-8f03ab3a8d12bcba.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_with=/work/oxidecomputer/crucible/target/debug/deps/libserde_with-4f9ddd30b380d6cf.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --cap-lints allow`
602 Compiling universal-hash v0.5.0
603 Running `rustc --crate-name universal_hash --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/universal-hash-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4588bd931f24d922 -C extra-filename=-4588bd931f24d922 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crypto_common=/work/oxidecomputer/crucible/target/debug/deps/libcrypto_common-0953bfc5dcef84b9.rmeta --extern subtle=/work/oxidecomputer/crucible/target/debug/deps/libsubtle-ee0c8d46ce57336b.rmeta --cap-lints allow`
604 Compiling num_enum v0.7.0
605 Running `rustc --crate-name num_enum --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num_enum-0.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=9cd7a6d9dcf1dd5a -C extra-filename=-9cd7a6d9dcf1dd5a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_enum_derive=/work/oxidecomputer/crucible/target/debug/deps/libnum_enum_derive-2bef02e01d5b06d3.so --cap-lints allow`
606 Compiling opaque-debug v0.3.0
607 Running `rustc --crate-name opaque_debug --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opaque-debug-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bdee85e4e8a367bc -C extra-filename=-bdee85e4e8a367bc --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
608 Compiling polyval v0.6.0
609 Compiling aes v0.8.2
610 Running `rustc --crate-name polyval --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/polyval-0.6.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e4b47d7c1f31f998 -C extra-filename=-e4b47d7c1f31f998 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern cpufeatures=/work/oxidecomputer/crucible/target/debug/deps/libcpufeatures-6e25ef20a8fa218f.rmeta --extern opaque_debug=/work/oxidecomputer/crucible/target/debug/deps/libopaque_debug-bdee85e4e8a367bc.rmeta --extern universal_hash=/work/oxidecomputer/crucible/target/debug/deps/libuniversal_hash-4588bd931f24d922.rmeta --cap-lints allow`
611 Running `rustc --crate-name aes --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/aes-0.8.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e0592e8e1d03d860 -C extra-filename=-e0592e8e1d03d860 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern cipher=/work/oxidecomputer/crucible/target/debug/deps/libcipher-fe5b83e4e2b73d72.rmeta --extern cpufeatures=/work/oxidecomputer/crucible/target/debug/deps/libcpufeatures-6e25ef20a8fa218f.rmeta --cap-lints allow`
612 Compiling ctr v0.9.2
613 Running `rustc --crate-name ctr --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ctr-0.9.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=583e100d27882194 -C extra-filename=-583e100d27882194 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cipher=/work/oxidecomputer/crucible/target/debug/deps/libcipher-fe5b83e4e2b73d72.rmeta --cap-lints allow`
614 Compiling crucible-client-types v0.1.0 (/work/oxidecomputer/crucible/crucible-client-types)
615 Running `rustc --crate-name crucible_client_types --edition=2021 crucible-client-types/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bd54c4335d2370bd -C extra-filename=-bd54c4335d2370bd --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta`
616 Compiling aead v0.5.1
617 Running `rustc --crate-name aead --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/aead-0.5.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="getrandom"' --cfg 'feature="rand_core"' -C metadata=f30da292094eb963 -C extra-filename=-f30da292094eb963 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crypto_common=/work/oxidecomputer/crucible/target/debug/deps/libcrypto_common-0953bfc5dcef84b9.rmeta --extern generic_array=/work/oxidecomputer/crucible/target/debug/deps/libgeneric_array-bc31b01978a602e7.rmeta --cap-lints allow`
618 Compiling zeroize v1.3.0
619 Running `rustc --crate-name zeroize --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/zeroize-1.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=8e99cf4813483d58 -C extra-filename=-8e99cf4813483d58 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
620 Compiling itertools v0.11.0
621 Running `rustc --crate-name itertools --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/itertools-0.11.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="use_alloc"' --cfg 'feature="use_std"' -C metadata=b06e69badd72e55c -C extra-filename=-b06e69badd72e55c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern either=/work/oxidecomputer/crucible/target/debug/deps/libeither-e075e5427abad613.rmeta --cap-lints allow`
622 Compiling async-recursion v1.0.5
623 Running `rustc --crate-name async_recursion --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/async-recursion-1.0.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=ce9499495a1cb858 -C extra-filename=-ce9499495a1cb858 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
624 Compiling ringbuffer v0.14.2
625 Running `rustc --crate-name ringbuffer --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ringbuffer-0.14.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' -C metadata=e91d75e4694e6351 -C extra-filename=-e91d75e4694e6351 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
626 Compiling aes-gcm-siv v0.11.1
627 Running `rustc --crate-name aes_gcm_siv --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/aes-gcm-siv-0.11.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="aes"' --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="getrandom"' -C metadata=21495b616a07c9a4 -C extra-filename=-21495b616a07c9a4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern aead=/work/oxidecomputer/crucible/target/debug/deps/libaead-f30da292094eb963.rmeta --extern aes=/work/oxidecomputer/crucible/target/debug/deps/libaes-e0592e8e1d03d860.rmeta --extern cipher=/work/oxidecomputer/crucible/target/debug/deps/libcipher-fe5b83e4e2b73d72.rmeta --extern ctr=/work/oxidecomputer/crucible/target/debug/deps/libctr-583e100d27882194.rmeta --extern polyval=/work/oxidecomputer/crucible/target/debug/deps/libpolyval-e4b47d7c1f31f998.rmeta --extern subtle=/work/oxidecomputer/crucible/target/debug/deps/libsubtle-ee0c8d46ce57336b.rmeta --extern zeroize=/work/oxidecomputer/crucible/target/debug/deps/libzeroize-8e99cf4813483d58.rmeta --cap-lints allow`
628 Compiling unicode-segmentation v1.10.0
629 Running `rustc --crate-name unicode_segmentation --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicode-segmentation-1.10.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=06176721b7b95955 -C extra-filename=-06176721b7b95955 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
630 Compiling console v0.15.5
631 Running `rustc --crate-name console --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/console-0.15.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="ansi-parsing"' --cfg 'feature="default"' --cfg 'feature="unicode-width"' -C metadata=4236472a6e29ce0a -C extra-filename=-4236472a6e29ce0a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern unicode_width=/work/oxidecomputer/crucible/target/debug/deps/libunicode_width-87c5262a4c4bb0e9.rmeta --cap-lints allow`
632 Compiling opentelemetry_api v0.18.0
633 Running `rustc --crate-name opentelemetry_api --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry_api-0.18.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="fnv"' --cfg 'feature="metrics"' --cfg 'feature="pin-project-lite"' --cfg 'feature="trace"' -C metadata=54be0f0e21b05b9c -C extra-filename=-54be0f0e21b05b9c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --cap-lints allow`
634 Compiling dashmap v5.2.0
635 Running `rustc --crate-name dashmap --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dashmap-5.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="send_guard"' -C metadata=e6d3c88fce252f4a -C extra-filename=-e6d3c88fce252f4a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern num_cpus=/work/oxidecomputer/crucible/target/debug/deps/libnum_cpus-67a451bebfcc5086.rmeta --extern parking_lot=/work/oxidecomputer/crucible/target/debug/deps/libparking_lot-970d5c0acece447c.rmeta --cap-lints allow`
636 Compiling typify-macro v0.0.13 (https://github.com/oxidecomputer/typify#92bfed8b)
637 Running `rustc --crate-name typify_macro --edition=2021 /home/build/.cargo/git/checkouts/typify-288d5a84bbbe6a46/92bfed8/typify-macro/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=f2da351574d5abd0 -C extra-filename=-f2da351574d5abd0 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern serde_tokenstream=/work/oxidecomputer/crucible/target/debug/deps/libserde_tokenstream-2a83027836bfd0fb.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern typify_impl=/work/oxidecomputer/crucible/target/debug/deps/libtypify_impl-5385296f2ea50467.rlib --extern proc_macro --cap-lints allow`
638 Compiling newline-converter v0.3.0
639 Running `rustc --crate-name newline_converter --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/newline-converter-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=54456d7e7e0ff19b -C extra-filename=-54456d7e7e0ff19b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern unicode_segmentation=/work/oxidecomputer/crucible/target/debug/deps/libunicode_segmentation-06176721b7b95955.rmeta --cap-lints allow`
640 Compiling num-derive v0.4.0
641 Running `rustc --crate-name num_derive --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/num-derive-0.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=814c8a0a0a713cba -C extra-filename=-814c8a0a0a713cba --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
642 Compiling convert_case v0.5.0
643 Running `rustc --crate-name convert_case --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/convert_case-0.5.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=1b3c1b1b6d16787f -C extra-filename=-1b3c1b1b6d16787f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
644 Compiling similar v2.2.1
645 Running `rustc --crate-name similar --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/similar-2.2.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="text"' -C metadata=c5a411995c7e1b53 -C extra-filename=-c5a411995c7e1b53 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
646 Compiling opentelemetry_sdk v0.18.0
647 Running `rustc --crate-name opentelemetry_sdk --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry_sdk-0.18.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="async-trait"' --cfg 'feature="crossbeam-channel"' --cfg 'feature="dashmap"' --cfg 'feature="default"' --cfg 'feature="fnv"' --cfg 'feature="metrics"' --cfg 'feature="percent-encoding"' --cfg 'feature="rand"' --cfg 'feature="trace"' -C metadata=f1be95b27d4b99f6 -C extra-filename=-f1be95b27d4b99f6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern crossbeam_channel=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_channel-19ff6fa486e51c63.rmeta --extern dashmap=/work/oxidecomputer/crucible/target/debug/deps/libdashmap-e6d3c88fce252f4a.rmeta --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_executor=/work/oxidecomputer/crucible/target/debug/deps/libfutures_executor-61f53162107ffb32.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern opentelemetry_api=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_api-54be0f0e21b05b9c.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern 
thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --cap-lints allow`
648 Compiling expectorate v1.0.7
649 Running `rustc --crate-name expectorate --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/expectorate-1.0.7/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4e45b262baa473cc -C extra-filename=-4e45b262baa473cc --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern console=/work/oxidecomputer/crucible/target/debug/deps/libconsole-4236472a6e29ce0a.rmeta --extern newline_converter=/work/oxidecomputer/crucible/target/debug/deps/libnewline_converter-54456d7e7e0ff19b.rmeta --extern similar=/work/oxidecomputer/crucible/target/debug/deps/libsimilar-c5a411995c7e1b53.rmeta --cap-lints allow`
650 Compiling openapi-lint v0.1.0 (https://github.com/oxidecomputer/openapi-lint?branch=main#9e3ada82)
651 Running `rustc --crate-name openapi_lint --edition=2018 /home/build/.cargo/git/checkouts/openapi-lint-42a90d71bc44c8c6/9e3ada8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=848bb99097a9a843 -C extra-filename=-848bb99097a9a843 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern convert_case=/work/oxidecomputer/crucible/target/debug/deps/libconvert_case-1b3c1b1b6d16787f.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rmeta --cap-lints allow`
652 Compiling unicase v2.6.0
653 Running `rustc --crate-name unicase /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unicase-2.6.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=60e4f37ecb23afe1 -C extra-filename=-60e4f37ecb23afe1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg __unicase__iter_cmp --cfg __unicase__default_hasher --cfg __unicase__const_fns --cfg __unicase__core_and_alloc`
654 Compiling urlencoding v2.1.2
655 Running `rustc --crate-name urlencoding --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/urlencoding-2.1.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=76c058d20df73e40 -C extra-filename=-76c058d20df73e40 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
656 Compiling overload v0.1.1
657 Running `rustc --crate-name overload --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/overload-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=c7048b1d029106d7 -C extra-filename=-c7048b1d029106d7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
658 Compiling nu-ansi-term v0.46.0
659 Running `rustc --crate-name nu_ansi_term --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/nu-ansi-term-0.46.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=91641c4d652bb213 -C extra-filename=-91641c4d652bb213 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern overload=/work/oxidecomputer/crucible/target/debug/deps/liboverload-c7048b1d029106d7.rmeta --cap-lints allow`
660 Compiling opentelemetry_api v0.20.0
661 Running `rustc --crate-name opentelemetry_api --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry_api-0.20.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="pin-project-lite"' --cfg 'feature="trace"' -C metadata=04e099a48329a859 -C extra-filename=-04e099a48329a859 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern urlencoding=/work/oxidecomputer/crucible/target/debug/deps/liburlencoding-76c058d20df73e40.rmeta --cap-lints allow`
662 Compiling crucible-smf v0.0.0 (/work/oxidecomputer/crucible/smf)
663 Running `rustc --crate-name crucible_smf --edition=2021 smf/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e1a82c6f17385dc6 -C extra-filename=-e1a82c6f17385dc6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern num_derive=/work/oxidecomputer/crucible/target/debug/deps/libnum_derive-814c8a0a0a713cba.so --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta`
664 Compiling opentelemetry v0.18.0
665 Running `rustc --crate-name opentelemetry --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry-0.18.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="metrics"' --cfg 'feature="trace"' -C metadata=f41372bdd269a545 -C extra-filename=-f41372bdd269a545 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern opentelemetry_api=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_api-54be0f0e21b05b9c.rmeta --extern opentelemetry_sdk=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_sdk-f1be95b27d4b99f6.rmeta --cap-lints allow`
666 Compiling mime_guess v2.0.4
667 Running `rustc --crate-name build_script_build /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/mime_guess-2.0.4/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no --cfg 'feature="default"' --cfg 'feature="rev-mappings"' -C metadata=dbd0371f2a220632 -C extra-filename=-dbd0371f2a220632 --out-dir /work/oxidecomputer/crucible/target/debug/build/mime_guess-dbd0371f2a220632 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern unicase=/work/oxidecomputer/crucible/target/debug/deps/libunicase-60e4f37ecb23afe1.rlib --cap-lints allow`
668 Compiling tracing-log v0.1.3
669 Running `rustc --crate-name tracing_log --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tracing-log-0.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="log-tracer"' --cfg 'feature="std"' -C metadata=d77683210f5d223d -C extra-filename=-d77683210f5d223d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern tracing_core=/work/oxidecomputer/crucible/target/debug/deps/libtracing_core-adac59f754126e83.rmeta --cap-lints allow`
670 Compiling sharded-slab v0.1.4
671 Running `rustc --crate-name sharded_slab --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/sharded-slab-0.1.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=9ba6a384880b5495 -C extra-filename=-9ba6a384880b5495 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --cap-lints allow`
672 Compiling ordered-float v3.7.0
673 Running `rustc --crate-name ordered_float --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ordered-float-3.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=c435708391037060 -C extra-filename=-c435708391037060 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow`
674 Compiling ordered-float v1.1.1
675 Running `rustc --crate-name ordered_float /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ordered-float-1.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=b7d47a63e5afa582 -C extra-filename=-b7d47a63e5afa582 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --cap-lints allow`
676 Compiling threadpool v1.8.1
677 Running `rustc --crate-name threadpool /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/threadpool-1.8.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=e4e99e14eaa48b3e -C extra-filename=-e4e99e14eaa48b3e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern num_cpus=/work/oxidecomputer/crucible/target/debug/deps/libnum_cpus-67a451bebfcc5086.rmeta --cap-lints allow`
678 Compiling integer-encoding v3.0.3
679 Running `rustc --crate-name integer_encoding --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/integer-encoding-3.0.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=d6f198d882d03cc4 -C extra-filename=-d6f198d882d03cc4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
680 Running `/work/oxidecomputer/crucible/target/debug/build/mime_guess-dbd0371f2a220632/build-script-build`
681 Compiling opentelemetry_sdk v0.20.0
682 Running `rustc --crate-name opentelemetry_sdk --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry_sdk-0.20.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="async-trait"' --cfg 'feature="crossbeam-channel"' --cfg 'feature="default"' --cfg 'feature="percent-encoding"' --cfg 'feature="rand"' --cfg 'feature="trace"' -C metadata=6fef3b11a1793dc3 -C extra-filename=-6fef3b11a1793dc3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern crossbeam_channel=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_channel-19ff6fa486e51c63.rmeta --extern futures_channel=/work/oxidecomputer/crucible/target/debug/deps/libfutures_channel-34a7a018f58dc5a7.rmeta --extern futures_executor=/work/oxidecomputer/crucible/target/debug/deps/libfutures_executor-61f53162107ffb32.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern opentelemetry_api=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_api-04e099a48329a859.rmeta --extern ordered_float=/work/oxidecomputer/crucible/target/debug/deps/libordered_float-c435708391037060.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --cap-lints allow`
683 Compiling opentelemetry-semantic-conventions v0.10.0
684 Running `rustc --crate-name opentelemetry_semantic_conventions --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry-semantic-conventions-0.10.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=18113e5418aec521 -C extra-filename=-18113e5418aec521 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-f41372bdd269a545.rmeta --cap-lints allow`
685 Compiling thrift v0.16.0
686 Running `rustc --crate-name thrift --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/thrift-0.16.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="log"' --cfg 'feature="server"' --cfg 'feature="threadpool"' -C metadata=1fac1fb5888ad036 -C extra-filename=-1fac1fb5888ad036 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --extern integer_encoding=/work/oxidecomputer/crucible/target/debug/deps/libinteger_encoding-d6f198d882d03cc4.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern ordered_float=/work/oxidecomputer/crucible/target/debug/deps/libordered_float-b7d47a63e5afa582.rmeta --extern threadpool=/work/oxidecomputer/crucible/target/debug/deps/libthreadpool-e4e99e14eaa48b3e.rmeta --cap-lints allow`
687 Compiling tracing-subscriber v0.3.17
688 Running `rustc --crate-name tracing_subscriber --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tracing-subscriber-0.3.17/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="ansi"' --cfg 'feature="default"' --cfg 'feature="fmt"' --cfg 'feature="nu-ansi-term"' --cfg 'feature="registry"' --cfg 'feature="sharded-slab"' --cfg 'feature="smallvec"' --cfg 'feature="std"' --cfg 'feature="thread_local"' --cfg 'feature="tracing-log"' -C metadata=e39dae5ba339bc78 -C extra-filename=-e39dae5ba339bc78 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern nu_ansi_term=/work/oxidecomputer/crucible/target/debug/deps/libnu_ansi_term-91641c4d652bb213.rmeta --extern sharded_slab=/work/oxidecomputer/crucible/target/debug/deps/libsharded_slab-9ba6a384880b5495.rmeta --extern smallvec=/work/oxidecomputer/crucible/target/debug/deps/libsmallvec-397f26bd8c84e528.rmeta --extern thread_local=/work/oxidecomputer/crucible/target/debug/deps/libthread_local-771d112d9219dc9f.rmeta --extern tracing_core=/work/oxidecomputer/crucible/target/debug/deps/libtracing_core-adac59f754126e83.rmeta --extern tracing_log=/work/oxidecomputer/crucible/target/debug/deps/libtracing_log-d77683210f5d223d.rmeta --cap-lints allow`
689 Compiling phf_shared v0.11.1
690 Running `rustc --crate-name phf_shared --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/phf_shared-0.11.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=f4c611c5b95ef91c -C extra-filename=-f4c611c5b95ef91c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern siphasher=/work/oxidecomputer/crucible/target/debug/deps/libsiphasher-2eb27360f66646f3.rmeta --cap-lints allow`
691 Compiling typify v0.0.13 (https://github.com/oxidecomputer/typify#92bfed8b)
692 Running `rustc --crate-name typify --edition=2021 /home/build/.cargo/git/checkouts/typify-288d5a84bbbe6a46/92bfed8/typify/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="macro"' --cfg 'feature="typify-macro"' -C metadata=60072fbcea665837 -C extra-filename=-60072fbcea665837 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern typify_impl=/work/oxidecomputer/crucible/target/debug/deps/libtypify_impl-5385296f2ea50467.rmeta --extern typify_macro=/work/oxidecomputer/crucible/target/debug/deps/libtypify_macro-f2da351574d5abd0.so --cap-lints allow`
693 Compiling progenitor-impl v0.3.0 (https://github.com/oxidecomputer/progenitor?branch=main#76716eea)
694 Running `rustc --crate-name progenitor_impl --edition=2021 /home/build/.cargo/git/checkouts/progenitor-639bd64206ac5e43/76716ee/progenitor-impl/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4eeb9d4c0f08c454 -C extra-filename=-4eeb9d4c0f08c454 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern getopts=/work/oxidecomputer/crucible/target/debug/deps/libgetopts-93a8419d37acce69.rmeta --extern heck=/work/oxidecomputer/crucible/target/debug/deps/libheck-29af75c938b110f7.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rmeta --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern regex=/work/oxidecomputer/crucible/target/debug/deps/libregex-f9e3a4eb3da387ce.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern typify=/work/oxidecomputer/crucible/target/debug/deps/libtypify-60072fbcea665837.rmeta --extern unicode_ident=/work/oxidecomputer/crucible/target/debug/deps/libunicode_ident-81e7752fff89e70f.rmeta --cap-lints allow`
695 Running `rustc --crate-name mime_guess /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/mime_guess-2.0.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="rev-mappings"' -C metadata=66974d6c31968dc2 -C extra-filename=-66974d6c31968dc2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern mime=/work/oxidecomputer/crucible/target/debug/deps/libmime-ac14a9115eddd3c2.rmeta --extern unicase=/work/oxidecomputer/crucible/target/debug/deps/libunicase-60e4f37ecb23afe1.rmeta --cap-lints allow`
696 Compiling opentelemetry v0.20.0
697 Running `rustc --crate-name opentelemetry --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry-0.20.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="trace"' -C metadata=5524fd7817ad57fb -C extra-filename=-5524fd7817ad57fb --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern opentelemetry_api=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_api-04e099a48329a859.rmeta --extern opentelemetry_sdk=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_sdk-6fef3b11a1793dc3.rmeta --cap-lints allow`
698 Compiling opentelemetry-jaeger v0.17.0
699 Running `rustc --crate-name opentelemetry_jaeger --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/opentelemetry-jaeger-0.17.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=9ebf47742e5e063f -C extra-filename=-9ebf47742e5e063f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern futures_executor=/work/oxidecomputer/crucible/target/debug/deps/libfutures_executor-61f53162107ffb32.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-f41372bdd269a545.rmeta --extern opentelemetry_semantic_conventions=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_semantic_conventions-18113e5418aec521.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern thrift=/work/oxidecomputer/crucible/target/debug/deps/libthrift-1fac1fb5888ad036.rmeta --cap-lints allow`
700 Compiling signal-hook v0.3.17
701 Running `rustc --crate-name signal_hook --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/signal-hook-0.3.17/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="channel"' --cfg 'feature="default"' --cfg 'feature="iterator"' -C metadata=ae206b38743b6815 -C extra-filename=-ae206b38743b6815 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern signal_hook_registry=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook_registry-4d955479f235827e.rmeta --cap-lints allow`
702 Compiling http-range v0.1.5
703 Running `rustc --crate-name http_range --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/http-range-0.1.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=4885ae91fe1ea75f -C extra-filename=-4885ae91fe1ea75f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
704 Compiling memoffset v0.6.5
705 Running `rustc --crate-name memoffset /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/memoffset-0.6.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=508ecbf92309dc73 -C extra-filename=-508ecbf92309dc73 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg tuple_ty --cfg allow_clippy --cfg maybe_uninit --cfg doctests --cfg raw_ref_macros`
706 Compiling tracing-opentelemetry v0.18.0
707 Running `rustc --crate-name tracing_opentelemetry --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tracing-opentelemetry-0.18.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="metrics"' --cfg 'feature="tracing-log"' -C metadata=8f8cfc1900c3a663 -C extra-filename=-8f8cfc1900c3a663 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-f41372bdd269a545.rmeta --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rmeta --extern tracing_core=/work/oxidecomputer/crucible/target/debug/deps/libtracing_core-adac59f754126e83.rmeta --extern tracing_log=/work/oxidecomputer/crucible/target/debug/deps/libtracing_log-d77683210f5d223d.rmeta --extern tracing_subscriber=/work/oxidecomputer/crucible/target/debug/deps/libtracing_subscriber-e39dae5ba339bc78.rmeta --cap-lints allow`
708 Compiling hyper-staticfile v0.9.4
709 Running `rustc --crate-name hyper_staticfile --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hyper-staticfile-0.9.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=559b4389ef952563 -C extra-filename=-559b4389ef952563 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern http_range=/work/oxidecomputer/crucible/target/debug/deps/libhttp_range-4885ae91fe1ea75f.rmeta --extern httpdate=/work/oxidecomputer/crucible/target/debug/deps/libhttpdate-e3ef82a990113a54.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern mime_guess=/work/oxidecomputer/crucible/target/debug/deps/libmime_guess-66974d6c31968dc2.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern url=/work/oxidecomputer/crucible/target/debug/deps/liburl-ff56943ab9066fdc.rmeta --cap-lints allow`
710 Compiling phf_generator v0.11.1
711 Running `rustc --crate-name phf_generator --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/phf_generator-0.11.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=41d436cb237437bf -C extra-filename=-41d436cb237437bf --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern phf_shared=/work/oxidecomputer/crucible/target/debug/deps/libphf_shared-f4c611c5b95ef91c.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --cap-lints allow`
712 Compiling phf_codegen v0.11.1
713 Running `rustc --crate-name phf_codegen --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/phf_codegen-0.11.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=f8063558873b33a1 -C extra-filename=-f8063558873b33a1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern phf_generator=/work/oxidecomputer/crucible/target/debug/deps/libphf_generator-41d436cb237437bf.rmeta --extern phf_shared=/work/oxidecomputer/crucible/target/debug/deps/libphf_shared-f4c611c5b95ef91c.rmeta --cap-lints allow`
714 Compiling crossbeam-epoch v0.9.8
715 Running `rustc --crate-name crossbeam_epoch --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossbeam-epoch-0.9.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="lazy_static"' --cfg 'feature="std"' -C metadata=44fc1cc817fcdd23 -C extra-filename=-44fc1cc817fcdd23 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern crossbeam_utils=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_utils-a1fb255bfa31483a.rmeta --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern memoffset=/work/oxidecomputer/crucible/target/debug/deps/libmemoffset-508ecbf92309dc73.rmeta --extern scopeguard=/work/oxidecomputer/crucible/target/debug/deps/libscopeguard-00b7ece4eb7b8e7e.rmeta --cap-lints allow --cfg crossbeam_const_fn_trait_bound`
716 Compiling crossbeam-deque v0.8.1
717 Running `rustc --crate-name crossbeam_deque --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossbeam-deque-0.8.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="crossbeam-epoch"' --cfg 'feature="crossbeam-utils"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=46df68c0a92e69f5 -C extra-filename=-46df68c0a92e69f5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern crossbeam_epoch=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_epoch-44fc1cc817fcdd23.rmeta --extern crossbeam_utils=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_utils-a1fb255bfa31483a.rmeta --cap-lints allow`
718 Compiling terminfo v0.8.0
719 Running `rustc --crate-name build_script_build --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/terminfo-0.8.0/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=bed6d7dcce5eac66 -C extra-filename=-bed6d7dcce5eac66 --out-dir /work/oxidecomputer/crucible/target/debug/build/terminfo-bed6d7dcce5eac66 -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern phf_codegen=/work/oxidecomputer/crucible/target/debug/deps/libphf_codegen-f8063558873b33a1.rlib --cap-lints allow`
720 Compiling io-lifetimes v0.5.3
721 Running `rustc --crate-name io_lifetimes --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/io-lifetimes-0.5.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=59f01ff60b1f438f -C extra-filename=-59f01ff60b1f438f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
722 Compiling signal-hook-mio v0.2.3
723 Running `rustc --crate-name signal_hook_mio --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/signal-hook-mio-0.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="mio-0_8"' --cfg 'feature="support-v0_8"' -C metadata=b78bc232ff08be19 -C extra-filename=-b78bc232ff08be19 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern mio_0_8=/work/oxidecomputer/crucible/target/debug/deps/libmio-27a8136cf12de2bb.rmeta --extern signal_hook=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook-ae206b38743b6815.rmeta --cap-lints allow`
724 Compiling structmeta-derive v0.2.0
725 Running `rustc --crate-name structmeta_derive --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/structmeta-derive-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=6868cfc37bbab5f5 -C extra-filename=-6868cfc37bbab5f5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
726 Compiling vte_generate_state_changes v0.1.1
727 Running `rustc --crate-name vte_generate_state_changes --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/vte_generate_state_changes-0.1.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=52ea4d6c5be5fc21 -C extra-filename=-52ea4d6c5be5fc21 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern proc_macro --cap-lints allow`
728 Compiling subprocess v0.2.9
729 Running `rustc --crate-name subprocess --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/subprocess-0.2.9/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0acfc5c9b903588a -C extra-filename=-0acfc5c9b903588a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
730 Compiling dirs-sys v0.3.7
731 Running `rustc --crate-name dirs_sys /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dirs-sys-0.3.7/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=15239ab4fd89ac89 -C extra-filename=-15239ab4fd89ac89 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
732 Compiling wait-timeout v0.2.0
733 Running `rustc --crate-name wait_timeout /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/wait-timeout-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=509e2becba2710d2 -C extra-filename=-509e2becba2710d2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
734 Compiling adler v1.0.2
735 Running `rustc --crate-name adler /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/adler-1.0.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=de7aee9f30a32373 -C extra-filename=-de7aee9f30a32373 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
736 Compiling quick-error v1.2.3
737 Running `rustc --crate-name quick_error /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/quick-error-1.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=c8c4727ecc8bb914 -C extra-filename=-c8c4727ecc8bb914 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
738 Compiling minimal-lexical v0.2.1
739 Running `rustc --crate-name minimal_lexical --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/minimal-lexical-0.2.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=fec5e2c45ccf032b -C extra-filename=-fec5e2c45ccf032b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
740 Compiling regex-automata v0.1.10
741 Running `rustc --crate-name regex_automata /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/regex-automata-0.1.10/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=054d4c6c4feb4142 -C extra-filename=-054d4c6c4feb4142 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
742 Compiling bit-vec v0.6.3
743 Running `rustc --crate-name bit_vec /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bit-vec-0.6.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="std"' -C metadata=35546b1439b435ea -C extra-filename=-35546b1439b435ea --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
744 Compiling rusty-fork v0.3.0
745 Running `rustc --crate-name rusty_fork --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rusty-fork-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="timeout"' --cfg 'feature="wait-timeout"' -C metadata=20bbf553eecf6baa -C extra-filename=-20bbf553eecf6baa --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern quick_error=/work/oxidecomputer/crucible/target/debug/deps/libquick_error-c8c4727ecc8bb914.rmeta --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rmeta --extern wait_timeout=/work/oxidecomputer/crucible/target/debug/deps/libwait_timeout-509e2becba2710d2.rmeta --cap-lints allow`
746 Compiling miniz_oxide v0.6.2
747 Running `rustc --crate-name miniz_oxide --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/miniz_oxide-0.6.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="with-alloc"' -C metadata=f7bade0e2b1a9da4 -C extra-filename=-f7bade0e2b1a9da4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern adler=/work/oxidecomputer/crucible/target/debug/deps/libadler-de7aee9f30a32373.rmeta --cap-lints allow`
748 Compiling bstr v0.2.17
749 Running `rustc --crate-name bstr --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bstr-0.2.17/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="lazy_static"' --cfg 'feature="regex-automata"' --cfg 'feature="std"' --cfg 'feature="unicode"' -C metadata=f0785d87dddebfcd -C extra-filename=-f0785d87dddebfcd --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --extern regex_automata=/work/oxidecomputer/crucible/target/debug/deps/libregex_automata-054d4c6c4feb4142.rmeta --cap-lints allow`
750 Compiling bit-set v0.5.3
751 Running `rustc --crate-name bit_set /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/bit-set-0.5.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=ec12e43f0b47413c -C extra-filename=-ec12e43f0b47413c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bit_vec=/work/oxidecomputer/crucible/target/debug/deps/libbit_vec-35546b1439b435ea.rmeta --cap-lints allow`
752 Compiling nom v7.1.3
753 Running `rustc --crate-name nom --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/nom-7.1.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="std"' -C metadata=7d7f045245d4ae57 -C extra-filename=-7d7f045245d4ae57 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --extern minimal_lexical=/work/oxidecomputer/crucible/target/debug/deps/libminimal_lexical-fec5e2c45ccf032b.rmeta --cap-lints allow`
754 Running `/work/oxidecomputer/crucible/target/debug/build/terminfo-bed6d7dcce5eac66/build-script-build`
755 Compiling vte v0.11.1
756 Running `rustc --crate-name vte --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/vte-0.11.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=66f6ba7b673381f9 -C extra-filename=-66f6ba7b673381f9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern utf8parse=/work/oxidecomputer/crucible/target/debug/deps/libutf8parse-7be01c1e08c14495.rmeta --extern vte_generate_state_changes=/work/oxidecomputer/crucible/target/debug/deps/libvte_generate_state_changes-52ea4d6c5be5fc21.so --cap-lints allow`
757 Compiling dirs v4.0.0
758 Running `rustc --crate-name dirs /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/dirs-4.0.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=b6e400c266a3b937 -C extra-filename=-b6e400c266a3b937 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern dirs_sys=/work/oxidecomputer/crucible/target/debug/deps/libdirs_sys-15239ab4fd89ac89.rmeta --cap-lints allow`
759 Compiling rustix v0.33.4
760 Running `rustc --crate-name rustix --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustix-0.33.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="io-lifetimes"' --cfg 'feature="std"' -C metadata=f2ea1fb682f42c58 -C extra-filename=-f2ea1fb682f42c58 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern errno=/work/oxidecomputer/crucible/target/debug/deps/liberrno-af3769d2acbbbf20.rmeta --extern io_lifetimes=/work/oxidecomputer/crucible/target/debug/deps/libio_lifetimes-59f01ff60b1f438f.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow --cfg libc`
761 Compiling rayon-core v1.9.2
762 Running `rustc --crate-name rayon_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rayon-core-1.9.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=000ce3790ebea6a1 -C extra-filename=-000ce3790ebea6a1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crossbeam_channel=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_channel-19ff6fa486e51c63.rmeta --extern crossbeam_deque=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_deque-46df68c0a92e69f5.rmeta --extern crossbeam_utils=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_utils-a1fb255bfa31483a.rmeta --extern num_cpus=/work/oxidecomputer/crucible/target/debug/deps/libnum_cpus-67a451bebfcc5086.rmeta --cap-lints allow`
763 Compiling structmeta v0.2.0
764 Running `rustc --crate-name structmeta --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/structmeta-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=3c927247a3e97db4 -C extra-filename=-3c927247a3e97db4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rmeta --extern structmeta_derive=/work/oxidecomputer/crucible/target/debug/deps/libstructmeta_derive-6868cfc37bbab5f5.so --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rmeta --cap-lints allow`
765 Compiling crc32fast v1.3.2
766 Running `rustc --crate-name crc32fast /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crc32fast-1.3.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=bbced466bd6966b3 -C extra-filename=-bbced466bd6966b3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --cap-lints allow --cfg crc32fast_stdarchx86`
767 Compiling phf v0.11.1
768 Running `rustc --crate-name phf --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/phf-0.11.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=26f57b9bc310a574 -C extra-filename=-26f57b9bc310a574 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern phf_shared=/work/oxidecomputer/crucible/target/debug/deps/libphf_shared-f4c611c5b95ef91c.rmeta --cap-lints allow`
769 Compiling tokio-stream v0.1.8
770 Compiling rand_xorshift v0.3.0
771 Running `rustc --crate-name tokio_stream --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-stream-0.1.8/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="time"' -C metadata=4c3a15ae8dcea990 -C extra-filename=-4c3a15ae8dcea990 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --cap-lints allow`
772 Running `rustc --crate-name rand_xorshift --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rand_xorshift-0.3.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=a099defc127d0ae4 -C extra-filename=-a099defc127d0ae4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rand_core=/work/oxidecomputer/crucible/target/debug/deps/librand_core-e2870cc0e01c33c9.rmeta --cap-lints allow`
773 Compiling xattr v0.2.2
774 Running `rustc --crate-name xattr /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/xattr-0.2.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="unsupported"' -C metadata=70407947526a7a4b -C extra-filename=-70407947526a7a4b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
775 Compiling filetime v0.2.17
776 Running `rustc --crate-name filetime --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/filetime-0.2.17/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=337368c6d4c995d8 -C extra-filename=-337368c6d4c995d8 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
777 Compiling csv-core v0.1.10
778 Running `rustc --crate-name csv_core --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/csv-core-0.1.10/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=6a47c452f1df15e2 -C extra-filename=-6a47c452f1df15e2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern memchr=/work/oxidecomputer/crucible/target/debug/deps/libmemchr-9611c546f9b73cca.rmeta --cap-lints allow`
779 Compiling unarray v0.1.4
780 Running `rustc --crate-name unarray --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/unarray-0.1.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=bb988de2629d2530 -C extra-filename=-bb988de2629d2530 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
781 Compiling same-file v1.0.6
782 Running `rustc --crate-name same_file --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/same-file-1.0.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=b7a6da1044557b8b -C extra-filename=-b7a6da1044557b8b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
783 Compiling proptest v1.2.0
784 Running `rustc --crate-name proptest --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/proptest-1.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="bit-set"' --cfg 'feature="break-dead-code"' --cfg 'feature="default"' --cfg 'feature="fork"' --cfg 'feature="lazy_static"' --cfg 'feature="regex-syntax"' --cfg 'feature="rusty-fork"' --cfg 'feature="std"' --cfg 'feature="tempfile"' --cfg 'feature="timeout"' -C metadata=327f7f2cf6858f27 -C extra-filename=-327f7f2cf6858f27 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bit_set=/work/oxidecomputer/crucible/target/debug/deps/libbit_set-ec12e43f0b47413c.rmeta --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rmeta --extern rand_xorshift=/work/oxidecomputer/crucible/target/debug/deps/librand_xorshift-a099defc127d0ae4.rmeta --extern regex_syntax=/work/oxidecomputer/crucible/target/debug/deps/libregex_syntax-b9593ef3338880de.rmeta --extern rusty_fork=/work/oxidecomputer/crucible/target/debug/deps/librusty_fork-20bbf553eecf6baa.rmeta --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rmeta --extern unarray=/work/oxidecomputer/crucible/target/debug/deps/libunarray-bb988de2629d2530.rmeta 
--cap-lints allow`
785 Compiling walkdir v2.3.2
786 Running `rustc --crate-name walkdir --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/walkdir-2.3.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=5232f739d2ba1b5e -C extra-filename=-5232f739d2ba1b5e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern same_file=/work/oxidecomputer/crucible/target/debug/deps/libsame_file-b7a6da1044557b8b.rmeta --cap-lints allow`
787 Compiling csv v1.2.2
788 Running `rustc --crate-name csv --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/csv-1.2.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=187f0e890389cec3 -C extra-filename=-187f0e890389cec3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern csv_core=/work/oxidecomputer/crucible/target/debug/deps/libcsv_core-6a47c452f1df15e2.rmeta --extern itoa=/work/oxidecomputer/crucible/target/debug/deps/libitoa-f0781104e344570e.rmeta --extern ryu=/work/oxidecomputer/crucible/target/debug/deps/libryu-062e5ac4087417b3.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --cap-lints allow`
789 Compiling tokio-test v0.4.2
790 Running `rustc --crate-name tokio_test --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-test-0.4.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=12a28be646ff63e6 -C extra-filename=-12a28be646ff63e6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_stream=/work/oxidecomputer/crucible/target/debug/deps/libasync_stream-0486f21173e73f9c.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_stream=/work/oxidecomputer/crucible/target/debug/deps/libtokio_stream-4c3a15ae8dcea990.rmeta --cap-lints allow`
791 Running `rustc --crate-name terminfo --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/terminfo-0.8.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=cd93b6cd14f79089 -C extra-filename=-cd93b6cd14f79089 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern dirs=/work/oxidecomputer/crucible/target/debug/deps/libdirs-b6e400c266a3b937.rmeta --extern fnv=/work/oxidecomputer/crucible/target/debug/deps/libfnv-5641130f60a8056b.rmeta --extern nom=/work/oxidecomputer/crucible/target/debug/deps/libnom-7d7f045245d4ae57.rmeta --extern phf=/work/oxidecomputer/crucible/target/debug/deps/libphf-26f57b9bc310a574.rmeta --cap-lints allow`
792 Compiling fd-lock v3.0.4
793 Running `rustc --crate-name fd_lock --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/fd-lock-3.0.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=dd6f5c85295045f7 -C extra-filename=-dd6f5c85295045f7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern cfg_if=/work/oxidecomputer/crucible/target/debug/deps/libcfg_if-a93276574f0edf39.rmeta --extern rustix=/work/oxidecomputer/crucible/target/debug/deps/librustix-f2ea1fb682f42c58.rmeta --cap-lints allow`
794 Compiling tar v0.4.38
795 Running `rustc --crate-name tar --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tar-0.4.38/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="xattr"' -C metadata=b33bc6012d78be3d -C extra-filename=-b33bc6012d78be3d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern filetime=/work/oxidecomputer/crucible/target/debug/deps/libfiletime-337368c6d4c995d8.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern xattr=/work/oxidecomputer/crucible/target/debug/deps/libxattr-70407947526a7a4b.rmeta --cap-lints allow`
796 Compiling strip-ansi-escapes v0.2.0
797 Running `rustc --crate-name strip_ansi_escapes /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/strip-ansi-escapes-0.2.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=8090d3de2e6bf9be -C extra-filename=-8090d3de2e6bf9be --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern vte=/work/oxidecomputer/crucible/target/debug/deps/libvte-66f6ba7b673381f9.rmeta --cap-lints allow`
798 Compiling rayon v1.5.2
799 Running `rustc --crate-name rayon --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rayon-1.5.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=f2d40ba22c8b185e -C extra-filename=-f2d40ba22c8b185e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crossbeam_deque=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_deque-46df68c0a92e69f5.rmeta --extern either=/work/oxidecomputer/crucible/target/debug/deps/libeither-e075e5427abad613.rmeta --extern rayon_core=/work/oxidecomputer/crucible/target/debug/deps/librayon_core-000ce3790ebea6a1.rmeta --cap-lints allow --cfg has_step_by_rev --cfg has_min_const_generics --cfg has_control_flow`
800 Compiling test-strategy v0.3.1
801 Running `rustc --crate-name test_strategy --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/test-strategy-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=5eb6b90d55d9f739 -C extra-filename=-5eb6b90d55d9f739 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern structmeta=/work/oxidecomputer/crucible/target/debug/deps/libstructmeta-3c927247a3e97db4.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
802 Compiling flate2 v1.0.25
803 Running `rustc --crate-name flate2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/flate2-1.0.25/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="miniz_oxide"' --cfg 'feature="rust_backend"' -C metadata=e91a1b496d4e6ad4 -C extra-filename=-e91a1b496d4e6ad4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crc32fast=/work/oxidecomputer/crucible/target/debug/deps/libcrc32fast-bbced466bd6966b3.rmeta --extern miniz_oxide=/work/oxidecomputer/crucible/target/debug/deps/libminiz_oxide-f7bade0e2b1a9da4.rmeta --cap-lints allow`
804 Compiling httptest v0.15.4
805 Running `rustc --crate-name httptest --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/httptest-0.15.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=174da737d96e2af6 -C extra-filename=-174da737d96e2af6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bstr=/work/oxidecomputer/crucible/target/debug/deps/libbstr-f0785d87dddebfcd.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern crossbeam_channel=/work/oxidecomputer/crucible/target/debug/deps/libcrossbeam_channel-19ff6fa486e51c63.rmeta --extern form_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libform_urlencoded-ef731295a29c9150.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern regex=/work/oxidecomputer/crucible/target/debug/deps/libregex-f9e3a4eb3da387ce.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern serde_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libserde_urlencoded-779d3ac41d95557a.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --cap-lints allow`
806 Compiling progenitor-macro v0.3.0 (https://github.com/oxidecomputer/progenitor?branch=main#76716eea)
807 Running `rustc --crate-name progenitor_macro --edition=2021 /home/build/.cargo/git/checkouts/progenitor-639bd64206ac5e43/76716ee/progenitor-macro/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type proc-macro --emit=dep-info,link -C prefer-dynamic -C embed-bitcode=no -C metadata=4c13a8353939c841 -C extra-filename=-4c13a8353939c841 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rlib --extern progenitor_impl=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor_impl-4eeb9d4c0f08c454.rlib --extern quote=/work/oxidecomputer/crucible/target/debug/deps/libquote-2ffbaa80dd156621.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern serde_tokenstream=/work/oxidecomputer/crucible/target/debug/deps/libserde_tokenstream-2a83027836bfd0fb.rlib --extern serde_yaml=/work/oxidecomputer/crucible/target/debug/deps/libserde_yaml-c7db3f53bbf8134b.rlib --extern syn=/work/oxidecomputer/crucible/target/debug/deps/libsyn-baedf68a9175a6da.rlib --extern proc_macro --cap-lints allow`
808 Compiling portable-atomic v1.4.1
809 Running `rustc --crate-name portable_atomic --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/portable-atomic-1.4.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="fallback"' -C metadata=f0a1a94e9d6381ba -C extra-filename=-f0a1a94e9d6381ba --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow --cfg portable_atomic_llvm_16`
810 Compiling crossterm v0.26.1
811 Running `rustc --crate-name crossterm --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossterm-0.26.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="bracketed-paste"' --cfg 'feature="default"' --cfg 'feature="serde"' -C metadata=ddbacbe0f657f0ff -C extra-filename=-ddbacbe0f657f0ff --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern mio=/work/oxidecomputer/crucible/target/debug/deps/libmio-27a8136cf12de2bb.rmeta --extern parking_lot=/work/oxidecomputer/crucible/target/debug/deps/libparking_lot-970d5c0acece447c.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern signal_hook=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook-ae206b38743b6815.rmeta --extern signal_hook_mio=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook_mio-b78bc232ff08be19.rmeta --cap-lints allow`
812 Compiling signal-hook-tokio v0.3.1
813 Running `rustc --crate-name signal_hook_tokio --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/signal-hook-tokio-0.3.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="futures-core-0_3"' --cfg 'feature="futures-v0_3"' -C metadata=6a6b104c61918fa0 -C extra-filename=-6a6b104c61918fa0 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern futures_core_0_3=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern signal_hook=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook-ae206b38743b6815.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --cap-lints allow`
814 Compiling which v4.4.0
815 Running `rustc --crate-name which --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/which-4.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=23fb4550fe083323 -C extra-filename=-23fb4550fe083323 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern either=/work/oxidecomputer/crucible/target/debug/deps/libeither-e075e5427abad613.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --cap-lints allow`
816 Compiling itertools v0.10.5
817 Running `rustc --crate-name itertools --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/itertools-0.10.5/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="use_alloc"' --cfg 'feature="use_std"' -C metadata=09aeacd112427d42 -C extra-filename=-09aeacd112427d42 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern either=/work/oxidecomputer/crucible/target/debug/deps/libeither-e075e5427abad613.rmeta --cap-lints allow`
818 Compiling nu-ansi-term v0.49.0
819 Running `rustc --crate-name nu_ansi_term --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/nu-ansi-term-0.49.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=cdbbf2d007fd1e63 -C extra-filename=-cdbbf2d007fd1e63 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
820 Compiling number_prefix v0.4.0
821 Running `rustc --crate-name number_prefix /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/number_prefix-0.4.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=be3728a6ec19cda2 -C extra-filename=-be3728a6ec19cda2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
822 Compiling utf8-width v0.1.6
823 Running `rustc --crate-name utf8_width --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/utf8-width-0.1.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=7e583d5482ac364b -C extra-filename=-7e583d5482ac364b --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --cap-lints allow`
824 Compiling indicatif v0.17.6
825 Running `rustc --crate-name indicatif --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/indicatif-0.17.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="rayon"' --cfg 'feature="unicode-width"' -C metadata=297a26a70875006e -C extra-filename=-297a26a70875006e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern console=/work/oxidecomputer/crucible/target/debug/deps/libconsole-4236472a6e29ce0a.rmeta --extern number_prefix=/work/oxidecomputer/crucible/target/debug/deps/libnumber_prefix-be3728a6ec19cda2.rmeta --extern portable_atomic=/work/oxidecomputer/crucible/target/debug/deps/libportable_atomic-f0a1a94e9d6381ba.rmeta --extern rayon=/work/oxidecomputer/crucible/target/debug/deps/librayon-f2d40ba22c8b185e.rmeta --extern unicode_width=/work/oxidecomputer/crucible/target/debug/deps/libunicode_width-87c5262a4c4bb0e9.rmeta --cap-lints allow`
826 Compiling byte-unit v4.0.19
827 Running `rustc --crate-name byte_unit --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/byte-unit-4.0.19/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="serde"' --cfg 'feature="std"' --cfg 'feature="u128"' -C metadata=02cb17c857e20dac -C extra-filename=-02cb17c857e20dac --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern utf8_width=/work/oxidecomputer/crucible/target/debug/deps/libutf8_width-7e583d5482ac364b.rmeta --cap-lints allow`
828 Compiling crossterm v0.27.0
829 Running `rustc --crate-name crossterm --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/crossterm-0.27.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="bracketed-paste"' --cfg 'feature="default"' --cfg 'feature="events"' --cfg 'feature="windows"' -C metadata=3c787fd4c4d4bc45 -C extra-filename=-3c787fd4c4d4bc45 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-109244799287a8c3.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern mio=/work/oxidecomputer/crucible/target/debug/deps/libmio-27a8136cf12de2bb.rmeta --extern parking_lot=/work/oxidecomputer/crucible/target/debug/deps/libparking_lot-970d5c0acece447c.rmeta --extern signal_hook=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook-ae206b38743b6815.rmeta --extern signal_hook_mio=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook_mio-b78bc232ff08be19.rmeta --cap-lints allow`
830 Compiling nbd v0.2.3
831 Running `rustc --crate-name nbd /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/nbd-0.2.3/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=6a0c01a24def5e9a -C extra-filename=-6a0c01a24def5e9a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern byteorder=/work/oxidecomputer/crucible/target/debug/deps/libbyteorder-1306999913c8e1b3.rmeta --cap-lints allow`
832 Compiling crucible-integration-tests v0.1.0 (/work/oxidecomputer/crucible/integration_tests)
833 Running `rustc --crate-name crucible_integration_tests --edition=2021 integration_tests/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=521d4724b4b30c4a -C extra-filename=-521d4724b4b30c4a --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps`
834 Running `rustc --crate-name info --edition=2021 smf/examples/info.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 -C metadata=434b2d59a81079b5 -C extra-filename=-434b2d59a81079b5 --out-dir /work/oxidecomputer/crucible/target/debug/examples -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern num_derive=/work/oxidecomputer/crucible/target/debug/deps/libnum_derive-814c8a0a0a713cba.so --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rlib --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rlib`
835 Compiling clearscreen v2.0.1
836 Running `rustc --crate-name clearscreen --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/clearscreen-2.0.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=5e923be7ef236a41 -C extra-filename=-5e923be7ef236a41 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rmeta --extern terminfo=/work/oxidecomputer/crucible/target/debug/deps/libterminfo-cd93b6cd14f79089.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern which=/work/oxidecomputer/crucible/target/debug/deps/libwhich-23fb4550fe083323.rmeta --cap-lints allow`
837 Compiling reedline v0.23.0
838 Running `rustc --crate-name reedline --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/reedline-0.23.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=6e6244e0f6aa654d -C extra-filename=-6e6244e0f6aa654d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern crossterm=/work/oxidecomputer/crucible/target/debug/deps/libcrossterm-ddbacbe0f657f0ff.rmeta --extern fd_lock=/work/oxidecomputer/crucible/target/debug/deps/libfd_lock-dd6f5c85295045f7.rmeta --extern itertools=/work/oxidecomputer/crucible/target/debug/deps/libitertools-09aeacd112427d42.rmeta --extern nu_ansi_term=/work/oxidecomputer/crucible/target/debug/deps/libnu_ansi_term-cdbbf2d007fd1e63.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern strip_ansi_escapes=/work/oxidecomputer/crucible/target/debug/deps/libstrip_ansi_escapes-8090d3de2e6bf9be.rmeta --extern strum=/work/oxidecomputer/crucible/target/debug/deps/libstrum-59ea3c6704348e58.rmeta --extern strum_macros=/work/oxidecomputer/crucible/target/debug/deps/libstrum_macros-bc907f623478289d.so --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern unicode_segmentation=/work/oxidecomputer/crucible/target/debug/deps/libunicode_segmentation-06176721b7b95955.rmeta --extern unicode_width=/work/oxidecomputer/crucible/target/debug/deps/libunicode_width-87c5262a4c4bb0e9.rmeta --cap-lints allow`
839 Running `rustc --crate-name crucible_smf --edition=2021 smf/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=6114df38a9482a0c -C extra-filename=-6114df38a9482a0c --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern num_derive=/work/oxidecomputer/crucible/target/debug/deps/libnum_derive-814c8a0a0a713cba.so --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rlib --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rlib`
840 Running `rustc --crate-name crucible_client_types --edition=2021 crucible-client-types/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=5ba9f9d411803900 -C extra-filename=-5ba9f9d411803900 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib`
841 Running `rustc --crate-name ring --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/ring-0.16.20/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="dev_urandom_fallback"' --cfg 'feature="once_cell"' -C metadata=76ccf829b8b489e1 -C extra-filename=-76ccf829b8b489e1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern spin=/work/oxidecomputer/crucible/target/debug/deps/libspin-bfb6115ad3135235.rmeta --extern untrusted=/work/oxidecomputer/crucible/target/debug/deps/libuntrusted-4b93784238d33e58.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -l static=ring-core -l static=ring-test`
842 Compiling rustls v0.21.6
843 Running `/work/oxidecomputer/crucible/target/debug/build/rustls-0c105edc866f624d/build-script-build`
844 Compiling rustls-webpki v0.101.4
845 Running `rustc --crate-name webpki --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustls-webpki-0.101.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="alloc"' --cfg 'feature="default"' --cfg 'feature="std"' -C metadata=34d764dbf1af1e62 -C extra-filename=-34d764dbf1af1e62 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern ring=/work/oxidecomputer/crucible/target/debug/deps/libring-76ccf829b8b489e1.rmeta --extern untrusted=/work/oxidecomputer/crucible/target/debug/deps/libuntrusted-4b93784238d33e58.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
846 Compiling sct v0.7.0
847 Running `rustc --crate-name sct --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/sct-0.7.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=78bb43c10db32a31 -C extra-filename=-78bb43c10db32a31 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern ring=/work/oxidecomputer/crucible/target/debug/deps/libring-76ccf829b8b489e1.rmeta --extern untrusted=/work/oxidecomputer/crucible/target/debug/deps/libuntrusted-4b93784238d33e58.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
848 Running `rustc --crate-name rustls --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/rustls-0.21.6/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="dangerous_configuration"' --cfg 'feature="default"' --cfg 'feature="log"' --cfg 'feature="logging"' --cfg 'feature="tls12"' -C metadata=3df6867cfa5c4a0a -C extra-filename=-3df6867cfa5c4a0a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern ring=/work/oxidecomputer/crucible/target/debug/deps/libring-76ccf829b8b489e1.rmeta --extern webpki=/work/oxidecomputer/crucible/target/debug/deps/libwebpki-34d764dbf1af1e62.rmeta --extern sct=/work/oxidecomputer/crucible/target/debug/deps/libsct-78bb43c10db32a31.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
849 Compiling tokio-rustls v0.24.1
850 Running `rustc --crate-name tokio_rustls --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-rustls-0.24.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' --cfg 'feature="logging"' --cfg 'feature="tls12"' -C metadata=eafe4ab74a176b7d -C extra-filename=-eafe4ab74a176b7d --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern rustls=/work/oxidecomputer/crucible/target/debug/deps/librustls-3df6867cfa5c4a0a.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
851 Compiling hyper-rustls v0.24.0
852 Compiling dropshot v0.9.1-dev (https://github.com/oxidecomputer/dropshot?branch=main#aca6de3c)
853 Running `rustc --crate-name hyper_rustls --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/hyper-rustls-0.24.0/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=9a83fdcd4675665e -C extra-filename=-9a83fdcd4675665e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern rustls=/work/oxidecomputer/crucible/target/debug/deps/librustls-3df6867cfa5c4a0a.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
854 Running `rustc --crate-name dropshot --edition=2018 /home/build/.cargo/git/checkouts/dropshot-a4a923d29dccc492/aca6de3/dropshot/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="usdt"' --cfg 'feature="usdt-probes"' -C metadata=a49a4505c9c6b86f -C extra-filename=-a49a4505c9c6b86f --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern async_stream=/work/oxidecomputer/crucible/target/debug/deps/libasync_stream-0486f21173e73f9c.rmeta --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern camino=/work/oxidecomputer/crucible/target/debug/deps/libcamino-45f0f4a2c258f934.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern debug_ignore=/work/oxidecomputer/crucible/target/debug/deps/libdebug_ignore-2303f500fcbc7093.rmeta --extern dropshot_endpoint=/work/oxidecomputer/crucible/target/debug/deps/libdropshot_endpoint-1ff3a3dd0352c250.so --extern form_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libform_urlencoded-ef731295a29c9150.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern hostname=/work/oxidecomputer/crucible/target/debug/deps/libhostname-4a0f8b1a56e5681a.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern indexmap=/work/oxidecomputer/crucible/target/debug/deps/libindexmap-c2c8f74266eebb64.rmeta --extern 
openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rmeta --extern paste=/work/oxidecomputer/crucible/target/debug/deps/libpaste-251489637fc3d2bc.so --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern proc_macro2=/work/oxidecomputer/crucible/target/debug/deps/libproc_macro2-a0e1129b171da08c.rmeta --extern rustls=/work/oxidecomputer/crucible/target/debug/deps/librustls-3df6867cfa5c4a0a.rmeta --extern rustls_pemfile=/work/oxidecomputer/crucible/target/debug/deps/librustls_pemfile-e52b2a6debfcae48.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern serde_path_to_error=/work/oxidecomputer/crucible/target/debug/deps/libserde_path_to_error-f8fffad8b554a310.rmeta --extern serde_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libserde_urlencoded-779d3ac41d95557a.rmeta --extern sha1=/work/oxidecomputer/crucible/target/debug/deps/libsha1-8fdbd7a715f3bef1.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rmeta --extern slog_bunyan=/work/oxidecomputer/crucible/target/debug/deps/libslog_bunyan-dce051a6775f1d99.rmeta --extern slog_json=/work/oxidecomputer/crucible/target/debug/deps/libslog_json-d8408f8f3a6dd5b7.rmeta --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rmeta --extern 
toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rmeta --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --extern waitgroup=/work/oxidecomputer/crucible/target/debug/deps/libwaitgroup-db859ead02bd709e.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
855 Running `rustc --crate-name libgit2_sys --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/libgit2-sys-0.15.2+1.6.4/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=8cfb4e998561bba5 -C extra-filename=-8cfb4e998561bba5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern libz_sys=/work/oxidecomputer/crucible/target/debug/deps/liblibz_sys-a3111f279c2174e3.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -l static=git2 -L native=/usr/lib/amd64 --cfg libgit2_vendored`
856 Compiling reqwest v0.11.20
857 Running `rustc --crate-name reqwest --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/reqwest-0.11.20/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="__rustls"' --cfg 'feature="__tls"' --cfg 'feature="blocking"' --cfg 'feature="default"' --cfg 'feature="default-tls"' --cfg 'feature="hyper-rustls"' --cfg 'feature="hyper-tls"' --cfg 'feature="json"' --cfg 'feature="native-tls-crate"' --cfg 'feature="rustls"' --cfg 'feature="rustls-pemfile"' --cfg 'feature="rustls-tls"' --cfg 'feature="rustls-tls-webpki-roots"' --cfg 'feature="serde_json"' --cfg 'feature="stream"' --cfg 'feature="tokio-native-tls"' --cfg 'feature="tokio-rustls"' --cfg 'feature="tokio-util"' --cfg 'feature="wasm-streams"' --cfg 'feature="webpki-roots"' -C metadata=6407fc4e9374ca8e -C extra-filename=-6407fc4e9374ca8e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern encoding_rs=/work/oxidecomputer/crucible/target/debug/deps/libencoding_rs-3255048793b3f7a6.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern h2=/work/oxidecomputer/crucible/target/debug/deps/libh2-3e2d8390f23dd48a.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern http_body=/work/oxidecomputer/crucible/target/debug/deps/libhttp_body-bb1d69dd918c127f.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern 
hyper_rustls=/work/oxidecomputer/crucible/target/debug/deps/libhyper_rustls-9a83fdcd4675665e.rmeta --extern hyper_tls=/work/oxidecomputer/crucible/target/debug/deps/libhyper_tls-2dbf57f91f681e2c.rmeta --extern ipnet=/work/oxidecomputer/crucible/target/debug/deps/libipnet-8b250db103a32779.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern mime=/work/oxidecomputer/crucible/target/debug/deps/libmime-ac14a9115eddd3c2.rmeta --extern native_tls_crate=/work/oxidecomputer/crucible/target/debug/deps/libnative_tls-320c05ab5bbd33c9.rmeta --extern once_cell=/work/oxidecomputer/crucible/target/debug/deps/libonce_cell-fdb5f9e769d1e589.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern pin_project_lite=/work/oxidecomputer/crucible/target/debug/deps/libpin_project_lite-c035e320730303c2.rmeta --extern rustls=/work/oxidecomputer/crucible/target/debug/deps/librustls-3df6867cfa5c4a0a.rmeta --extern rustls_pemfile=/work/oxidecomputer/crucible/target/debug/deps/librustls_pemfile-e52b2a6debfcae48.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern serde_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libserde_urlencoded-779d3ac41d95557a.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_native_tls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_native_tls-f56aba82a642e205.rmeta --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rmeta --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rmeta --extern tower_service=/work/oxidecomputer/crucible/target/debug/deps/libtower_service-51da71f2ad5117ee.rmeta --extern 
url=/work/oxidecomputer/crucible/target/debug/deps/liburl-ff56943ab9066fdc.rmeta --extern webpki_roots=/work/oxidecomputer/crucible/target/debug/deps/libwebpki_roots-31272bd9a7615638.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
858 Compiling git2 v0.17.2
859 Running `rustc --crate-name git2 --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/git2-0.17.2/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C metadata=c3276e37b1dd24e5 -C extra-filename=-c3276e37b1dd24e5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bitflags=/work/oxidecomputer/crucible/target/debug/deps/libbitflags-b5bc15fb96af61fc.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern libgit2_sys=/work/oxidecomputer/crucible/target/debug/deps/liblibgit2_sys-8cfb4e998561bba5.rmeta --extern log=/work/oxidecomputer/crucible/target/debug/deps/liblog-930e200cffaa7930.rmeta --extern url=/work/oxidecomputer/crucible/target/debug/deps/liburl-ff56943ab9066fdc.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/usr/lib/amd64`
860 Compiling progenitor-client v0.3.0 (https://github.com/oxidecomputer/progenitor?branch=main#76716eea)
861 Compiling omicron-zone-package v0.9.1
862 Running `rustc --crate-name progenitor_client --edition=2021 /home/build/.cargo/git/checkouts/progenitor-639bd64206ac5e43/76716ee/progenitor-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=a49921826ff8ec2a -C extra-filename=-a49921826ff8ec2a --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern serde_urlencoded=/work/oxidecomputer/crucible/target/debug/deps/libserde_urlencoded-779d3ac41d95557a.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
863 Running `rustc --crate-name omicron_zone_package --edition=2018 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/omicron-zone-package-0.9.1/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=8423a7bf8bd88040 -C extra-filename=-8423a7bf8bd88040 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern filetime=/work/oxidecomputer/crucible/target/debug/deps/libfiletime-337368c6d4c995d8.rmeta --extern flate2=/work/oxidecomputer/crucible/target/debug/deps/libflate2-e91a1b496d4e6ad4.rmeta --extern futures_util=/work/oxidecomputer/crucible/target/debug/deps/libfutures_util-b4da5d5433271d56.rmeta --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern ring=/work/oxidecomputer/crucible/target/debug/deps/libring-76ccf829b8b489e1.rmeta --extern semver=/work/oxidecomputer/crucible/target/debug/deps/libsemver-8c1c5827befd93e7.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_derive=/work/oxidecomputer/crucible/target/debug/deps/libserde_derive-3441a1f9756a6d5b.so --extern tar=/work/oxidecomputer/crucible/target/debug/deps/libtar-b33bc6012d78be3d.rmeta --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern 
tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rmeta --extern walkdir=/work/oxidecomputer/crucible/target/debug/deps/libwalkdir-5232f739d2ba1b5e.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
864 Compiling progenitor v0.3.0 (https://github.com/oxidecomputer/progenitor?branch=main#76716eea)
865 Running `rustc --crate-name progenitor --edition=2021 /home/build/.cargo/git/checkouts/progenitor-639bd64206ac5e43/76716ee/progenitor/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0319e0dfd841f493 -C extra-filename=-0319e0dfd841f493 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern progenitor_client=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor_client-a49921826ff8ec2a.rmeta --extern progenitor_impl=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor_impl-4eeb9d4c0f08c454.rmeta --extern progenitor_macro=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor_macro-4c13a8353939c841.so --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
866 Compiling repair-client v0.0.1 (/work/oxidecomputer/crucible/repair-client)
867 Compiling dsc-client v0.0.1 (/work/oxidecomputer/crucible/dsc-client)
868 Running `rustc --crate-name repair_client --edition=2021 repair-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=1452d56087b6ccb7 -C extra-filename=-1452d56087b6ccb7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
869 Running `rustc --crate-name dsc_client --edition=2021 dsc-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=15b0c81fa833cf0f -C extra-filename=-15b0c81fa833cf0f --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
870 Compiling crucible-pantry-client v0.0.1 (/work/oxidecomputer/crucible/pantry-client)
871 Running `rustc --crate-name crucible_pantry_client --edition=2021 pantry-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=ccb9ddeebb23cea2 -C extra-filename=-ccb9ddeebb23cea2 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
872 Compiling omicron-common v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
873 Running `rustc --crate-name omicron_common --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/common/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=52df1fff8b36d94c -C extra-filename=-52df1fff8b36d94c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern api_identity=/work/oxidecomputer/crucible/target/debug/deps/libapi_identity-90d45ecc06c8f773.so --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern backoff=/work/oxidecomputer/crucible/target/debug/deps/libbackoff-2bc4a2fd075cf434.rmeta --extern camino=/work/oxidecomputer/crucible/target/debug/deps/libcamino-45f0f4a2c258f934.rmeta --extern camino_tempfile=/work/oxidecomputer/crucible/target/debug/deps/libcamino_tempfile-38b6a8d85c9dc0c0.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern ipnetwork=/work/oxidecomputer/crucible/target/debug/deps/libipnetwork-0e9e550a49db2c52.rmeta --extern lazy_static=/work/oxidecomputer/crucible/target/debug/deps/liblazy_static-9c1a21fa7aad0259.rmeta --extern macaddr=/work/oxidecomputer/crucible/target/debug/deps/libmacaddr-98e89df75c36be48.rmeta --extern 
parse_display=/work/oxidecomputer/crucible/target/debug/deps/libparse_display-34a1a5d52375b70b.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern ring=/work/oxidecomputer/crucible/target/debug/deps/libring-76ccf829b8b489e1.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern semver=/work/oxidecomputer/crucible/target/debug/deps/libsemver-8c1c5827befd93e7.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_derive=/work/oxidecomputer/crucible/target/debug/deps/libserde_derive-3441a1f9756a6d5b.so --extern serde_human_bytes=/work/oxidecomputer/crucible/target/debug/deps/libserde_human_bytes-7f54d0fcbf9b36f3.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern serde_with=/work/oxidecomputer/crucible/target/debug/deps/libserde_with-4f9ddd30b380d6cf.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern steno=/work/oxidecomputer/crucible/target/debug/deps/libsteno-d1d3ce30296926ad.rmeta --extern strum=/work/oxidecomputer/crucible/target/debug/deps/libstrum-59ea3c6704348e58.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_postgres=/work/oxidecomputer/crucible/target/debug/deps/libtokio_postgres-5628b93feb58339b.rmeta --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow 
-L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
874 Compiling crucible-control-client v0.0.1 (/work/oxidecomputer/crucible/control-client)
875 Running `rustc --crate-name crucible_control_client --edition=2021 control-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=3d0142c7d3790e17 -C extra-filename=-3d0142c7d3790e17 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
876 Compiling vergen v8.2.4
877 Running `rustc --crate-name vergen --edition=2021 /home/build/.cargo/registry/src/index.crates.io-6f17d22bba15001f/vergen-8.2.4/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no --cfg 'feature="cargo"' --cfg 'feature="default"' --cfg 'feature="git"' --cfg 'feature="git2"' --cfg 'feature="git2-rs"' --cfg 'feature="rustc"' --cfg 'feature="rustc_version"' --cfg 'feature="time"' -C metadata=e75c33287bd2547c -C extra-filename=-e75c33287bd2547c --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern git2_rs=/work/oxidecomputer/crucible/target/debug/deps/libgit2-c3276e37b1dd24e5.rmeta --extern rustc_version=/work/oxidecomputer/crucible/target/debug/deps/librustc_version-201ef6100eba532b.rmeta --extern time=/work/oxidecomputer/crucible/target/debug/deps/libtime-9b604407a0d52f86.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/usr/lib/amd64 --cfg stable --cfg msrv`
878 Compiling crucible-agent-client v0.0.1 (/work/oxidecomputer/crucible/agent-client)
879 Running `rustc --crate-name crucible_agent_client --edition=2021 agent-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=86e1c18945d61be3 -C extra-filename=-86e1c18945d61be3 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
880 Compiling crucible-common v0.0.1 (/work/oxidecomputer/crucible/common)
881 Running `rustc --crate-name build_script_build --edition=2021 common/build.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type bin --emit=dep-info,link -C embed-bitcode=no -C metadata=e7c30bee09b7ef3b -C extra-filename=-e7c30bee09b7ef3b --out-dir /work/oxidecomputer/crucible/target/debug/build/crucible-common-e7c30bee09b7ef3b -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern vergen=/work/oxidecomputer/crucible/target/debug/deps/libvergen-e75c33287bd2547c.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/usr/lib/amd64`
882 Running `/work/oxidecomputer/crucible/target/debug/build/crucible-common-e7c30bee09b7ef3b/build-script-build`
883 Running `rustc --crate-name crucible_common --edition=2021 common/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=c45901e9152d33f4 -C extra-filename=-c45901e9152d33f4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern atty=/work/oxidecomputer/crucible/target/debug/deps/libatty-bfb6a2cdc762f7c4.rmeta --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rmeta --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rmeta --extern rustls_pemfile=/work/oxidecomputer/crucible/target/debug/deps/librustls_pemfile-e52b2a6debfcae48.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rmeta --extern slog_bunyan=/work/oxidecomputer/crucible/target/debug/deps/libslog_bunyan-dce051a6775f1d99.rmeta --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rmeta --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rmeta --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta 
--extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rmeta --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rmeta --extern twox_hash=/work/oxidecomputer/crucible/target/debug/deps/libtwox_hash-9f5dd4f7319ca539.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
884 Running `rustc --crate-name crucible_common --edition=2021 common/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=9a32809bdbdf85c4 -C extra-filename=-9a32809bdbdf85c4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern atty=/work/oxidecomputer/crucible/target/debug/deps/libatty-bfb6a2cdc762f7c4.rlib --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rlib --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rlib --extern rustls_pemfile=/work/oxidecomputer/crucible/target/debug/deps/librustls_pemfile-e52b2a6debfcae48.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_bunyan=/work/oxidecomputer/crucible/target/debug/deps/libslog_bunyan-dce051a6775f1d99.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rlib --extern 
tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern twox_hash=/work/oxidecomputer/crucible/target/debug/deps/libtwox_hash-9f5dd4f7319ca539.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
885 Compiling crucible-protocol v0.0.0 (/work/oxidecomputer/crucible/protocol)
886 Running `rustc --crate-name crucible_protocol --edition=2021 protocol/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=0682e169c907a102 -C extra-filename=-0682e169c907a102 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rmeta --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern num_enum=/work/oxidecomputer/crucible/target/debug/deps/libnum_enum-9cd7a6d9dcf1dd5a.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
887 Compiling dsc v0.1.0 (/work/oxidecomputer/crucible/dsc)
888 Running `rustc --crate-name crucible_agent_client --edition=2021 agent-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=6f3900e8033b57ec -C extra-filename=-6f3900e8033b57ec --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
889 Running `rustc --crate-name dsc --edition=2021 dsc/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=2b80980cbc3bac2c -C extra-filename=-2b80980cbc3bac2c --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern byte_unit=/work/oxidecomputer/crucible/target/debug/deps/libbyte_unit-02cb17c857e20dac.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern csv=/work/oxidecomputer/crucible/target/debug/deps/libcsv-187f0e890389cec3.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern dsc_client=/work/oxidecomputer/crucible/target/debug/deps/libdsc_client-15b0c81fa833cf0f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern 
tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
890 Running `rustc --crate-name dsc_client --edition=2021 dsc-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=f8a5b497695371e1 -C extra-filename=-f8a5b497695371e1 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
891 Running `rustc --crate-name crucible_pantry_client --edition=2021 pantry-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=8a27f01eb086219e -C extra-filename=-8a27f01eb086219e --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
892 Compiling nexus-client v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
893 Running `rustc --crate-name nexus_client --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/nexus-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=fd6034b144d15fe8 -C extra-filename=-fd6034b144d15fe8 --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern ipnetwork=/work/oxidecomputer/crucible/target/debug/deps/libipnetwork-0e9e550a49db2c52.rmeta --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rmeta --extern omicron_passwords=/work/oxidecomputer/crucible/target/debug/deps/libomicron_passwords-ac6e3a602e6ad041.rmeta --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rmeta --extern regress=/work/oxidecomputer/crucible/target/debug/deps/libregress-10da65958da1c830.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
894 Compiling oximeter v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
895 Running `rustc --crate-name oximeter --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/oximeter/oximeter/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=90ae047d6b643e4e -C extra-filename=-90ae047d6b643e4e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rmeta --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rmeta --extern oximeter_macro_impl=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_macro_impl-e4cc949eda20c416.so --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
896 Running `rustc --crate-name crucible_protocol --edition=2021 protocol/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=c776b78ce4b42bf6 -C extra-filename=-c776b78ce4b42bf6 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern num_enum=/work/oxidecomputer/crucible/target/debug/deps/libnum_enum-9cd7a6d9dcf1dd5a.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
897 Compiling crucible-package v0.1.0 (/work/oxidecomputer/crucible/package)
898 Running `rustc --crate-name crucible_package --edition=2021 package/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=e8ff0170d25e0da5 -C extra-filename=-e8ff0170d25e0da5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern omicron_zone_package=/work/oxidecomputer/crucible/target/debug/deps/libomicron_zone_package-8423a7bf8bd88040.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
899 Running `rustc --crate-name crucible_control_client --edition=2021 control-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=d0a58354872d46d9 -C extra-filename=-d0a58354872d46d9 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
900 Running `rustc --crate-name repair_client --edition=2021 repair-client/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=5353c8de97b4615f -C extra-filename=-5353c8de97b4615f --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
901 Running `rustc --crate-name crucible_protocol --edition=2021 protocol/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=d81e6562be2ffe77 -C extra-filename=-d81e6562be2ffe77 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern num_enum=/work/oxidecomputer/crucible/target/debug/deps/libnum_enum-9cd7a6d9dcf1dd5a.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out`
902 Compiling crucible-agent v0.0.1 (/work/oxidecomputer/crucible/agent)
903 Running `rustc --crate-name crucible_agent --edition=2021 agent/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=acbf118b39db417b -C extra-filename=-acbf118b39db417b --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern 
serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern subprocess=/work/oxidecomputer/crucible/target/debug/deps/libsubprocess-0acfc5c9b903588a.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
904 Compiling oximeter-producer v0.1.0 (https://github.com/oxidecomputer/omicron?branch=main#4c05962d)
905 Running `rustc --crate-name oximeter_producer --edition=2021 /home/build/.cargo/git/checkouts/omicron-d039c41f152bda83/4c05962/oximeter/producer/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=5dc4f732e258486e -C extra-filename=-5dc4f732e258486e --out-dir /work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rmeta --extern nexus_client=/work/oxidecomputer/crucible/target/debug/deps/libnexus_client-fd6034b144d15fe8.rmeta --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rmeta --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rmeta --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta --cap-lints allow -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
906 Compiling crucible v0.0.1 (/work/oxidecomputer/crucible/upstairs)
907 Running `rustc --crate-name crucible --edition=2021 upstairs/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=41ca439abdc23695 -C extra-filename=-41ca439abdc23695 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern aes_gcm_siv=/work/oxidecomputer/crucible/target/debug/deps/libaes_gcm_siv-21495b616a07c9a4.rmeta --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern async_recursion=/work/oxidecomputer/crucible/target/debug/deps/libasync_recursion-ce9499495a1cb858.so --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern crucible_client_types=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_client_types-bd54c4335d2370bd.rmeta --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rmeta --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rmeta --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern itertools=/work/oxidecomputer/crucible/target/debug/deps/libitertools-b06e69badd72e55c.rmeta --extern 
libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rmeta --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rmeta --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rmeta --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rmeta --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rmeta --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rmeta --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rmeta --extern 
tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rmeta --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
908 Running `rustc --crate-name crucible --edition=2021 upstairs/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=53d074fabbf363e8 -C extra-filename=-53d074fabbf363e8 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern aes_gcm_siv=/work/oxidecomputer/crucible/target/debug/deps/libaes_gcm_siv-21495b616a07c9a4.rlib --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern async_recursion=/work/oxidecomputer/crucible/target/debug/deps/libasync_recursion-ce9499495a1cb858.so --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern crucible_client_types=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_client_types-bd54c4335d2370bd.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern 
itertools=/work/oxidecomputer/crucible/target/debug/deps/libitertools-b06e69badd72e55c.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rlib --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rlib --extern proptest=/work/oxidecomputer/crucible/target/debug/deps/libproptest-327f7f2cf6858f27.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern 
tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern test_strategy=/work/oxidecomputer/crucible/target/debug/deps/libtest_strategy-5eb6b90d55d9f739.so --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern tokio_test=/work/oxidecomputer/crucible/target/debug/deps/libtokio_test-12a28be646ff63e6.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rlib --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
909 Compiling crucible-downstairs v0.0.1 (/work/oxidecomputer/crucible/downstairs)
910 Compiling crucible-pantry v0.0.1 (/work/oxidecomputer/crucible/pantry)
911 Running `rustc --crate-name crucible_downstairs --edition=2021 downstairs/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg 'feature="default"' -C metadata=6276be71be5284a4 -C extra-filename=-6276be71be5284a4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rmeta --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rmeta --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rmeta --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rmeta --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rmeta --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rmeta --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern 
hyper_staticfile=/work/oxidecomputer/crucible/target/debug/deps/libhyper_staticfile-559b4389ef952563.rmeta --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rmeta --extern mime_guess=/work/oxidecomputer/crucible/target/debug/deps/libmime_guess-66974d6c31968dc2.rmeta --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rmeta --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rmeta --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-5524fd7817ad57fb.rmeta --extern opentelemetry_jaeger=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_jaeger-9ebf47742e5e063f.rmeta --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rmeta --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rmeta --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rmeta --extern repair_client=/work/oxidecomputer/crucible/target/debug/deps/librepair_client-1452d56087b6ccb7.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rmeta --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern 
slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rmeta --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rmeta --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rmeta --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rmeta --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rmeta --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rmeta --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rmeta --extern tracing_opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libtracing_opentelemetry-8f8cfc1900c3a663.rmeta --extern tracing_subscriber=/work/oxidecomputer/crucible/target/debug/deps/libtracing_subscriber-e39dae5ba339bc78.rmeta --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
912 Running `rustc --crate-name crucible_pantry --edition=2021 pantry/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 -C metadata=fe633af5059fe3a7 -C extra-filename=-fe633af5059fe3a7 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rmeta --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rmeta --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rmeta --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rmeta --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rmeta --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rmeta --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rmeta --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rmeta --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rmeta --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rmeta --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rmeta --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rmeta --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rmeta --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rmeta --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rmeta --extern 
serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rmeta --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rmeta --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rmeta --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rmeta --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rmeta --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rmeta -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
913 Compiling crutest v0.1.0 (/work/oxidecomputer/crucible/crutest)
914 Compiling measure-iops v0.0.1 (/work/oxidecomputer/crucible/measure_iops)
915 Compiling crucible-nbd-server v0.1.0 (/work/oxidecomputer/crucible/nbd_server)
916 Running `rustc --crate-name crucible_pantry --edition=2021 pantry/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=b51bc30f7a0cbfa5 -C extra-filename=-b51bc30f7a0cbfa5 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_pantry=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry-fe633af5059fe3a7.rlib --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern 
openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern subprocess=/work/oxidecomputer/crucible/target/debug/deps/libsubprocess-0acfc5c9b903588a.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
917 Running `rustc --crate-name crutest --edition=2021 crutest/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=af78e92d646e2d06 -C extra-filename=-af78e92d646e2d06 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crossterm=/work/oxidecomputer/crucible/target/debug/deps/libcrossterm-3c787fd4c4d4bc45.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern csv=/work/oxidecomputer/crucible/target/debug/deps/libcsv-187f0e890389cec3.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern dsc_client=/work/oxidecomputer/crucible/target/debug/deps/libdsc_client-15b0c81fa833cf0f.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern indicatif=/work/oxidecomputer/crucible/target/debug/deps/libindicatif-297a26a70875006e.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern 
oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rlib --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern reedline=/work/oxidecomputer/crucible/target/debug/deps/libreedline-6e6244e0f6aa654d.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern signal_hook=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook-ae206b38743b6815.rlib --extern signal_hook_tokio=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook_tokio-6a6b104c61918fa0.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
918 Running `rustc --crate-name measure_iops --edition=2021 measure_iops/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=cbdca99bf515defe -C extra-filename=-cbdca99bf515defe --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
919 Running `rustc --crate-name crucible_nbd_server --edition=2021 nbd_server/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=8c1612631a1669fd -C extra-filename=-8c1612631a1669fd --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern nbd=/work/oxidecomputer/crucible/target/debug/deps/libnbd-6a0c01a24def5e9a.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib -L native=/usr/lib/amd64 -L 
native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
920 Running `rustc --crate-name crucible_pantry --edition=2021 pantry/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=8e1bf492bfe90e8c -C extra-filename=-8e1bf492bfe90e8c --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern 
openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern subprocess=/work/oxidecomputer/crucible/target/debug/deps/libsubprocess-0acfc5c9b903588a.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
921 Compiling cmon v0.1.0 (/work/oxidecomputer/crucible/cmon)
922 Running `rustc --crate-name cmon --edition=2021 cmon/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=2a9909624d24c98d -C extra-filename=-2a9909624d24c98d --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern clearscreen=/work/oxidecomputer/crucible/target/debug/deps/libclearscreen-5e923be7ef236a41.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_control_client=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_control_client-3d0142c7d3790e17.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
923 Running `rustc --crate-name crucible_downstairs --edition=2021 downstairs/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test --cfg 'feature="default"' -C metadata=dce67baac661a5f4 -C extra-filename=-dce67baac661a5f4 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern 
hyper_staticfile=/work/oxidecomputer/crucible/target/debug/deps/libhyper_staticfile-559b4389ef952563.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern mime_guess=/work/oxidecomputer/crucible/target/debug/deps/libmime_guess-66974d6c31968dc2.rlib --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-5524fd7817ad57fb.rlib --extern opentelemetry_jaeger=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_jaeger-9ebf47742e5e063f.rlib --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rlib --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern repair_client=/work/oxidecomputer/crucible/target/debug/deps/librepair_client-1452d56087b6ccb7.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern 
serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rlib --extern tracing_opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libtracing_opentelemetry-8f8cfc1900c3a663.rlib --extern tracing_subscriber=/work/oxidecomputer/crucible/target/debug/deps/libtracing_subscriber-e39dae5ba339bc78.rlib --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
924 Compiling crudd v0.1.0 (/work/oxidecomputer/crucible/crudd)
925 Running `rustc --crate-name crudd --edition=2021 crudd/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=3e9b00990c25260e -C extra-filename=-3e9b00990c25260e --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern signal_hook=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook-ae206b38743b6815.rlib --extern signal_hook_tokio=/work/oxidecomputer/crucible/target/debug/deps/libsignal_hook_tokio-6a6b104c61918fa0.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib 
--extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
926 Compiling crucible-hammer v0.1.0 (/work/oxidecomputer/crucible/hammer)
927 Running `rustc --crate-name crucible_hammer --edition=2021 hammer/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=9622fb9be260fb45 -C extra-filename=-9622fb9be260fb45 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-5524fd7817ad57fb.rlib --extern opentelemetry_jaeger=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_jaeger-9ebf47742e5e063f.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tracing_opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libtracing_opentelemetry-8f8cfc1900c3a663.rlib --extern tracing_subscriber=/work/oxidecomputer/crucible/target/debug/deps/libtracing_subscriber-e39dae5ba339bc78.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
928 Running `rustc --crate-name crucible_integration_tests --edition=2021 integration_tests/src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test -C metadata=8902d603847d3610 -C extra-filename=-8902d603847d3610 --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_client_types=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_client_types-bd54c4335d2370bd.rlib --extern crucible_downstairs=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_downstairs-6276be71be5284a4.rlib --extern crucible_pantry=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry-fe633af5059fe3a7.rlib --extern crucible_pantry_client=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry_client-ccb9ddeebb23cea2.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern httptest=/work/oxidecomputer/crucible/target/debug/deps/libhttptest-174da737d96e2af6.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern 
serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
929 Running `rustc --crate-name crucible_downstairs --edition=2021 downstairs/src/main.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --emit=dep-info,link -C embed-bitcode=no -C debuginfo=2 --test --cfg 'feature="default"' -C metadata=3ed9735920c1592d -C extra-filename=-3ed9735920c1592d --out-dir /work/oxidecomputer/crucible/target/debug/deps -C incremental=/work/oxidecomputer/crucible/target/debug/incremental -L dependency=/work/oxidecomputer/crucible/target/debug/deps --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_downstairs=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_downstairs-6276be71be5284a4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern 
http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern hyper_staticfile=/work/oxidecomputer/crucible/target/debug/deps/libhyper_staticfile-559b4389ef952563.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern mime_guess=/work/oxidecomputer/crucible/target/debug/deps/libmime_guess-66974d6c31968dc2.rlib --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-5524fd7817ad57fb.rlib --extern opentelemetry_jaeger=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_jaeger-9ebf47742e5e063f.rlib --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rlib --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern repair_client=/work/oxidecomputer/crucible/target/debug/deps/librepair_client-1452d56087b6ccb7.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rlib --extern 
schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rlib --extern tracing_opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libtracing_opentelemetry-8f8cfc1900c3a663.rlib --extern tracing_subscriber=/work/oxidecomputer/crucible/target/debug/deps/libtracing_subscriber-e39dae5ba339bc78.rlib --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -L native=/usr/lib/amd64 -L 
native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out -L native=/usr/ssl-3/lib/amd64`
930 Finished test [unoptimized + debuginfo] target(s) in 5m 19s
931 Running `/work/oxidecomputer/crucible/target/debug/deps/cmon-2a9909624d24c98d --nocapture`
932 
933 running 0 tests
934 
935 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
936 
937 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible-53d074fabbf363e8 --nocapture`
938 
939 running 351 tests
940 test block_req::test::test_blockreqwaiter_send ... ok
941 test block_req::test::test_blockreq_and_blockreqwaiter_err ... ok
942 test block_req::test::test_blockreq_and_blockreqwaiter ... ok
943 {{{""msg"msg":"Upstairs starts":"Upstairs starts",,""v"v"::00,","name":name"":"crucible"crucible","level",:"level":3030{"msg":"Upstairs starts","v":0,"name":"crucible","level":30"msg":"Upstairs starts","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.417161849Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time,"":pid"":42912023-09-22T23:08:02.417107971Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,",upstairs"":upstairs":11}
944 ,"time":"2023-09-22T23:08:02.417217374Z"{,"hostname":"ip-10-150-1-74.us-west-2.compute.internal",""pid":msg"4291:","upstairs"Crucible Version: BuildInfo {:\n1 version: }\"
945 0.0.1\",\n git_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",{\n git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\n git_branch: \"main\",\n rustc_semver: \"1.70.0\",\n rustc_channel: \"stable\",\n rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \""msg":"90c541806f23a127002de5b4038be731ba1458ca\",\nCrucible Version: BuildInfo { cargo_triple: \n\" version: \"x86_64-unknown-illumos\"0.0.1\",,\n\n git_sha: \" debug: true,\n opt_level: 0,\ned48f294784d46ea7d4bb99336918b74358eca46}"\",\n git_commit_timestamp: \","v"2023-09-22T22:51:18.000000000Z:\",\n0 git_branch: \",main\"",\nname":" rustc_semver: \"crucible"1.70.0\",,\n"level rustc_channel: \""stable\":,\n30 rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: \"x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\n}","v":0,"name":"crucible","level",:"30time":"2023-09-22T23:08:02.417922851Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
946 ,"time":"2023-09-22T23:08:02.417944281Z",{"hostname":""msgip-10-150-1-74.us-west-2.compute.internal"",:""pid":4291Upstairs <-> Downstairs Message Version: 4",","upstairsv""::01,"}name
947 ":"crucible","level":30{"msg":"Upstairs <-> Downstairs Message Version: 4","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.417988891Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
948 ,"time":"{2023-09-22T23:08:02.418002831Z","hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"Crucible stats registered with UUID: 024c3783-b9b4-4453-823f-769dad90d4f2"upstairs":,1"v}"
949 :0,"name":"{crucible","level":30"msg":"Crucible stats registered with UUID: ed581aed-4fb6-4c12-84d1-a857283bcda2","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.418041683Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,",time":""upstairs":12023-09-22T23:08:02.418051531Z"}
950 ,"hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291",msg"":"upstairs":1}
951 Crucible 024c3783-b9b4-4453-823f-769dad90d4f2 has session id: 3ce64a0f-e985-43b7-a27a-96f3099e6cf4","v":0{,"name":"crucible",""level"msg:"30:"Crucible ed581aed-4fb6-4c12-84d1-a857283bcda2 has session id: 9bfc6996-1929-4f26-805c-da7e600d5990","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.418095651Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,",upstairs"":time"1:"}
952 2023-09-22T23:08:02.418105923Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
953 }
9542023-09-22T23:08:02.418ZINFOcrucible: Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, } upstairs = 1
9552023-09-22T23:08:02.418ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4 upstairs = 1
9562023-09-22T23:08:02.418ZINFOcrucible: Crucible stats registered with UUID: df6648fa-e480-4dd9-b7c4-7fec45dbddf8 upstairs = 1
9572023-09-22T23:08:02.418ZINFOcrucible: Crucible df6648fa-e480-4dd9-b7c4-7fec45dbddf8 has session id: c2bd09c5-6ed6-4145-ba72-617a46286f0f upstairs = 1
958 ,"time":"2023-09-22T23:08:02.417100875Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
9592023-09-22T23:08:02.418ZINFOcrucible: Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, } upstairs = 1
960 {"msg":"Upstairs <-> Downstairs Message Version: 4","v":0,"name":"crucible","level":30{"msg":"[0] connecting to 127.0.0.1:52643","v":,0",time":""name":"crucible2023-09-22T23:08:02.418514768Z"",","hostname"level"::"30ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
961 {"msg":"Crucible stats registered with UUID: a2b1abbb-2148-4d24-93b2-2a0a303e7718","v":0,"name":"crucible","level",:"30time":"2023-09-22T23:08:02.418536976Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,","time":"2023-09-22T23:08:02.418559377Z","hostname":"looper"ip-10-150-1-74.us-west-2.compute.internal":,""0pid"":4291,","upstairs":upstairs":11}}
962 
9632023-09-22T23:08:02.418ZINFOcrucible: [0] connecting to 127.0.0.1:60909 looper = 0 upstairs = 1
9642023-09-22T23:08:02.418ZINFOcrucible: [1] connecting to 127.0.0.1:41466 looper = 1 upstairs = 1
9652023-09-22T23:08:02.419ZINFOcrucible: [2] connecting to 127.0.0.1:55647 looper = 2 upstairs = 1
9662023-09-22T23:08:02.419ZINFOcrucible: up_listen starts task = up_listen upstairs = 1
9672023-09-22T23:08:02.419ZINFOcrucible: Wait for all three downstairs to come online upstairs = 1
9682023-09-22T23:08:02.419ZINFOcrucible: Flush timeout: 86400 upstairs = 1
9692023-09-22T23:08:02.419ZINFOcrucible: [0] connecting to 127.0.0.1:39941 looper = 0 upstairs = 1
9702023-09-22T23:08:02.419ZINFOcrucible: [1] connecting to 127.0.0.1:51919 looper = 1 upstairs = 1
9712023-09-22T23:08:02.419ZINFOcrucible: [2] connecting to 127.0.0.1:45755 looper = 2 upstairs = 1
9722023-09-22T23:08:02.419ZINFOcrucible: up_listen starts task = up_listen upstairs = 1
9732023-09-22T23:08:02.419ZINFOcrucible: Wait for all three downstairs to come online upstairs = 1
9742023-09-22T23:08:02.419ZINFOcrucible: Flush timeout: 86400 upstairs = 1
9752023-09-22T23:08:02.419ZINFOcrucible: [1] connecting to 127.0.0.1:34619 looper = 1 upstairs = 1
9762023-09-22T23:08:02.419ZINFOcrucible: [2] connecting to 127.0.0.1:44064 looper = 2 upstairs = 1
9772023-09-22T23:08:02.419ZINFOcrucible: up_listen starts task = up_listen upstairs = 1
978 {"msg":"Wait for all three downstairs to come online","v":0{,"name":"crucible","level":30"msg":"Crucible a2b1abbb-2148-4d24-93b2-2a0a303e7718 has session id: 8079de44-89af-4e82-ab2d-095962b2363a","v":0,"name":"crucible","level":,"30time":"2023-09-22T23:08:02.419751463Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
979 {"msg":","Flush timeout: 86400"time",:""v":02023-09-22T23:08:02.419767569Z","name,":""crucible"hostname":,""level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
980 ,"time{":"2023-09-22T23:08:02.419798287Z"",msg":""hostname":"[0] connecting to 127.0.0.1:61368","ip-10-150-1-74.us-west-2.compute.internal"v,"":pid":04291,"name",:""crucible"upstairs,"":level1":}30
981 ,"time":"2023-09-22T23:08:02.419834637Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"looper":"0","upstairs":1}
9822023-09-22T23:08:02.419ZINFOcrucible: [1] connecting to 127.0.0.1:50978 looper = 1 upstairs = 1
983 {"msg{":"[2] connecting to 127.0.0.1:57228"","vmsg""::"0,"name":"crucible"[0] ed581aed-4fb6-4c12-84d1-a857283bcda2 looper connected","level",:"v30":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.419925649Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal,"","time"pid:"":4291,2023-09-22T23:08:02.419930312Z""looper",:""2"hostname",:""upstairs":1ip-10-150-1-74.us-west-2.compute.internal}"
984 ,"pid":4291{,"looper":""0"msg":,""upstairs"up_listen starts:"1,"v"}:
985 0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.41996917Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1,"task":"up_listen"}
986 {"msg":"Wait for all three downstairs to come online","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.420005751Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":{1}
987 "msg":"{"[0] 024c3783-b9b4-4453-823f-769dad90d4f2 looper connected"msg":"{,"Flush timeout: 86400v"":,"0"v":,0msg"",:"name""name"::""crucible[0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 looper connected""crucible",,""level"level:,30""v"::300,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.420060398Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1,}"
988 ,time"":"time"{:"2023-09-22T23:08:02.420063804Z"",2023-09-22T23:08:02.420068874Z"msg"",:"hostname""hostname"::"[0] a2b1abbb-2148-4d24-93b2-2a0a303e7718 looper connected"","v"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal:,""pid0",,""pid:"4291name:"4291:,"",looper""crucible"looper"::,""0"0,"""level",upstairs""::30upstairs"1:1}
989 }
990 ,"time":"2023-09-22T23:08:02.420134198Z","{hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg",:""pid":4291[0] Proc runs for 127.0.0.1:39941 in state New",",looper"":"v0"":,"0upstairs,":"1name":}"
991 crucible","level":30,"time":"2023-09-22T23:08:02.420175591Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
9922023-09-22T23:08:02.420ZINFOcrucible: [0] Proc runs for 127.0.0.1:52643 in state New upstairs = 1
993 {"msg":"[0] Proc runs for 127.0.0.1:61368 in state New","v":0,"name":"crucible","level":30{"msg":","time":"[0] Proc runs for 127.0.0.1:60909 in state New"2023-09-22T23:08:02.420330114Z",",v"":hostname":"0,"name":ip-10-150-1-74.us-west-2.compute.internal"","crucible"pid":,4291"level,"":upstairs"30:1}
994 ,"time":"2023-09-22T23:08:02.420359995Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
995 {{""msgmsg""::""{[1] a2b1abbb-2148-4d24-93b2-2a0a303e7718 looper connected[1] ed581aed-4fb6-4c12-84d1-a857283bcda2 looper connected""",,""vvmsg""::"0:,"{"0name",:""name":crucible""[1] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 looper connectedcrucible"","",msg"levellevel"":,:3030"":v"":0,"name":"[1] 024c3783-b9b4-4453-823f-769dad90d4f2 looper connected"crucible",","level"v"::300,"name":"crucible","level":30,,""timetime""::""2023-09-22T23:08:02.420541835Z2023-09-22T23:08:02.420541832Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pid"pid:":42914291,",,""timelooperlooper""::""1,"1,"""",upstairs"":upstairs:"time1:"":}1
996 }2023-09-22T23:08:02.420552781Z""
997 ,2023-09-22T23:08:02.420557696Z""hostname,"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal",ip-10-150-1-74.us-west-2.compute.internal""pid",:"4291pid":4291,"looper,"":"looper1"":,""1"upstairs":,"1upstairs":}1
998 }
999 {"msg":"[1] Proc runs for 127.0.0.1:51919 in state New","v":0,"name":"crucible","level":30{,""time":"msg":"2023-09-22T23:08:02.420642014Z","[1] Proc runs for 127.0.0.1:41466 in state New"hostname":","v":0ip-10-150-1-74.us-west-2.compute.internal",,""name"pid:":"4291crucible",,""level":upstairs"30:1}
1000 {"msg":"[2] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 looper connected","v":0,"name":"crucible",",level"":time"30:"2023-09-22T23:08:02.420675248Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1001 ,"time":"2023-09-22T23:08:02.420695691Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"looper":"2","upstairs":1}
1002 {"msg":"[2] Proc runs for 127.0.0.1:45755 in state New","v":0,"name":"crucible","level":{30"msg":"[2] ed581aed-4fb6-4c12-84d1-a857283bcda2 looper connected","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.420743127Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1003 ,"time":"2023-09-22T23:08:02.420756491Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"looper":"2","upstairs":1}
1004 {"msg":"[2] Proc runs for 127.0.0.1:55647 in state New","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.420806756Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}The guest has requested activation
1005 
10062023-09-22T23:08:02.420ZINFOcrucible: [1] Proc runs for 127.0.0.1:34619 in state New upstairs = 1
10072023-09-22T23:08:02.421ZINFOcrucible: [2] 024c3783-b9b4-4453-823f-769dad90d4f2 looper connected looper = 2 upstairs = 1
10082023-09-22T23:08:02.421ZINFOcrucible: [2] Proc runs for 127.0.0.1:44064 in state New upstairs = 1
1009 The guest has requested activation
1010 {"msg":"[1] Proc runs for 127.0.0.1:50978 in state New","v":0,"name":"crucible","level":30The guest has requested activation
1011 ,"time":"2023-09-22T23:08:02.421160928Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
10122023-09-22T23:08:02.421ZINFOcrucible: [2] a2b1abbb-2148-4d24-93b2-2a0a303e7718 looper connected looper = 2 upstairs = 1
10132023-09-22T23:08:02.421ZINFOcrucible: [2] Proc runs for 127.0.0.1:57228 in state New upstairs = 1
10142023-09-22T23:08:02.421ZINFOcrucible: a2b1abbb-2148-4d24-93b2-2a0a303e7718 active request set upstairs = 1
1015 {{"msg"":"msg":"024c3783-b9b4-4453-823f-769dad90d4f2 active request set"df6648fa-e480-4dd9-b7c4-7fec45dbddf8 active request set","v",:"v0",:"name":0","crucible"name":","crucible"level":,"30level":30The guest has requested activation
1016 ,"time":,""time":"2023-09-22T23:08:02.421365898Z","2023-09-22T23:08:02.421367674Z"hostname":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal"4291,"pid":,"4291upstairs":,1"}upstairs
1017 ":1}
10182023-09-22T23:08:02.421ZINFOcrucible: ed581aed-4fb6-4c12-84d1-a857283bcda2 active request set upstairs = 1
10192023-09-22T23:08:02.421ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: df6648fa-e480-4dd9-b7c4-7fec45dbddf8, session_id: b793572f-19dc-423b-877e-0f0cfe9147d9, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 1
10202023-09-22T23:08:02.421ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: ed581aed-4fb6-4c12-84d1-a857283bcda2, session_id: 99a7dfa5-d205-4d07-9135-9f9e70387e3a, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 1
10212023-09-22T23:08:02.421ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: ed581aed-4fb6-4c12-84d1-a857283bcda2, session_id: 99a7dfa5-d205-4d07-9135-9f9e70387e3a, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 2
10222023-09-22T23:08:02.421ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: ed581aed-4fb6-4c12-84d1-a857283bcda2, session_id: 99a7dfa5-d205-4d07-9135-9f9e70387e3a, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 3
10232023-09-22T23:08:02.421ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: df6648fa-e480-4dd9-b7c4-7fec45dbddf8, session_id: b793572f-19dc-423b-877e-0f0cfe9147d9, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 2
10242023-09-22T23:08:02.421ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: df6648fa-e480-4dd9-b7c4-7fec45dbddf8, session_id: b793572f-19dc-423b-877e-0f0cfe9147d9, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 3
10252023-09-22T23:08:02.421ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 (b793572f-19dc-423b-877e-0f0cfe9147d9) New New New ds_transition to WaitActive upstairs = 1
10262023-09-22T23:08:02.422ZINFOcrucible: [0] Transition from New to WaitActive upstairs = 1
10272023-09-22T23:08:02.422ZINFOcrucible: [0] client is_active_req TRUE, promote! session b793572f-19dc-423b-877e-0f0cfe9147d9 upstairs = 1
10282023-09-22T23:08:02.422ZINFOcrucible: [1] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 (b793572f-19dc-423b-877e-0f0cfe9147d9) WaitActive New New ds_transition to WaitActive upstairs = 1
10292023-09-22T23:08:02.422ZINFOcrucible: [1] Transition from New to WaitActive upstairs = 1
10302023-09-22T23:08:02.422ZINFOcrucible: [1] client is_active_req TRUE, promote! session b793572f-19dc-423b-877e-0f0cfe9147d9 upstairs = 1
10312023-09-22T23:08:02.422ZINFOcrucible: [2] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 (b793572f-19dc-423b-877e-0f0cfe9147d9) WaitActive WaitActive New ds_transition to WaitActive upstairs = 1
10322023-09-22T23:08:02.422ZINFOcrucible: [2] Transition from New to WaitActive upstairs = 1
10332023-09-22T23:08:02.422ZINFOcrucible: [2] client is_active_req TRUE, promote! session b793572f-19dc-423b-877e-0f0cfe9147d9 upstairs = 1
10342023-09-22T23:08:02.422ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: df6648fa-e480-4dd9-b7c4-7fec45dbddf8, session_id: b793572f-19dc-423b-877e-0f0cfe9147d9, gen: 1 } downstairs = 1
10352023-09-22T23:08:02.422ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: df6648fa-e480-4dd9-b7c4-7fec45dbddf8, session_id: b793572f-19dc-423b-877e-0f0cfe9147d9, gen: 1 } downstairs = 2
10362023-09-22T23:08:02.422ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: df6648fa-e480-4dd9-b7c4-7fec45dbddf8, session_id: b793572f-19dc-423b-877e-0f0cfe9147d9, gen: 1 } downstairs = 3
10372023-09-22T23:08:02.422ZINFOcrucible: [0] ed581aed-4fb6-4c12-84d1-a857283bcda2 (99a7dfa5-d205-4d07-9135-9f9e70387e3a) New New New ds_transition to WaitActive upstairs = 1
10382023-09-22T23:08:02.422ZINFOcrucible: [0] Transition from New to WaitActive upstairs = 1
10392023-09-22T23:08:02.422ZINFOcrucible: [0] client is_active_req TRUE, promote! session 99a7dfa5-d205-4d07-9135-9f9e70387e3a upstairs = 1
10402023-09-22T23:08:02.422ZINFOcrucible: [1] ed581aed-4fb6-4c12-84d1-a857283bcda2 (99a7dfa5-d205-4d07-9135-9f9e70387e3a) WaitActive New New ds_transition to WaitActive upstairs = 1
10412023-09-22T23:08:02.422ZINFOcrucible: [1] Transition from New to WaitActive upstairs = 1
10422023-09-22T23:08:02.422ZINFOcrucible: [1] client is_active_req TRUE, promote! session 99a7dfa5-d205-4d07-9135-9f9e70387e3a upstairs = 1
10432023-09-22T23:08:02.422ZINFOcrucible: [2] ed581aed-4fb6-4c12-84d1-a857283bcda2 (99a7dfa5-d205-4d07-9135-9f9e70387e3a) WaitActive WaitActive New ds_transition to WaitActive upstairs = 1
10442023-09-22T23:08:02.422ZINFOcrucible: [2] Transition from New to WaitActive upstairs = 1
10452023-09-22T23:08:02.422ZINFOcrucible: [2] client is_active_req TRUE, promote! session 99a7dfa5-d205-4d07-9135-9f9e70387e3a upstairs = 1
10462023-09-22T23:08:02.422ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: ed581aed-4fb6-4c12-84d1-a857283bcda2, session_id: 99a7dfa5-d205-4d07-9135-9f9e70387e3a, gen: 1 } downstairs = 1
10472023-09-22T23:08:02.422ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: ed581aed-4fb6-4c12-84d1-a857283bcda2, session_id: 99a7dfa5-d205-4d07-9135-9f9e70387e3a, gen: 1 } downstairs = 2
1048 {"msg":"negotiate packet PromoteToActive { upstairs_id: ed581aed-4fb6-4c12-84d1-a857283bcda2, session_id: 99a7dfa5-d205-4d07-9135-9f9e70387e3a, gen: 1 }","v":0,"name":"crucible","level":30{","timemsg":"":"2023-09-22T23:08:02.42298288Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"downstairs"negotiate packet HereIAm { version: 4, upstairs_id: 024c3783-b9b4-4453-823f-769dad90d4f2, session_id: db076280-2529-45a1-a093-a3cd3f0799dc, gen: 1, read_only: false, encrypted: false, alternate_versions: [] }":3},
1049 "v":0,"{name":"crucible"","msg"level":":30negotiate packet RegionInfoPlease","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:08:02.423029769Z"time":,""hostname":"2023-09-22T23:08:02.42302412Z","ip-10-150-1-74.us-west-2.compute.internalhostname"":,""pid":4291ip-10-150-1-74.us-west-2.compute.internal",","downstairspid""::42911}
1050 ,"downstairs":1{}
1051 "msg":"negotiate packet RegionInfoPlease","v{":0,"name":""crucible"msg",:""level":30negotiate packet HereIAm { version: 4, upstairs_id: 024c3783-b9b4-4453-823f-769dad90d4f2, session_id: db076280-2529-45a1-a093-a3cd3f0799dc, gen: 1, read_only: false, encrypted: false, alternate_versions: [] }","v":0,"name":"crucible,""time",:""level":302023-09-22T23:08:02.423085422Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"downstairs":2}
1052 {,""time"msg:"":"2023-09-22T23:08:02.42310327Z"negotiate packet RegionInfoPlease",","hostname"v"::"0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible",,""pid":level"4291:30,"downstairs":2}
1053 {"msg":,""time":"2023-09-22T23:08:02.423135861Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291negotiate packet HereIAm { version: 4, upstairs_id: 024c3783-b9b4-4453-823f-769dad90d4f2, session_id: db076280-2529-45a1-a093-a3cd3f0799dc, gen: 1, read_only: false, encrypted: false, alternate_versions: [] },"","downstairs"v"::30},
1054 "name":"crucible"{,"level":30"msg":"[0] downstairs client at 127.0.0.1:60909 has UUID 761a4173-d6d6-4e0b-98cb-d77e4463419a","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.423175522Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,,""time":downstairs"":3}2023-09-22T23:08:02.423186888Z
1055 ","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":"4291msg":","upstairs":1}
1056 [0] 024c3783-b9b4-4453-823f-769dad90d4f2 (db076280-2529-45a1-a093-a3cd3f0799dc) New New New ds_transition to WaitActive","v":0{,"name":"crucible"","msg":level":"30[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 761a4173-d6d6-4e0b-98cb-d77e4463419a, encrypted: false, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible",,""level"time"::30"2023-09-22T23:08:02.423231359Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1057 ,"time":"{2023-09-22T23:08:02.423250072Z",""hostname":"msg":"ip-10-150-1-74.us-west-2.compute.internal"[0] Transition from New to WaitActive,""pid",:"4291v":,0","upstairs"name"::"1crucible"}
1058 ,"level":30,"time":"2023-09-22T23:08:02.423289472Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1059 {"msg":"[0] client is_active_req TRUE, promote! session db076280-2529-45a1-a093-a3cd3f0799dc"{,"v":0,"name":""crucible"msg",:""level":30negotiate packet RegionInfoPlease","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.423336014Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""pid":time4291":","upstairs":2023-09-22T23:08:02.423342762Z1"}
1060 ,"hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291"msg,"":"downstairs":1}
1061 [1] 024c3783-b9b4-4453-823f-769dad90d4f2 (db076280-2529-45a1-a093-a3cd3f0799dc) WaitActive New New ds_transition to WaitActive","v":0,{"name":"crucible"",msg":""level":30negotiate packet RegionInfoPlease","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.423397859Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.42340416Z"pid":,"4291hostname":","upstairs":1ip-10-150-1-74.us-west-2.compute.internal"},"
1062 pid":4291,"downstairs{":2}
1063 "msg":"{[1] Transition from New to WaitActive","v":"0msg",:""name":"crucible"negotiate packet RegionInfoPlease",","level":v30":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.423454785Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.423459493Z"pid":,4291"hostname":,""upstairs":1ip-10-150-1-74.us-west-2.compute.internal"},"
1064 pid":4291,"{downstairs":3}
1065 "msg":"{"msg":[1] client is_active_req TRUE, promote! session db076280-2529-45a1-a093-a3cd3f0799dc"","v":0,"name":"crucible"[0] downstairs client at 127.0.0.1:39941 has UUID c7df82f0-e00e-410b-ac45-3ec316aefac5",","level"v"::300,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.42351133Z",",hostname":""time":"ip-10-150-1-74.us-west-2.compute.internal",2023-09-22T23:08:02.423515401Z""pid,"":hostname":4291","upstairs":ip-10-150-1-74.us-west-2.compute.internal"1,"}pid"
1066 :4291,"upstairs":{1}
1067 "msg":"{"msg":"[2] 024c3783-b9b4-4453-823f-769dad90d4f2 (db076280-2529-45a1-a093-a3cd3f0799dc) WaitActive WaitActive New ds_transition to WaitActive","v":0,"name":"crucible","level":30[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: c7df82f0-e00e-410b-ac45-3ec316aefac5, encrypted: false, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.423569209Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,4291"time",:""upstairs":12023-09-22T23:08:02.423577538Z"}
1068 ,"hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291"msg",:""upstairs":1}[2] Transition from New to WaitActive
1069 ","v":0,"{name":"crucible"","msg"level"::"30df6648fa-e480-4dd9-b7c4-7fec45dbddf8 WaitActive WaitActive WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.42362419Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4291time":","upstairs":2023-09-22T23:08:02.42363307Z"1,}"
1070 hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291"msg",:""upstairs":1}
1071 [2] client is_active_req TRUE, promote! session db076280-2529-45a1-a093-a3cd3f0799dc","v":{0,"name":""cruciblemsg"":,""level":30[1] downstairs client at 127.0.0.1:51919 has UUID 2a63ad8c-68c4-4a78-a731-eeb47bc60138","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.423677979Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",","pid"time"::"4291,"2023-09-22T23:08:02.42368647Z"upstairs":,1"}hostname"
1072 :"{ip-10-150-1-74.us-west-2.compute.internal","pid":"4291msg":","upstairs":1}
1073 negotiate packet PromoteToActive { upstairs_id: 024c3783-b9b4-4453-823f-769dad90d4f2, session_id: db076280-2529-45a1-a093-a3cd3f0799dc, gen: 1 }","v":0{,"name":"crucible"",msg"":"level":30[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 2a63ad8c-68c4-4a78-a731-eeb47bc60138, encrypted: false, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible",",level"":time30":"2023-09-22T23:08:02.42373233Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"downstairs":1}
1074 ,"time":"{2023-09-22T23:08:02.423749563Z",""hostname"msg"::""ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1negotiate packet PromoteToActive { upstairs_id: 024c3783-b9b4-4453-823f-769dad90d4f2, session_id: db076280-2529-45a1-a093-a3cd3f0799dc, gen: 1 }"}
1075 ,"v":0,"name":"{crucible","level":30"msg":"df6648fa-e480-4dd9-b7c4-7fec45dbddf8 WaitActive WaitActive WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.423793905Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"downstairs":,2"}time":
1076 "2023-09-22T23:08:02.423804144Z",{"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1077 negotiate packet PromoteToActive { upstairs_id: 024c3783-b9b4-4453-823f-769dad90d4f2, session_id: db076280-2529-45a1-a093-a3cd3f0799dc, gen: 1 }","v":0,"{name":"crucible",""level"msg"::30"[2] downstairs client at 127.0.0.1:45755 has UUID f4127e0c-bc93-4b19-ac81-caee623ec347","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.423849045Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,","downstairs":time3":"}
1078 2023-09-22T23:08:02.423859566Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal"","msg"pid"::"4291,"negotiate packet RegionInfoPlease"upstairs":,1"v"}:
1079 0,"name":"crucible"{,"level":30"msg":"[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: f4127e0c-bc93-4b19-ac81-caee623ec347, encrypted: false, database_read_version: 1, database_write_version: 1 }",,""time":v"":0,"2023-09-22T23:08:02.423902561Zname"":","crucible"hostname",:""level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"downstairs":1}
1080 {,""time"msg"::""2023-09-22T23:08:02.423926464Z"negotiate packet RegionInfoPlease",","v"hostname"::0","name":"crucibleip-10-150-1-74.us-west-2.compute.internal"",",pid"":level4291":30,"upstairs":1}
1081 {"msg":","df6648fa-e480-4dd9-b7c4-7fec45dbddf8 WaitActive WaitActive WaitActivetime"":","v":2023-09-22T23:08:02.423958066Z"0,,""name":"hostname":"crucible","level":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":4291,"downstairs":2}
1082 {"msg",:""time":"negotiate packet RegionInfoPlease"2023-09-22T23:08:02.423983605Z,""v",:"0,hostname"":"name":"crucible",ip-10-150-1-74.us-west-2.compute.internal"","level"pid"::304291,"upstairs":1}
1083 {"msg":"negotiate packet ExtentVersionsPlease",,""time"v"::"0,"name"2023-09-22T23:08:02.424013241Z":","crucible"hostname":","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"downstairs":3}
1084 {,""time"msg"::""2023-09-22T23:08:02.424038688Z","hostname":"[0] downstairs client at 127.0.0.1:52643 has UUID af1eb908-dd0e-4704-a165-8346bf74d171","vip-10-150-1-74.us-west-2.compute.internal"":,0","pid"name"::4291"crucible",","downstairs"level"::130}
1085 {"msg":"negotiate packet ExtentVersionsPlease","v":0,"name":","crucible"time",:""level":302023-09-22T23:08:02.424073582Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1086 ,"time":"{2023-09-22T23:08:02.424091801Z",""hostname":msg"":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"downstairs":2}
1087 [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: af1eb908-dd0e-4704-a165-8346bf74d171, encrypted: false, database_read_version: 1, database_write_version: 1 }","v":{0,"name":""cruciblemsg"":","level":30negotiate packet ExtentVersionsPlease","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.424130306Z","hostname":","timeip-10-150-1-74.us-west-2.compute.internal"",:""pid":42912023-09-22T23:08:02.42413634Z",","upstairs":hostname":1"}
1088 ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"downstairs":3}
10892023-09-22T23:08:02.424ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 (b793572f-19dc-423b-877e-0f0cfe9147d9) WaitActive WaitActive WaitActive ds_transition to WaitQuorum upstairs = 1
10902023-09-22T23:08:02.424ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum upstairs = 1
10912023-09-22T23:08:02.424ZWARNcrucible: [0] new RM replaced this: None upstairs = 1
10922023-09-22T23:08:02.424ZINFOcrucible: [0] Starts reconcile loop upstairs = 1
10932023-09-22T23:08:02.424ZINFOcrucible: [1] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 (b793572f-19dc-423b-877e-0f0cfe9147d9) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum upstairs = 1
10942023-09-22T23:08:02.424ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum upstairs = 1
10952023-09-22T23:08:02.424ZWARNcrucible: [1] new RM replaced this: None upstairs = 1
10962023-09-22T23:08:02.424ZINFOcrucible: [1] Starts reconcile loop upstairs = 1
10972023-09-22T23:08:02.424ZINFOcrucible: [2] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 (b793572f-19dc-423b-877e-0f0cfe9147d9) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum upstairs = 1
1098 {"msg":"[2] Transition from WaitActive to WaitQuorum","v":0The guest has finished waiting for activation
1099 ,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.424571289Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
11002023-09-22T23:08:02.424ZWARNcrucible: [2] new RM replaced this: None upstairs = 1
11012023-09-22T23:08:02.424ZINFOcrucible: [2] Starts reconcile loop upstairs = 1
11022023-09-22T23:08:02.424ZINFOcrucible: [0] 127.0.0.1:39941 task reports connection:true upstairs = 1
1103 {{"msg":""msg"df6648fa-e480-4dd9-b7c4-7fec45dbddf8 WaitQuorum WaitQuorum WaitQuorum":,""v":0,"name":"crucible","level":30negotiate packet HereIAm { version: 4, upstairs_id: a2b1abbb-2148-4d24-93b2-2a0a303e7718, session_id: a7f1d842-f19a-4c81-abcd-c1f78df01722, gen: 1, read_only: false, encrypted: false, alternate_versions: [] }","v":0,"name":"crucible","level":,"30time":"2023-09-22T23:08:02.424717941Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1104 {"msg":"[0]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]",",v"":time":0","name":"2023-09-22T23:08:02.424730984Zcrucible"","level",:"30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"downstairs":1}
1105 ,"time":"2023-09-22T23:08:02.42476087Z","hostname{":"ip-10-150-1-74.us-west-2.compute.internal",""msg"pid:":"4291,"upstairs":1}
1106 negotiate packet HereIAm { version: 4, upstairs_id: a2b1abbb-2148-4d24-93b2-2a0a303e7718, session_id: a7f1d842-f19a-4c81-abcd-c1f78df01722, gen: 1, read_only: false, encrypted: false, alternate_versions: [] }"{,"v":0","{msg"name":":"crucible",""[0]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]levelmsg"""::30","v":0,"name":"024c3783-b9b4-4453-823f-769dad90d4f2 WaitActive WaitActive WaitActive"crucible","level":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.424810014Z",",hostname"":"time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.424816447Z,""pid",":hostname4291":","downstairs":ip-10-150-1-74.us-west-2.compute.internal"2,"pid":}4291
1107 ,,""timeupstairs""{::"1"}msg"
1108 2023-09-22T23:08:02.424823109Z:"","{hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg":","pid":negotiate packet HereIAm { version: 4, upstairs_id: a2b1abbb-2148-4d24-93b2-2a0a303e7718, session_id: a7f1d842-f19a-4c81-abcd-c1f78df01722, gen: 1, read_only: false, encrypted: false, alternate_versions: [] }"4291[0]R dirty: [false, false, false, false, false, false, false, false, false, false]",",v":"0,,upstairs""":v"name1"::"}0
1109 test impacted_blocks::test::empty_contains_nothing ... {{"ok"msg"msg
1110 :"":"crucibleed581aed-4fb6-4c12-84d1-a857283bcda2 WaitActive WaitActive WaitActive[1] downstairs client at 127.0.0.1:34619 has UUID b5944748-a762-4e0a-bbf1-fcda234e697f""","v",:,""0level",":name"30:"v"crucible":0,","level"name"::"30crucible","level":30,"time":"2023-09-22T23:08:02.424938213Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,","time":downstairs"":,3"2023-09-22T23:08:02.42494519Z"time"}:"
1111 ,"2023-09-22T23:08:02.424947725Z"hostname":","hostname"{:"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal"4291","msgpid"",::"4291"upstairs",:"1upstairs":}1
1112 [0] a2b1abbb-2148-4d24-93b2-2a0a303e7718 (a7f1d842-f19a-4c81-abcd-c1f78df01722) New New New ds_transition to WaitActive"},
1113 "{v":0,"name":""crucible"msg{,"":"level":"30msg":","[1] downstairs client at 127.0.0.1:41466 has UUID e69c25b9-b0d6-406a-850b-6b5dba8b6e69"name",:""v"crucible[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: b5944748-a762-4e0a-bbf1-fcda234e697f, encrypted: false, database_read_version: 1, database_write_version: 1 }"":,,""0vlevel""::,030,,""name"time"name"::"""crucible:"2023-09-22T23:08:02.425012404Z""crucible,",",""level"level"hostname::3030":,""time":"ip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:08:02.425027911Z"",","pid":hostname"4291:","upstairs":ip-10-150-1-74.us-west-2.compute.internal,""1time,"":pid""}:
1114 ,2023-09-22T23:08:02.425046405Z4291"","timehostname,"{:"""upstairs"::ip-10-150-1-74.us-west-2.compute.internal"1,"}"
1115 "2023-09-22T23:08:02.425046431Zpidmsg"""::{4291,"",""hostname"upstairs:msg"""::1[0] Transition from New to WaitActive}"
1116 [1]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]""ip-10-150-1-74.us-west-2.compute.internal"{,,"",""pidv"v"":::msg042910",:"",,"name"024c3783-b9b4-4453-823f-769dad90d4f2 WaitActive WaitActive WaitActive:"""upstairsnamecrucible,"""v,""level:"0:,30":"1name"::}""
1117 cruciblecrucible,"","time"level:"":"{2023-09-22T23:08:02.425121117Z30",","level"hostname""::"msg30":"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4291time":,""upstairs":12023-09-22T23:08:02.42513842Z"},
1118 "hostname":"[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: e69c25b9-b0d6-406a-850b-6b5dba8b6e69, encrypted: false, database_read_version: 1, database_write_version: 1 }"{ip-10-150-1-74.us-west-2.compute.internal,""",msg""pid:"":v4291":,0[1]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"",upstairs",",:time1""v}"
1119 :"name{0:"",""2023-09-22T23:08:02.425145289Znamemsg""::"":""cruciblecrucible""[2] downstairs client at 127.0.0.1:44064 has UUID f94c50e4-ac3c-41cd-806f-21404d88edf4,"",level""v:"30:,,0","hostname"":name"":level"":ip-10-150-1-74.us-west-2.compute.internal,crucible""time,""30:"",level""2023-09-22T23:08:02.425190309Z:"30pid,"":hostname"4291:","upstairs":ip-10-150-1-74.us-west-2.compute.internal"1,"}pid"
1120 ,:"4291time":","upstairs":,1"}2023-09-22T23:08:02.425207483Z
1121 "{time,{"":hostname"":msg""":""ip-10-150-1-74.us-west-2.compute.internalmsg2023-09-22T23:08:02.425203803Z""",,"[1]R dirty: [false, false, false, false, false, false, false, false, false, false]:""pid",:"4291v":,0",""upstairsname":":"1[0] client is_active_req TRUE, promote! session a7f1d842-f19a-4c81-abcd-c1f78df01722"hostname"}crucible
1122 ":",{","ip-10-150-1-74.us-west-2.compute.internalvlevel"""msg:"30:,"""pid"::04291,"name":","crucible"upstairs",:"1level":,30"[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: f94c50e4-ac3c-41cd-806f-21404d88edf4, encrypted: false, database_read_version: 1, database_write_version: 1 }time"":,""}v"2023-09-22T23:08:02.425258673Z":
1123 ,{"time":"2023-09-22T23:08:02.425270299Z","hostname":""msg"ip-10-150-1-74.us-west-2.compute.internal":","pid":4291,"ed581aed-4fb6-4c12-84d1-a857283bcda2 WaitActive WaitActive WaitActive"upstairs":1,"}0v"
1124 ,:"0name",:"{"crucible"name",""msg":level"::30""crucible","level":30[1] a2b1abbb-2148-4d24-93b2-2a0a303e7718 (a7f1d842-f19a-4c81-abcd-c1f78df01722) WaitActive New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.425321427Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4291time":",",2023-09-22T23:08:02.425325215Zupstairs":"1",}"time
1125 "hostname"::""{2023-09-22T23:08:02.425332437Z"ip-10-150-1-74.us-west-2.compute.internal","",msg""hostname"::""pid":4291ip-10-150-1-74.us-west-2.compute.internal"024c3783-b9b4-4453-823f-769dad90d4f2 WaitActive WaitActive WaitActive",,"","pid"v"upstairs":::014291,}"
1126 name",:""upstairs"crucible{":,"1level":"}msg30":
1127 "{[2] downstairs client at 127.0.0.1:55647 has UUID 6fbc425f-e617-4cfe-95cb-547a3dd7a1a3","v":0","msg"name"::""The guest has finished waiting for activation
1128 crucible,""[1] Transition from New to WaitActive",time,"""v:"level""::2023-09-22T23:08:02.425392735Z"030,,""hostname"name"::""crucible","level"ip-10-150-1-74.us-west-2.compute.internal":,"30pid":4291,"upstairs":1}
1129 ,"time":"{2023-09-22T23:08:02.425425363Z","hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal"negotiate packet ExtentVersionsPlease,"",pid",v""":time"0:,:"4291name":",""crucible"upstairs2023-09-22T23:08:02.425434748Z","",:level"1:"}30
1130 hostname":"ip-10-150-1-74.us-west-2.compute.internal"{,"pid":4291","msg":upstairs"":1,"}time"
1131 :"2023-09-22T23:08:02.425472103Z"[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 6fbc425f-e617-4cfe-95cb-547a3dd7a1a3, encrypted: false, database_read_version: 1, database_write_version: 1 }",{","hostname"v"":"msg"::"ip-10-150-1-74.us-west-2.compute.internal"0,",pid"":name4291[1] client is_active_req TRUE, promote! session a7f1d842-f19a-4c81-abcd-c1f78df01722"":,,"""downstairs"v":crucible"1:,}0
1132 ","levelname""::{"30crucible",""levelmsg""::"30negotiate packet ExtentVersionsPlease","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.425525857Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","pid2023-09-22T23:08:02.425530466Z"":,",4291hostname"":,"time":""ip-10-150-1-74.us-west-2.compute.internal"upstairs,"2023-09-22T23:08:02.42553781Z"",pid:""1:hostname}4291"
1133 :,""upstairs":1ip-10-150-1-74.us-west-2.compute.internal"},
1134 "{pid":4291,""{downstairs"msg"::2""}msg"
1135 :"ed581aed-4fb6-4c12-84d1-a857283bcda2 WaitActive WaitActive WaitActive","v":{0,"name":""cruciblemsg":"",[2] a2b1abbb-2148-4d24-93b2-2a0a303e7718 (a7f1d842-f19a-4c81-abcd-c1f78df01722) WaitActive WaitActive New ds_transition to WaitActive""levelnegotiate packet ExtentVersionsPlease",",:"30v"":v"0:,"0name",:""name":"crucible"crucible",","levellevel""::3030,"time":"2023-09-22T23:08:02.42560798Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":,1"}time
1136 ,":""time":"{2023-09-22T23:08:02.425621189Z"2023-09-22T23:08:02.425620978Z","",msg"hostname""::""hostname":"ip-10-150-1-74.us-west-2.compute.internal"negotiate packet ExtentVersionsPlease"ip-10-150-1-74.us-west-2.compute.internal",,,""pid"pid""v:"4291::42910,,,"""name"upstairs"downstairs::"1:"}3
1137 crucible"}
1138 ,{"level":{30"msg":""msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level[0] 024c3783-b9b4-4453-823f-769dad90d4f2 (db076280-2529-45a1-a093-a3cd3f0799dc) WaitActive WaitActive WaitActive ds_transition to WaitQuorum"":,30"v":0,"name":"crucible",","level":time":30"2023-09-22T23:08:02.425686193Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time":,""downstairs":2023-09-22T23:08:02.425698288Z"1,",}"
1139 time"hostname"::""{2023-09-22T23:08:02.425705354Z"ip-10-150-1-74.us-west-2.compute.internal",,"""msghostname"pid":":"4291:",ip-10-150-1-74.us-west-2.compute.internal""negotiate packet ExtentVersionsPlease",upstairs""pid,"":v"1::4291}0,
1140 test impacted_blocks::test::empty_impacted_blocks_never_conflict ... ,"{upstairs""okname"":msg:
1141 ":"1"crucible"}
1142 ,[2] client is_active_req TRUE, promote! session a7f1d842-f19a-4c81-abcd-c1f78df01722""level"{,":v"30":msg"0:","name":"crucible"[0] Transition from WaitActive to WaitQuorum",",level"":v"30:0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.425788672Z","hostname":","ip-10-150-1-74.us-west-2.compute.internaltime"",,":"timepid""::"4291"2023-09-22T23:08:02.425796997Z","2023-09-22T23:08:02.425801644Z",,downstairs""":hostnamehostname""::""2ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid,"":pid"4291:}4291,
1143 ","upstairs":upstairs"1:1{}}
1144 
1145 {"msg":""msg"{:"negotiate packet ExtentVersionsPlease","v"[0] new RM replaced this: None"":,0"msg",v":":name0":"","crucible"name",:""level"crucible":,30"negotiate packet PromoteToActive { upstairs_id: a2b1abbb-2148-4d24-93b2-2a0a303e7718, session_id: a7f1d842-f19a-4c81-abcd-c1f78df01722, gen: 1 }level"":40,"v":0,"name":"crucible","level":30,",time"":"time":"2023-09-22T23:08:02.425884668Z"2023-09-22T23:08:02.425881654Z,"",hostname"":,hostname""":time""ip-10-150-1-74.us-west-2.compute.internal":,"ip-10-150-1-74.us-west-2.compute.internal"","2023-09-22T23:08:02.425890717Z"pid"pid",:"4291:hostname4291",:"","upstairs":downstairs"1:ip-10-150-1-74.us-west-2.compute.internal"3,}}
1146 "
1147 {pid":4291","{msg"downstairs:"":"1[0] Starts reconcile loop"}msg":",
1148 "v":0,"[0] ed581aed-4fb6-4c12-84d1-a857283bcda2 (99a7dfa5-d205-4d07-9135-9f9e70387e3a) WaitActive WaitActive WaitActive ds_transition to WaitQuorum"name"{:,"""cruciblemsg"":v":,""0level",:"30name":"crucible","level"negotiate packet PromoteToActive { upstairs_id: a2b1abbb-2148-4d24-93b2-2a0a303e7718, session_id: a7f1d842-f19a-4c81-abcd-c1f78df01722, gen: 1 }":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.42596589Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":,1"},time
1149 "":time"":{"2023-09-22T23:08:02.42597117Z",2023-09-22T23:08:02.425976017Z"""hostnamemsg"":,:"""hostname":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",",pid"":[1] 024c3783-b9b4-4453-823f-769dad90d4f2 (db076280-2529-45a1-a093-a3cd3f0799dc) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum"4291pid,",:"4291"upstairs",":v1"downstairs"::}02,
1150 }"
1151 {name":"crucible",""{msg"level"::""30msg[0] Transition from WaitActive to WaitQuorum","v":"0:,""name":"crucible","level":30The guest has finished waiting for activation
1152 negotiate packet PromoteToActive { upstairs_id: a2b1abbb-2148-4d24-93b2-2a0a303e7718, session_id: a7f1d842-f19a-4c81-abcd-c1f78df01722, gen: 1 }",",time"":v"":0,"2023-09-22T23:08:02.426041175Zname"":","crucible"hostname,"",level"::""30time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.426050422Z"pid":,4291"hostname",:""upstairs":1}ip-10-150-1-74.us-west-2.compute.internal"
1153 ,"pid":4291,"{,"time":upstairs""":msg12023-09-22T23:08:02.426071288Z"}"
1154 ,:""hostname":"{[1] Transition from WaitActive to WaitQuorum"ip-10-150-1-74.us-west-2.compute.internal",,""v"pid"::04291,""name"msg"::,"""crucible"downstairs"[0] new RM replaced this: None",":,3"levelv"":}0
1155 :,30"name":"crucible{","level":40"msg":"negotiate packet RegionInfoPlease","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.426127536Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",","time"pid"::"4291,2023-09-22T23:08:02.42613225Z""upstairs",:"1hostname":"}
1156 ip-10-150-1-74.us-west-2.compute.internal",,""pid"time"{:":42912023-09-22T23:08:02.426140725Z","msg"upstairs":"":,"1[1] new RM replaced this: None"}hostname
1157 ,"":"v":0{,ip-10-150-1-74.us-west-2.compute.internal"",name"":""pid"msgcrucible""::,4291""level",:"40[0] Starts reconcile loopdownstairs"":,1"v":}0
1158 ,"name":"crucible","level":{30",msg"":"time":"negotiate packet RegionInfoPlease"2023-09-22T23:08:02.4261991Z",",v"":hostname"0:","name":"crucibleip-10-150-1-74.us-west-2.compute.internal"",",",level"":pid"30time"::4291","2023-09-22T23:08:02.426212025Z"upstairs":,1"}hostname"
1159 :,""time":"ip-10-150-1-74.us-west-2.compute.internal{"2023-09-22T23:08:02.426231706Z","pid",:"4291"hostname":msg"":,""ip-10-150-1-74.us-west-2.compute.internal"upstairs[1] Starts reconcile loop"":,",pid"1":v"4291}:
1160 ,0","downstairs"name":{2:"}crucible"
1161 ,""msglevel""::30"{"msg":"negotiate packet RegionInfoPlease"[1] ed581aed-4fb6-4c12-84d1-a857283bcda2 (99a7dfa5-d205-4d07-9135-9f9e70387e3a) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum","v",:"0v",:"0name":","crucible"name":,,""leveltime"":":"crucible"30,2023-09-22T23:08:02.426288745Z"",level"":hostname"30:"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1162 ,"time":"{2023-09-22T23:08:02.426311232Z"","msg":hostname","":"time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.426316107Z,""[2] 024c3783-b9b4-4453-823f-769dad90d4f2 (db076280-2529-45a1-a093-a3cd3f0799dc) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum,pid"":"4291hostname",:"",v""ip-10-150-1-74.us-west-2.compute.internal:downstairs"0,"",pid":name"3:"":}crucible"
1163 4291,"level":,30"{upstairs":1}
1164 "msg":"{[0] downstairs client at 127.0.0.1:61368 has UUID 56a24d20-5fb0-4556-90e5-68512760a2f4","v",:"0"msg","time"name"::"":"crucible2023-09-22T23:08:02.426366087Z"",,""[1] Transition from WaitActive to WaitQuorum"hostname"level",:":"v"30:0ip-10-150-1-74.us-west-2.compute.internal",","namepid""::"4291crucible",,""upstairs"level"::130}
1165 ,"time":"{2023-09-22T23:08:02.426399712Z",""hostname":msg"":"ip-10-150-1-74.us-west-2.compute.internal"[2] Transition from WaitActive to WaitQuorum",",pid"":v4291":0,,,"""upstairs":time1name"}"::
1166 ""crucible","2023-09-22T23:08:02.426412432Z{level"":30,""msg":hostname"":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1167 [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 56a24d20-5fb0-4556-90e5-68512760a2f4, encrypted: false, database_read_version: 1, database_write_version: 1 }",","time"v":{:"0,"name":""crucible2023-09-22T23:08:02.426447329Z""msg,,""level":"hostname"":[1] new RM replaced this: None":"30,"v":0ip-10-150-1-74.us-west-2.compute.internal",,""pid"name:"4291:"crucible,"","upstairs"level:"1:,40"}time"
1168 :"2023-09-22T23:08:02.426480715Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal"[2] new RM replaced this: None",,""pid":v"4291:,0",,time"""name:upstairs""":2023-09-22T23:08:02.426494912Z"1:","}crucible
1169 hostname"":,""{level":40ip-10-150-1-74.us-west-2.compute.internal",""pid"msg"::"4291,"upstairs":a2b1abbb-2148-4d24-93b2-2a0a303e7718 WaitActive WaitActive WaitActive"1,"}v
1170 ",:"0time",:"{"name":"crucible"2023-09-22T23:08:02.426531338Z",,"""msglevel"hostname":""::"[1] Starts reconcile loop"30,"v"ip-10-150-1-74.us-west-2.compute.internal":,"0pid",:"4291name":","crucible"upstairs",":1level"}:
1171 30,{"time"":"msg":"2023-09-22T23:08:02.426560775Z"[2] Starts reconcile loop",","hostname"v:"":0,"name":ip-10-150-1-74.us-west-2.compute.internal"","crucible"pid,"",:level4291"",:"30time"upstairs"::"1}
1172 2023-09-22T23:08:02.42657612Z",",hostname"":{"time":"ip-10-150-1-74.us-west-2.compute.internal"",msg2023-09-22T23:08:02.426597074Z"",""pid"hostname"::":"4291ip-10-150-1-74.us-west-2.compute.internal",,""upstairspid[1] downstairs client at 127.0.0.1:50978 has UUID 4a0427cd-ca30-4862-a8cb-bfc11d93d1ff""":,:"1v":}04291,
1173 ,""name":upstairs"":crucible1{"}The guest has finished waiting for activation
1174 
1175 ","msg"level"{:":"30msg":"[0] 127.0.0.1:52643 task reports connection:true","[2] ed581aed-4fb6-4c12-84d1-a857283bcda2 (99a7dfa5-d205-4d07-9135-9f9e70387e3a) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum"v":0,,""v"name"::"0crucible",,""name"level":":,30"crucible"time":","level":2023-09-22T23:08:02.426656078Z"30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,4291"time",:""upstairs":12023-09-22T23:08:02.426672258Z"},
1176 "hostname":","{time"ip-10-150-1-74.us-west-2.compute.internal":,"""pid"2023-09-22T23:08:02.426676677Z":msg,4291"",:""hostname":upstairs":"1}
1177 ip-10-150-1-74.us-west-2.compute.internal","pid"{:4291[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 4a0427cd-ca30-4862-a8cb-bfc11d93d1ff, encrypted: false, database_read_version: 1, database_write_version: 1 }""msg":,"","v":upstairs"0:,1024c3783-b9b4-4453-823f-769dad90d4f2 WaitQuorum WaitQuorum WaitQuorum"",}name""
1178 v"::"0,crucible""{,name"":"level":crucible""30,msg"":level"":30[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:08:02.426748226Z"time":,""hostname":"2023-09-22T23:08:02.426744678Z","hostname"ip-10-150-1-74.us-west-2.compute.internal":,""pid":4291,ip-10-150-1-74.us-west-2.compute.internal",,""pid"upstairs"":time":42911:}",
1179 "2023-09-22T23:08:02.426755916Z"upstairs"{:,""1msg"hostname"}::"
1180 "ip-10-150-1-74.us-west-2.compute.internal","pid":4291[0]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"{,,""v":upstairs"0:,"1"msg"name"::""}crucible"
1181 ,a2b1abbb-2148-4d24-93b2-2a0a303e7718 WaitActive WaitActive WaitActive""level":,"30v"{:0,"name":""crucible"msg",:""level":30[2] new RM replaced this: None",",v"":time"0:","name":"2023-09-22T23:08:02.426819392Z"crucible",","hostname"level"::"40ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs,"":1time"}:
1182 "{2023-09-22T23:08:02.426827874Z"",msg"":"hostname":","time":[0]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"ip-10-150-1-74.us-west-2.compute.internal,""",v"2023-09-22T23:08:02.426839692Z"pid",""::hostname"0:,""4291name":"ip-10-150-1-74.us-west-2.compute.internalcrucible"",,,"""levelpid""upstairs":::4291301,"}upstairs"
1183 :1}
1184 ,{"time":"{"2023-09-22T23:08:02.426884196Z"msg",:"""hostname"msg"::""[2] downstairs client at 127.0.0.1:57228 has UUID 2ac204bf-fd46-40b7-a314-196d508e471d[2] Starts reconcile loop""ip-10-150-1-74.us-west-2.compute.internal,,""vv""":,0:",pid"0:",4291name",:""name"upstairs"::""1crucible}"
1185 crucible","{,level":""30level"msg"::"30[0]R dirty: [false, false, false, false, false, false, false, false, false, false]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.426936938Z",",time"":,time"""hostname":2023-09-22T23:08:02.426939613Z":"","2023-09-22T23:08:02.426948382Z"hostname"ip-10-150-1-74.us-west-2.compute.internal,"",":pid"hostname:"4291":",ip-10-150-1-74.us-west-2.compute.internal""ip-10-150-1-74.us-west-2.compute.internal","upstairs",pid"::"42911pid":,}"4291
1186 ,upstairs"":upstairs"1:{1}}
1187 
1188 "{msg":"{"[0] 127.0.0.1:60909 task reports connection:true"msg",msg""":":v"":[1]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"0,,""vname""::"0crucible",","name"level":[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 2ac204bf-fd46-40b7-a314-196d508e471d, encrypted: false, database_read_version: 1, database_write_version: 1 }"":crucible"30,","v"level"::030,"name":"crucible","level":30,"time":","time":2023-09-22T23:08:02.427028838Z"","hostname":2023-09-22T23:08:02.427032937Z"",,""timeip-10-150-1-74.us-west-2.compute.internal"":"hostname",:""pid"2023-09-22T23:08:02.427037501Z":ip-10-150-1-74.us-west-2.compute.internal",,4291""hostname"pid:","":upstairs4291":ip-10-150-1-74.us-west-2.compute.internal1,"",}upstairs""
1189 :pid"1:}4291
1190 ,"{upstairs":{1}
1191 ""msg"msg:":""{[1]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]",""ed581aed-4fb6-4c12-84d1-a857283bcda2 WaitQuorum WaitQuorum WaitQuorum"v",msg""::0v",:""0name",:""crucible"a2b1abbb-2148-4d24-93b2-2a0a303e7718 WaitActive WaitActive WaitActive",name""level:,"":"30v"crucible":,"0level":,"30name":"crucible","level":30,"time":"2023-09-22T23:08:02.427123338Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1192 ,"time{",:""msg"":time":2023-09-22T23:08:02.427128267Z""","hostname":2023-09-22T23:08:02.427131734Z"","[1]R dirty: [false, false, false, false, false, false, false, false, false, false]hostname"ip-10-150-1-74.us-west-2.compute.internal"",:,"""v"pid"ip-10-150-1-74.us-west-2.compute.internal::"0,4291,""pid",":name4291":upstairs,"":"upstairs"crucible1:"1,}"}
1193 level"
1194 :{30"{msg":""[0]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"msg":","v":0negotiate packet ExtentVersionsPlease,"",",time""name"v"::":"0crucible"2023-09-22T23:08:02.427194738Z",,",level""":name"hostname"30::""crucible","level":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":4291,"upstairs":1}
1195 ,"time":"{2023-09-22T23:08:02.42722207Z""msg",:",""time":"hostname":"[2]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"2023-09-22T23:08:02.427227026Z","ip-10-150-1-74.us-west-2.compute.internal"v",,"":pidhostname0",""::name""4291:"ip-10-150-1-74.us-west-2.compute.internal"crucible,",,"""pidupstairs"level:"":1:30}4291
1196 ,"downstairs":1}
1197 ,{"time":"{"2023-09-22T23:08:02.427274535Z"msg":,"""msg":hostname""[0]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]":",negotiate packet ExtentVersionsPlease""v"ip-10-150-1-74.us-west-2.compute.internal":,,"0"pid"v"::4291,"0name,,""":upstairs""name"::crucible""1,crucible"}",level"
1198 :"30level"{:30"msg":"[2]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]","v":0,"name":"crucible","level":30,"time":","time"2023-09-22T23:08:02.427326017Z":,"","time"2023-09-22T23:08:02.427328198Z:hostname""":",2023-09-22T23:08:02.42733701Z""ip-10-150-1-74.us-west-2.compute.internal"hostname",":,"hostname"":"pid"ip-10-150-1-74.us-west-2.compute.internal":,"ip-10-150-1-74.us-west-2.compute.internalpid"":4291,4291",pid","":upstairs4291downstairs",:"2"upstairs"}::
1199 11}
1200 }{
1201 {"msg":""msg":"{negotiate packet ExtentVersionsPlease","v":0"[2]R dirty: [false, false, false, false, false, false, false, false, false, false]",",msg"":v""name:"0:,""[0]R dirty: [false, false, false, false, false, false, false, false, false, false]name"crucible:""",crucible",,"""vlevellevel":"30"::300,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.427421016Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,","upstairs":time"1:}",
1202 2023-09-22T23:08:02.427422952Z""{,time"":"hostname"msg""::""2023-09-22T23:08:02.427427433Z"Max found gen is 1ip-10-150-1-74.us-west-2.compute.internal,""",hostname",""v"::pid0"",:"4291name"ip-10-150-1-74.us-west-2.compute.internal",:"",crucible"downstairs"",pid""::level4291"3:,}30
1203 "upstairs":1}
1204 ,{"time":"{2023-09-22T23:08:02.427481988Z"","msg":hostname"":""msg":"ip-10-150-1-74.us-west-2.compute.internal","[1]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"pid":,"4291v",:"0[0] a2b1abbb-2148-4d24-93b2-2a0a303e7718 (a7f1d842-f19a-4c81-abcd-c1f78df01722) WaitActive WaitActive WaitActive ds_transition to WaitQuorum,"upstairsname"""::,"1v}"
1205 "crucible":{,0"",msg"level"":name"30::""crucible","Generation requested: 1 >= found:1"level",:"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.427529747Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","pid",:2023-09-22T23:08:02.427540289Z"4291",",hostname"time"upstairs"":::""1}ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.427536227Z","
1206 pid",:"4291hostname"{,:""upstairs"":msg"ip-10-150-1-74.us-west-2.compute.internal"1:"},
1207 [1]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]""pid,{"":v"4291":msg",:"0",Next flush: 1upstairs""":name",1:""}v":crucible
1208 "0,","level"name"::{"30crucible","level":"30msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,","time":"time":"2023-09-22T23:08:02.427615145Z"2023-09-22T23:08:02.427612502Z,""hostname":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid",ip-10-150-1-74.us-west-2.compute.internal":"4291time",",:""upstairs"pid"::12023-09-22T23:08:02.427624354Z4291}"
1209 ,","hostname":"upstairs"{:ip-10-150-1-74.us-west-2.compute.internal"1,"}"pidmsg":""
1210 :All extents match"4291,"v":,"0,upstairs""{name"::1"}"
1211 crucible"msg",:""level{":30[1]R dirty: [false, false, false, false, false, false, false, false, false, false]","v":0,"name":"crucible"","msg"level"::"30[0] new RM replaced this: None","v":0,"name":"crucible","level":40,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1212 {"msg":"[2]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]","v":0,"name":"crucible","level":30,,""timetime""::""2023-09-22T23:08:02.427690719Z2023-09-22T23:08:02.427756577Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291,",",upstairs""upstairs:":11time}}
1213 
1214 {"msg":"No downstairs repair required"{,"v":0",msg"":"name":"crucible","[2]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]",level,":"30v"":0time",:""name":"crucible"2023-09-22T23:08:02.427700749Z,""level":,30"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid,":"4291time":","upstairs2023-09-22T23:08:02.427817291Z"",,"":hostnametime""::""1}2023-09-22T23:08:02.427825784Z"ip-10-150-1-74.us-west-2.compute.internal",
1215 ","hostname"pid:"":4291,"ip-10-150-1-74.us-west-2.compute.internalupstairs"":,1{"}
1216 pid"":msg"4291{:",""msg":upstairs"":1[2]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"}No initial repair work was required
1217 ",",{"vv"":"0msg,"":name""::"0crucible",,"[2]R dirty: [false, false, false, false, false, false, false, false, false, false]level"",":v30"":name0":,""crucible"name":,""level":crucible"30,","level"time:":30"2023-09-22T23:08:02.427878304Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1218 ,"time":"{2023-09-22T23:08:02.427888504Z""msg,"":,hostname"":""Set Downstairs and Upstairs activetime"":",ip-10-150-1-74.us-west-2.compute.internal""v,"":pid0"2023-09-22T23:08:02.427887456Z":,"4291,,name"""upstairs:"hostname":"1:"crucible}"
1219 ,"ip-10-150-1-74.us-west-2.compute.internal"level{","":pid30"msg"::"4291Max found gen is 1",,""vupstairs""::10,},
1220 ""timename""::""crucible","2023-09-22T23:08:02.427922787Zlevel"":,30"{hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg",":pid"":4291,,""timeupstairs"":":[2]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]1"}2023-09-22T23:08:02.427941028Z
1221 ",,"{hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal",""pidv""::024c3783-b9b4-4453-823f-769dad90d4f2 is now active with session: db076280-2529-45a1-a093-a3cd3f0799dc4291"0,,"",upstairs"v:""1:}name"test control::test::test_crucible_control_openapi ... :
1222 0"crucible{,"""name"msg:ok"":crucible"",
1223 "level"Generation requested: 1 >= found:1,"":level,"":v30"30:0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.427999413Z","hostname,"":"time":"ip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:08:02.428004146Z"",",,pid"":hostname"4291":",time"":ip-10-150-1-74.us-west-2.compute.internalupstairs"",:"1"}pid
1224 ":2023-09-22T23:08:02.428000542Z"{4291,",""hostname":upstairsmsg""::"1"}
1225 024c3783-b9b4-4453-823f-769dad90d4f2 Set Active after no repair"ip-10-150-1-74.us-west-2.compute.internal,{"v":"0msg,"":"name":"Next flush: 1"crucible"",,,""v"level:"0:,30""pid"name"::"4291crucible",","level":upstairs30":1}
1226 ,,"time":""{time":"2023-09-22T23:08:02.428054335Z","2023-09-22T23:08:02.428047478Zhostname"":,"""msg":"hostnameip-10-150-1-74.us-west-2.compute.internal"":",""pid"::ip-10-150-1-74.us-west-2.compute.internal""4291[2]R dirty: [false, false, false, false, false, false, false, false, false, false]",,"",upstairs2023-09-22T23:08:02.427707985Z"pid""::1"}4291
1227 ,,v"{"hostname"upstairs""::msg1"}:"
1228 All extents match""{":,"0ip-10-150-1-74.us-west-2.compute.internal,""name":msg""":vcrucible"",:"Notify all downstairs, region set compare is done.0",pid"":",,4291""namev"",level":::0""crucible,"upstairs","":levelname130"":":}crucible30"
1229 ,"level":30{,"time":","2023-09-22T23:08:02.428130599Z","",time""msghostname:"":time"2023-09-22T23:08:02.428137042Z""":":ip-10-150-1-74.us-west-2.compute.internal,""","hostname"pid[0] Starts reconcile loop""::",2023-09-22T23:08:02.428128412Z""ip-10-150-1-74.us-west-2.compute.internalv4291",",,"""pidupstairs"":::14291hostname,}0"
1230 ":"upstairs{","ip-10-150-1-74.us-west-2.compute.internalmsg""::1""},
1231 No downstairs repair required""name,{"pid""vmsg""::0",:"Set check for repair":"name,"":"4291"v"crucible:crucible0","","name,level"""::level"30,:""30upstairs":crucible"1,"}level"
1232 ,:"30time":"2023-09-22T23:08:02.428214842Z"{,"hostname":""msg"ip-10-150-1-74.us-west-2.compute.internal":","pid,""Max found gen is 1time:"4291:","",,"2023-09-22T23:08:02.428226056Z"upstairs"",:"1time"v:}hostname
1233 """:{:"02023-09-22T23:08:02.428217517Z""ip-10-150-1-74.us-west-2.compute.internalmsg"",:"",,pidNo initial repair work was required""":,4291"hostname"v:",:"0"upstairs,"""namename"ip-10-150-1-74.us-west-2.compute.internal"",::"1"}pid"
1234 crucible::""{crucible"4291",msg""level:,,"""upstairs":[1] 127.0.0.1:34619 task reports connection:true30"",":level1v""::0},30"
1235 name,"":"time":crucible"","level"2023-09-22T23:08:02.428297015Z:"30{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1,",}"
1236 timetime"[1] a2b1abbb-2148-4d24-93b2-2a0a303e7718 (a7f1d842-f19a-4c81-abcd-c1f78df01722) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum:""{":",2023-09-22T23:08:02.428305722Z"2023-09-22T23:08:02.428314105Z"v"",:"0,""hostname"msghostname""::"":","Set Downstairs and Upstairs active"ip-10-150-1-74.us-west-2.compute.internal",,name"""vpidip-10-150-1-74.us-west-2.compute.internal"""::42910:,,"","name"upstairs:"":crucible"",1cruciblepid"":"},
1237 "level"level{":":msg30"4291:30","upstairs":1024c3783-b9b4-4453-823f-769dad90d4f2 Active Active Active"},"
1238 ,v"":time0":,""name":"2023-09-22T23:08:02.428371884Zcrucible""{,,""levelhostname""::"30","msg"ip-10-150-1-74.us-west-2.compute.internal:time"":,"""pid":Generation requested: 1 >= found:12023-09-22T23:08:02.42837382Z"",4291",time,"":upstairs",""2023-09-22T23:08:02.428388638Z"":,1v"}hostname"
1239 "::hostname{"":""msg":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal"0,,,"df6648fa-e480-4dd9-b7c4-7fec45dbddf8 is now active with session: b793572f-19dc-423b-877e-0f0cfe9147d9"pidname"""pid",:"4291v":,:"":0",4291upstairs""name:,crucible"1"}"
1240 :,"{upstairs"""levelmsgcrucible"":,""level:""1::Set check for repair30"},30"
1241 v":0,"name":,""timecrucible"",:""{level":302023-09-22T23:08:02.428454271Z",""hostname"msg"::""ip-10-150-1-74.us-west-2.compute.internal"[1] Transition from WaitActive to WaitQuorum,,",pid""time:"4291:",""",upstairstime"""2023-09-22T23:08:02.428467397Z:v"::"1"02023-09-22T23:08:02.428457095Z",}",,hostname
1242 """name"{::hostname""":""msgip-10-150-1-74.us-west-2.compute.internal"":,""ip-10-150-1-74.us-west-2.compute.internalpid"":crucibledf6648fa-e480-4dd9-b7c4-7fec45dbddf8 Set Active after no repair4291",,,""v"","upstairs:"0:,pid"":name1"}:
1243 "4291"crucible{"level,",""msglevel""::"30:"30upstairs"[2] 127.0.0.1:44064 task reports connection:true":,"1v":}0
1244 ,,""timename""::""crucible","2023-09-22T23:08:02.42853364Z"level",:"30{hostname",:""time":""msg"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.428534983Z",:,",""pidtime"Next flush: 1":"":",4291hostname",""2023-09-22T23:08:02.428548947Z"upstairs"v":,1"}:hostname:"":
1245 "0ip-10-150-1-74.us-west-2.compute.internal",,{ip-10-150-1-74.us-west-2.compute.internal"""msg,"":"pid""pidname"Notify all downstairs, region set compare is done.:"4291,","::"4291v"":upstairs,crucible"0",:"1"},name
1246 "":{"upstairs":"cruciblemsg""1level",:""level"}::
1247 30024c3783-b9b4-4453-823f-769dad90d4f2 Active Active Active"30,"v":0{,"name":"crucible",""msg"level"::,30""time":"[1] new RM replaced this: None"2023-09-22T23:08:02.428616483Z",",v":"0,hostname"":"name",,ip-10-150-1-74.us-west-2.compute.internal:""","time"crucible"timepid"""::"4291,:,2023-09-22T23:08:02.428625951Z""level"""2023-09-22T23:08:02.428617985Zupstairs,"":hostname1"}:
1248 ":"40{ip-10-150-1-74.us-west-2.compute.internal",,""msg"pid:"":"4291Set check for repair",hostname","":upstairsv""::10",}"
1249 name"ip-10-150-1-74.us-west-2.compute.internal"{:,"",cruciblemsg""time""",:""pid"Set check for repairlevel"",::30"":v":42912023-09-22T23:08:02.428659097Z"0,,",""name"upstairs:,"hostname""":crucible:""time,"":1ip-10-150-1-74.us-west-2.compute.internal""level,"2023-09-22T23:08:02.42868559Z:"30"}pid,
1250 "":,hostname"":time""4291:{",ip-10-150-1-74.us-west-2.compute.internal""2023-09-22T23:08:02.428707246Z,""",pid"msg"hostname:upstairs"4291"::,"""1upstairsip-10-150-1-74.us-west-2.compute.internal"":,1"}pid:""}:
1251 4291
1252 ,{All extents match"""msgupstairs""::1{}"
1253 [1] 127.0.0.1:51919 task reports connection:true""{,,""msg"v:"""msg"v"[0] received reconcile message:"0::,,"""vname""::"[1] Starts reconcile loopcrucible0","",name""0:level,,"""crucible:"30"name"v"::"0,,""crucible"level":,"name",30"level"time:"":"crucible","2023-09-22T23:08:02.428778624Z"level",:",30hostname"":time"":"ip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:08:02.428789601Z"",","pid":hostname4291":","upstairs":1ip-10-150-1-74.us-west-2.compute.internal"},
1254 ",pid":{4291":",time"msg"":upstairs:30""":1}
1255 df6648fa-e480-4dd9-b7c4-7fec45dbddf8 Active Active Active"2023-09-22T23:08:02.428799162Z",{"v,"":msg0",:""name"":"[0] All repairs completed, exitcrucible"",hostname","":vlevel""::030,"",name""ip-10-150-1-74.us-west-2.compute.internal":time"":,crucible"""pid",:,""2023-09-22T23:08:02.428823255Zleveltime"4291":":30,"","upstairs"2023-09-22T23:08:02.428839709Zhostname":"":,"ip-10-150-1-74.us-west-2.compute.internal"1hostname,":"",time"ip-10-150-1-74.us-west-2.compute.internal:"",""2023-09-22T23:08:02.428855962Z"pid",":pid"hostname4291":,:}4291
1256 "","upstairs"upstairsip-10-150-1-74.us-west-2.compute.internal:"1,"}"
1257 pid"{::4291{1,""upstairs""msg":}:"
1258 1msg":}"
1259 Set check for repair"[2] a2b1abbb-2148-4d24-93b2-2a0a303e7718 (a7f1d842-f19a-4c81-abcd-c1f78df01722) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum",{",{"vmsg""::0",""[0] Starts cmd_loopname"""msg":,""v:cruciblev""":,0",""level"name"::30:No downstairs repair required""0crucible,"",v,"""name",:"leveltime0""::30:,"""name"crucible2023-09-22T23:08:02.428927922Z"",:",""hostname,"":time""crucible":level"ip-10-150-1-74.us-west-2.compute.internal"":,30"2023-09-22T23:08:02.428941469Zpid"",,:"4291"hostname,level""":upstairs:"":301}ip-10-150-1-74.us-west-2.compute.internal
1260 ","pid":{4291,""upstairsmsg""::",1"}
1261 [2] 127.0.0.1:45755 task reports connection:true"time":{,"""msgv""::"02023-09-22T23:08:02.428959465Z",[1] received reconcile message"",,name,""""v:"time":":crucible0",,"hostname"namelevel""::""crucible30":"","level"2023-09-22T23:08:02.428967806Z":ip-10-150-1-74.us-west-2.compute.internal","30,hostname"":pid,"":time"4291":",",ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.428999471Ztimeupstairs""":,""":,2023-09-22T23:08:02.429006077Z""hostname"1:,"pid"}"ip-10-150-1-74.us-west-2.compute.internal
1262 :hostname""4291:,""pid"{,ip-10-150-1-74.us-west-2.compute.internal:"4291",,""upstairspid""::42911"},
1263 "msg"upstairs{":":msg1"}:
1264 "upstairs"":{df6648fa-e480-4dd9-b7c4-7fec45dbddf8 Active Active Active1"",msg"}[2] Transition from WaitActive to WaitQuorum"
1265 "v:"":,0"[1] All repairs completed, exit,{""name",:""v"v:crucible0",,""level"""::name30"0:msg"":,crucible"",""namelevelNo initial repair work was required"":,","":time30"crucible":v""":,0"2023-09-22T23:08:02.429080675Z"level",,"","timehostname""::"":name"30:"ip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:08:02.429094514Z"",",crucible"pid"",hostname:"4291:"",level"":ip-10-150-1-74.us-west-2.compute.internalupstairs"30,"":pid1":}4291
1266 ,"upstairs":1{,}""
1267 msg"time":{:"""msg":"Set check for repair"2023-09-22T23:08:02.429110226Z[1] Starts cmd_loop,"""v,"":v0",:"0,name,"""name:":"",cruciblecrucible"",,"""levellevel"hostname"time:"30::30"":"2023-09-22T23:08:02.429120559Z"ip-10-150-1-74.us-west-2.compute.internal",,""pid"hostname"::4291,"",time"":time"ip-10-150-1-74.us-west-2.compute.internal",2023-09-22T23:08:02.429154222Z""":,""upstairs,hostname2023-09-22T23:08:02.42915556Z"":",""pidhostnameip-10-150-1-74.us-west-2.compute.internal"":,""""pid:"ip-10-150-1-74.us-west-2.compute.internal::42914291"1,,,}""pid""upstairsupstairs""::42911:},
1268 1
1269 "}{
1270 "upstairs{msg"""::1msg}""
1271 :[2] new RM replaced this: None"{{"[0] received reconcile message""msg",:""v,"[2] received reconcile message"":v"0",,""namev""::"0:cruciblemsg"",,:""name"":"Set Downstairs and Upstairs active0cruciblelevel"","",:"30,levelname"""::v""30:crucible"0,",,"level"name"::"40",time"crucible"time""::"","level":2023-09-22T23:08:02.429245985Z2023-09-22T23:08:02.429252315Z""30,,""hostname"hostname:"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",","pid"pid:":42914291,","upstairs"upstairs:"1:}1
1272 },
1273 {"time"{":""msg"msg:"":",2023-09-22T23:08:02.429261283Z""[0] All repairs completed, exit[2] All repairs completed, exit"time"",:,"",v""v""::00,,""hostnamenamename""::"2023-09-22T23:08:02.429267906Z"crucible""crucible",,"",levellevel""::3030"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",",pid"":pid"4291:4291,",,""timetime""::""upstairs",:2023-09-22T23:08:02.429308641Z2023-09-22T23:08:02.429308819Z""",,upstairs"":hostname"11hostname""}::""
1274 ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pid"pid:"}{
1275 ":4291msg4291,","{:upstairs"""upstairs:"1"msg}:
1276 1"}{
1277 ":msg"{:"ed581aed-4fb6-4c12-84d1-a857283bcda2 is now active with session: 99a7dfa5-d205-4d07-9135-9f9e70387e3a"""[2] Starts reconcile loop",[2] Starts cmd_loopmsg""":",v"",v:"[0] Starts cmd_loop":"00v",,"",vname":"0"::0",crucible"",name,""name"level:"":"30:"crucible"name,"":crucible"level"",:,30crucible"""level,time""::""30level",2023-09-22T23:08:02.429401345Z"":time,"":30hostname"":"2023-09-22T23:08:02.429410517Z","ip-10-150-1-74.us-west-2.compute.internal"hostname,"":"pid":4291,"ip-10-150-1-74.us-west-2.compute.internal"upstairs",:"1pid"}:
1278 4291,"upstairs,"":time1"}:
1279 "{,2023-09-22T23:08:02.429418202Z"""time"msg:"",:""2023-09-22T23:08:02.4294226Z"hostname":[1] received reconcile message","","hostnameip-10-150-1-74.us-west-2.compute.internal""v":,:""pid"0:,4291ip-10-150-1-74.us-west-2.compute.internal"",name"":,"upstairs""crucible"pid,"":level":4291:130,}"
1280 upstairs":1}
1281 {,"time":""msg":{"2023-09-22T23:08:02.429474936Z","hostname":""[0] 127.0.0.1:61368 task reports connection:true"msg":",ip-10-150-1-74.us-west-2.compute.internal"",v"ed581aed-4fb6-4c12-84d1-a857283bcda2 Set Active after no repair:"0"pid",:,4291""vname"","::"0upstairs":crucible1","}name":",
1282 crucible"","level"level{:"30:"30msg":"[1] All repairs completed, exit","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.429525699Z",","hostname":"time":,""ip-10-150-1-74.us-west-2.compute.internaltime"2023-09-22T23:08:02.429527512Z:"",""2023-09-22T23:08:02.429535164Zpid,""":,hostname4291"":",hostname"":ip-10-150-1-74.us-west-2.compute.internal"upstairs"",:"1pid":ip-10-150-1-74.us-west-2.compute.internal}4291"
1283 ,,""upstairs"pid"{::42911,}""
1284 upstairs"msg":":1}
1285 a2b1abbb-2148-4d24-93b2-2a0a303e7718 WaitQuorum WaitQuorum WaitQuorum"{,"v":{0,""name"msg:"":"crucible"",msg":"Notify all downstairs, region set compare is done."level","":[1] Starts cmd_loop"v"30:,"0v",:"0name":,""crucible"name",:""level"crucible":,"30level":30,"time":"2023-09-22T23:08:02.429610036Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1},
1286 ,""time"time":":{"2023-09-22T23:08:02.429621862Z","2023-09-22T23:08:02.429622967Z"hostname"":"msg",:ip-10-150-1-74.us-west-2.compute.internal"""hostname",:""[0]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"pid":,ip-10-150-1-74.us-west-2.compute.internal"4291"v",,:""0pid":,upstairs"4291":,1name""}:
1287 "upstairs":crucible"1,"}level{":
1288 30"msg":"Set check for repair"{,"v":0,""name"msg"::""crucible","[2] received reconcile message"level":,"30v":0,"name":"crucible",","time"level"::"302023-09-22T23:08:02.429686674Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,,""upstairs"time"::1"}
1289 ,2023-09-22T23:08:02.429698833Z""time",{:""hostname":""2023-09-22T23:08:02.429705848Zmsg"":ip-10-150-1-74.us-west-2.compute.internal,"""hostname",[0]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]":pid""",:"4291ip-10-150-1-74.us-west-2.compute.internal"v",:,""pid0"upstairs,:""name"4291::"1,"crucible"upstairs"},
1290 ":level"1:30}{
1291 "msg":"[1] 127.0.0.1:41466 task reports connection:true"{,"v":0,""msg"name":":"crucible",","[2] All repairs completed, exitlevel"time"":",:"30v":2023-09-22T23:08:02.429765269Z"0,","name":"hostname":crucible"","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1292 {"msg":","time":"2023-09-22T23:08:02.429794083Z"[0]R dirty: [false, false, false, false, false, false, false, false, false, false]",","hostnamev":"0:,""name":"crucibleip-10-150-1-74.us-west-2.compute.internal"",","level":pid"30:4291,"upstairs":1},
1293 "time":"{2023-09-22T23:08:02.429786605Z",""hostname",msg":"":"time"[2] Starts cmd_loop:"ip-10-150-1-74.us-west-2.compute.internal"",,""v"2023-09-22T23:08:02.429824677Z":pid",0:,4291""hostname"name":,":""upstairs"ip-10-150-1-74.us-west-2.compute.internal":crucible"1,,""pid"}:
1294 4291level":30,"upstairs":1}{
1295 "msg":"{ed581aed-4fb6-4c12-84d1-a857283bcda2 Active Active Active",""msg"v"::,"0","time"name[1]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]""::"","crucible2023-09-22T23:08:02.42987923Z"v",:""0,hostname","":levelname"""::ip-10-150-1-74.us-west-2.compute.internal"30,""pidcrucible"":,4291"level,"":upstairs"30:1}
1296 ,"time":"2023-09-22T23:08:02.429915503Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time",:""pid":42912023-09-22T23:08:02.429924309Z",","upstairs"hostname"::"1}
1297 ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"{upstairs":1}
1298 "msg":"Set check for repair"{,"v":0,""name"msg"::""crucible","level":[1]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.42997416Z","hostname,"":"time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.429980185Z,""pid",:"4291hostname":","upstairs":1ip-10-150-1-74.us-west-2.compute.internal"},
1299 "pid":4291,"upstairs"{:1}
1300 "msg":"{[2] 127.0.0.1:55647 task reports connection:true","v"":msg"0:","name":"crucible","level":[1]R dirty: [false, false, false, false, false, false, false, false, false, false]"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.430030764Z","hostname":,""time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.430036248Zpid"":,4291"hostname",:""upstairs":1}ip-10-150-1-74.us-west-2.compute.internal"
1301 ,"pid":4291,"{upstairs":1}
1302 "msg":"{ed581aed-4fb6-4c12-84d1-a857283bcda2 Active Active Active",""msg"v"::"0,"name":"[2]R flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"crucible",",v"":level0",:"30name":"crucible","level":30,"time":"2023-09-22T23:08:02.430093784Z,"",time"":"hostname":"2023-09-22T23:08:02.430096672Z",ip-10-150-1-74.us-west-2.compute.internal"","hostname"pid"::"4291,"ip-10-150-1-74.us-west-2.compute.internal"upstairs",:"1pid":}4291
1303 ,"upstairs":1}
1304 {{"msg":""Set check for repairmsg"":","v":0,"[2]R generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]"name":","crucible"v":,"0level",:"30name":"crucible","level":30,"time":","2023-09-22T23:08:02.430155178Z"time":","hostname":"2023-09-22T23:08:02.430158275Z","hostnameip-10-150-1-74.us-west-2.compute.internal"",:""pid":4291ip-10-150-1-74.us-west-2.compute.internal,""upstairs",:"1pid":}4291
1305 ,"upstairs":1}
1306 {{"msg":""[0] received reconcile messagemsg":"","v":0,"name":"[2]R dirty: [false, false, false, false, false, false, false, false, false, false]"crucible",","v"level"::030,"name":"crucible","level":30,"time":","2023-09-22T23:08:02.4302157Z"time":,""hostname":"2023-09-22T23:08:02.43021928Z",ip-10-150-1-74.us-west-2.compute.internal"","hostname"pid"::"4291,"upstairs"ip-10-150-1-74.us-west-2.compute.internal":,"1pid":}4291
1307 ,"upstairs":1}
1308 {{"msg":""msg"[0] All repairs completed, exit":","v":Max found gen is 1"0,","name"v"::"0crucible",","name"level"::"30crucible","level":30,",time":""time":"2023-09-22T23:08:02.430277798Z"2023-09-22T23:08:02.430279866Z",","hostname":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid",:"4291pid":4291,",upstairs"":upstairs"1:1}
1309 }
1310 {{"msg":""[0] Starts cmd_loop"msg":","v":0Generation requested: 1 >= found:1",","namev""::0","crucible"name",:""level":crucible"30,"level":30,,""time":time"":"2023-09-22T23:08:02.430341221Z"2023-09-22T23:08:02.430342917Z",",hostname"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid",:"4291pid":,4291"upstairs",:"1upstairs":}1
1311 }
1312 {"msg{":"[1] received reconcile message"",msg"":v"":0Next flush: 1",","namev""::"0crucible",","name"level:"":crucible"30,"level":30,"time":"2023-09-22T23:08:02.430406918Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1313 ,"time":"{2023-09-22T23:08:02.430405349Z",""hostname"msg":":"All extents match"ip-10-150-1-74.us-west-2.compute.internal",,""v"pid:"0:,4291"name":,""upstairs"crucible":,"1level":}30
1314 ,"time":"{2023-09-22T23:08:02.430457556Z","hostname"":"msg":"ip-10-150-1-74.us-west-2.compute.internal"[1] All repairs completed, exit",",pid":"4291v":0,","upstairs"name"::"1crucible"}
1315 ,"level":30{"msg":"No downstairs repair required","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.430493671Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,","upstairs":time"1:"}
1316 2023-09-22T23:08:02.43050522Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291"msg":,""upstairs":[1] Starts cmd_loop"1,"}v"
1317 :0,"name":"{crucible","level":30"msg":"No initial repair work was required","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.430548376Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time,"":upstairs"":1}2023-09-22T23:08:02.430557579Z"
1318 ,"hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291","msg":upstairs"":1}[2] received reconcile message"
1319 ,"v":0,"{name":"crucible",""level"msg"::"30Set Downstairs and Upstairs active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.43060304Z","hostname":","time":ip-10-150-1-74.us-west-2.compute.internal"","pid":2023-09-22T23:08:02.430609946Z"4291,","hostname":"upstairs":1}ip-10-150-1-74.us-west-2.compute.internal"
1320 ,"pid":4291,"{upstairs":1}
1321 "msg":"{[2] All repairs completed, exit","v":"0msg",:""name":"crucible","level":30a2b1abbb-2148-4d24-93b2-2a0a303e7718 is now active with session: a7f1d842-f19a-4c81-abcd-c1f78df01722","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.430657605Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","pid":2023-09-22T23:08:02.430664643Z"4291,",hostname"":"upstairs":1}
1322 ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"{upstairs":1}
1323 "msg":"[2] Starts cmd_loop"{,"v":0,""name"msg:"":"crucible","level":30a2b1abbb-2148-4d24-93b2-2a0a303e7718 Set Active after no repair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.43070992Z","hostname":","time"ip-10-150-1-74.us-west-2.compute.internal":","pid":42912023-09-22T23:08:02.430716927Z",","upstairs"hostname"::"1}
1324 ip-10-150-1-74.us-west-2.compute.internal","pid":4291,{"upstairs":1}"
1325 msg":"[0] downstairs disconnected"{,"v":0,""msg"name"::""crucible","level":Notify all downstairs, region set compare is done."40,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.430765717Z","hostname":","time"ip-10-150-1-74.us-west-2.compute.internal":,""pid":42912023-09-22T23:08:02.430771486Z",","upstairs"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1326 {"msg":"Set check for repair","v":0:,"1name":"}
1327 crucible","level":30{"msg":"[0] ed581aed-4fb6-4c12-84d1-a857283bcda2 Gone missing, transition from Active to Offline","v":0,"name":"crucible",",level"":time30":"2023-09-22T23:08:02.430820897Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1328 {,"time"":"msg":"2023-09-22T23:08:02.430838357Z","[1] 127.0.0.1:50978 task reports connection:truehostname"":","v":0ip-10-150-1-74.us-west-2.compute.internal,"","name":pid"":crucible4291",","level"upstairs:"30:1}
1329 {"msg":","time":"[0] ed581aed-4fb6-4c12-84d1-a857283bcda2 connection to 127.0.0.1:60909 closed"2023-09-22T23:08:02.430874485Z",",v"":hostname"0:,""name":"crucible"ip-10-150-1-74.us-west-2.compute.internal",","levelpid""::429130,"upstairs":1}
1330 {"msg":"a2b1abbb-2148-4d24-93b2-2a0a303e7718 Active Active Active",,""v":time":0","name":"2023-09-22T23:08:02.430902788Z"crucible",,""level"hostname"::"30ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"looper":"0","upstairs":1}
1331 ,"time":"{2023-09-22T23:08:02.430929747Z",""hostname"msg"::""[0] pm_task rx.recv() is None"ip-10-150-1-74.us-west-2.compute.internal",",v"":pid":04291,"name",":"upstairs":crucible1"},
1332 "level":40{"msg":"Set check for repair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.430968677Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1,"}time"
1333 :"2023-09-22T23:08:02.430980165Z","{hostname":""msg"ip-10-150-1-74.us-west-2.compute.internal":,""pid":4291[0] 127.0.0.1:60909 task reports connection:false",","upstairs"v"::10},
1334 "name":"crucible","{level":30"msg":"[2] 127.0.0.1:57228 task reports connection:true","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.431031739Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1},"
1335 time":"2023-09-22T23:08:02.431042511Z{","hostname"":msg"":"ip-10-150-1-74.us-west-2.compute.internal",ed581aed-4fb6-4c12-84d1-a857283bcda2 Offline Active Active"",pid"":v4291":0,","upstairs"name"::1"}crucible"
1336 ,"level":30{"msg":"a2b1abbb-2148-4d24-93b2-2a0a303e7718 Active Active Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.431087551Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
1337 ,"time":"2023-09-22T23:08:02.431100528Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":[0] 127.0.0.1:60909 task reports offline"4291,"v":,0","upstairsname""::"1crucible"},"
1338 level":30{"msg":"Set check for repair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.431139092Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1},
1339 "time":"2023-09-22T23:08:02.431151503Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"upstairs":1}
13402023-09-22T23:08:02.431ZINFOcrucible: [0] received reconcile message upstairs = 1
13412023-09-22T23:08:02.431ZINFOcrucible: [0] All repairs completed, exit upstairs = 1
13422023-09-22T23:08:02.431ZINFOcrucible: [0] Starts cmd_loop upstairs = 1
13432023-09-22T23:08:02.431ZINFOcrucible: [1] received reconcile message upstairs = 1
13442023-09-22T23:08:02.431ZINFOcrucible: [1] All repairs completed, exit upstairs = 1
13452023-09-22T23:08:02.431ZINFOcrucible: [1] Starts cmd_loop upstairs = 1
13462023-09-22T23:08:02.431ZINFOcrucible: [2] received reconcile message upstairs = 1
13472023-09-22T23:08:02.431ZINFOcrucible: [2] All repairs completed, exit upstairs = 1
13482023-09-22T23:08:02.431ZINFOcrucible: [2] Starts cmd_loop upstairs = 1
1349 test impacted_blocks::test::extent_from_offset_can_recreate_iblocks ... ok
1350 thread 'impacted_blocks::test::extent_from_offset_panics_for_offsets_outside_region' panicked at 'assertion failed: offset.value < ddef.extent_count() as u64 * extent_size', upstairs/src/impacted_blocks.rs:280:5
1351 note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
1352 test impacted_blocks::test::iblocks_conflicts_is_commutative ... ok
1353 test impacted_blocks::test::extent_from_offset_panics_for_offsets_outside_region ... ok
1354 test impacted_blocks::test::iblocks_extents_returns_correct_extents ... ok
1355 test impacted_blocks::test::extent_from_offset_panics_when_num_blocks_outside_region ... ok
1356 test impacted_blocks::test::iblocks_from_offset_is_empty_for_zero_blocks ... ok
1357 test impacted_blocks::test::iblocks_from_offset_with_zero_extent_size_panics ... ok
1358 test impacted_blocks::test::intersection_is_associative ... ok
1359 test impacted_blocks::test::intersection_is_commutative ... ok
1360 test impacted_blocks::test::iblocks_new_panics_for_flipped_polarity ... ok
1361 test impacted_blocks::test::intersection_produces_less_than_or_equal_block_count ... ok
1362 test impacted_blocks::test::intersection_with_empty_is_empty ... ok
1363 test impacted_blocks::test::nothing_contains_empty ... ok
1364 test impacted_blocks::test::test_extent_from_offset ... ok
1365 test impacted_blocks::test::test_extent_from_offset_single_block_only ... ok
1366 test impacted_blocks::test::test_extent_to_impacted_blocks ... ok
1367 test impacted_blocks::test::test_impacted_blocks_from_offset ... ok
1368 test impacted_blocks::test::test_large_extent_to_impacted_blocks ... ok
1369 test impacted_blocks::test::test_new_range_panics_when_last_block_before_first - should panic ... ok
1370 test impacted_blocks::test::test_new_range_panics_when_last_extent_before_first - should panic ... ok
1371 test impacted_blocks::test::overlapping_impacted_blocks_should_conflict ... ok
1372 test impacted_blocks::test::union_is_associative ... ok
1373 test impacted_blocks::test::union_is_commutative ... ok
1374 test impacted_blocks::test::iblocks_blocks_iterates_over_all_blocks ... ok
13752023-09-22T23:08:02.516ZINFOcrucible: Crucible stats registered with UUID: 8be15b9a-fc42-4ab7-8a35-70c3310bdb05
13762023-09-22T23:08:02.516ZINFOcrucible: Crucible 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 has session id: babb90fa-6130-4b9a-9c9b-8915d0effdb2
13772023-09-22T23:08:02.516ZINFOcrucible: 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 is now active with session: 9779cae0-6ece-4b8c-8c9e-8d519b6abff7
13782023-09-22T23:08:02.516ZINFOcrucible: [0] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) New New New ds_transition to WaitActive
13792023-09-22T23:08:02.516ZINFOcrucible: [0] Transition from New to WaitActive
13802023-09-22T23:08:02.516ZINFOcrucible: [0] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) WaitActive New New ds_transition to WaitQuorum
13812023-09-22T23:08:02.516ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
13822023-09-22T23:08:02.516ZINFOcrucible: [0] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) WaitQuorum New New ds_transition to Active
13832023-09-22T23:08:02.516ZINFOcrucible: [0] Transition from WaitQuorum to Active
13842023-09-22T23:08:02.516ZINFOcrucible: [1] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) Active New New ds_transition to WaitActive
13852023-09-22T23:08:02.516ZINFOcrucible: [1] Transition from New to WaitActive
13862023-09-22T23:08:02.516ZINFOcrucible: [1] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) Active WaitActive New ds_transition to WaitQuorum
13872023-09-22T23:08:02.516ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
13882023-09-22T23:08:02.516ZINFOcrucible: [1] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) Active WaitQuorum New ds_transition to Active
13892023-09-22T23:08:02.516ZINFOcrucible: [1] Transition from WaitQuorum to Active
13902023-09-22T23:08:02.516ZINFOcrucible: [2] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) Active Active New ds_transition to WaitActive
13912023-09-22T23:08:02.516ZINFOcrucible: [2] Transition from New to WaitActive
13922023-09-22T23:08:02.516ZINFOcrucible: [2] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) Active Active WaitActive ds_transition to WaitQuorum
13932023-09-22T23:08:02.516ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
13942023-09-22T23:08:02.517ZINFOcrucible: [2] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) Active Active WaitQuorum ds_transition to Active
13952023-09-22T23:08:02.517ZINFOcrucible: [2] Transition from WaitQuorum to Active
13962023-09-22T23:08:02.517ZINFOcrucible: [1] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) Active Active Active ds_transition to Faulted
13972023-09-22T23:08:02.517ZINFOcrucible: [1] Transition from Active to Faulted
13982023-09-22T23:08:02.517ZINFOcrucible: [1] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) Active Faulted Active ds_transition to LiveRepairReady
13992023-09-22T23:08:02.517ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
14002023-09-22T23:08:02.517ZINFOcrucible: [1] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) Active LiveRepairReady Active ds_transition to LiveRepair
14012023-09-22T23:08:02.517ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
14022023-09-22T23:08:02.517ZINFOcrucible: [0] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) Active LiveRepair Active ds_transition to Faulted
14032023-09-22T23:08:02.517ZINFOcrucible: [0] Transition from Active to Faulted
14042023-09-22T23:08:02.517ZINFOcrucible: [0] 8be15b9a-fc42-4ab7-8a35-70c3310bdb05 (9779cae0-6ece-4b8c-8c9e-8d519b6abff7) Faulted LiveRepair Active ds_transition to LiveRepairReady
14052023-09-22T23:08:02.517ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
14062023-09-22T23:08:02.517ZINFOcrucible: Checking if live repair is needed
14072023-09-22T23:08:02.517ZWARNcrucible: Upstairs already in repair, trying again later
1408 test live_repair::repair_test::test_check_for_repair_already_repair ... ok
14092023-09-22T23:08:02.518ZINFOcrucible: Crucible stats registered with UUID: 3b20cd5e-ffe3-43dd-abf9-e945e76502a7
14102023-09-22T23:08:02.518ZINFOcrucible: Crucible 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 has session id: 9ae51c7e-5b1e-4ae4-a036-dc19e88449e9
14112023-09-22T23:08:02.518ZINFOcrucible: 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 is now active with session: b7a9eedf-81a9-4851-b077-0fe235e6b4d0
14122023-09-22T23:08:02.518ZINFOcrucible: [0] 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 (b7a9eedf-81a9-4851-b077-0fe235e6b4d0) New New New ds_transition to WaitActive
14132023-09-22T23:08:02.518ZINFOcrucible: [0] Transition from New to WaitActive
14142023-09-22T23:08:02.518ZINFOcrucible: [0] 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 (b7a9eedf-81a9-4851-b077-0fe235e6b4d0) WaitActive New New ds_transition to WaitQuorum
14152023-09-22T23:08:02.518ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
14162023-09-22T23:08:02.518ZINFOcrucible: [0] 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 (b7a9eedf-81a9-4851-b077-0fe235e6b4d0) WaitQuorum New New ds_transition to Active
14172023-09-22T23:08:02.518ZINFOcrucible: [0] Transition from WaitQuorum to Active
14182023-09-22T23:08:02.518ZINFOcrucible: [1] 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 (b7a9eedf-81a9-4851-b077-0fe235e6b4d0) Active New New ds_transition to WaitActive
14192023-09-22T23:08:02.518ZINFOcrucible: [1] Transition from New to WaitActive
14202023-09-22T23:08:02.518ZINFOcrucible: [1] 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 (b7a9eedf-81a9-4851-b077-0fe235e6b4d0) Active WaitActive New ds_transition to WaitQuorum
14212023-09-22T23:08:02.518ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
14222023-09-22T23:08:02.518ZINFOcrucible: [1] 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 (b7a9eedf-81a9-4851-b077-0fe235e6b4d0) Active WaitQuorum New ds_transition to Active
14232023-09-22T23:08:02.518ZINFOcrucible: [1] Transition from WaitQuorum to Active
14242023-09-22T23:08:02.518ZINFOcrucible: [2] 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 (b7a9eedf-81a9-4851-b077-0fe235e6b4d0) Active Active New ds_transition to WaitActive
14252023-09-22T23:08:02.518ZINFOcrucible: [2] Transition from New to WaitActive
14262023-09-22T23:08:02.518ZINFOcrucible: [2] 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 (b7a9eedf-81a9-4851-b077-0fe235e6b4d0) Active Active WaitActive ds_transition to WaitQuorum
14272023-09-22T23:08:02.518ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
14282023-09-22T23:08:02.518ZINFOcrucible: [2] 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 (b7a9eedf-81a9-4851-b077-0fe235e6b4d0) Active Active WaitQuorum ds_transition to Active
14292023-09-22T23:08:02.518ZINFOcrucible: [2] Transition from WaitQuorum to Active
14302023-09-22T23:08:02.518ZINFOcrucible: [1] 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 (b7a9eedf-81a9-4851-b077-0fe235e6b4d0) Active Active Active ds_transition to Faulted
14312023-09-22T23:08:02.518ZINFOcrucible: [1] Transition from Active to Faulted
14322023-09-22T23:08:02.518ZINFOcrucible: [1] 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 (b7a9eedf-81a9-4851-b077-0fe235e6b4d0) Active Faulted Active ds_transition to LiveRepairReady
14332023-09-22T23:08:02.518ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
14342023-09-22T23:08:02.518ZINFOcrucible: Checking if live repair is needed
14352023-09-22T23:08:02.518ZINFOcrucible: [1] 3b20cd5e-ffe3-43dd-abf9-e945e76502a7 (b7a9eedf-81a9-4851-b077-0fe235e6b4d0) Active LiveRepairReady Active ds_transition to LiveRepair
14362023-09-22T23:08:02.518ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
1437 test live_repair::repair_test::test_check_for_repair_do_repair ... ok
14382023-09-22T23:08:02.519ZINFOcrucible: Crucible stats registered with UUID: 25bf7e8b-971c-4e8e-9e32-9d598106e189
14392023-09-22T23:08:02.519ZINFOcrucible: Crucible 25bf7e8b-971c-4e8e-9e32-9d598106e189 has session id: e8f7fcdc-3b78-4df0-aa00-a5795a260b2c
14402023-09-22T23:08:02.519ZINFOcrucible: 25bf7e8b-971c-4e8e-9e32-9d598106e189 is now active with session: 88fb9508-2fa9-4920-81d3-845a3885e04c
14412023-09-22T23:08:02.519ZINFOcrucible: [0] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) New New New ds_transition to WaitActive
14422023-09-22T23:08:02.519ZINFOcrucible: [0] Transition from New to WaitActive
14432023-09-22T23:08:02.519ZINFOcrucible: [0] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) WaitActive New New ds_transition to WaitQuorum
14442023-09-22T23:08:02.519ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
14452023-09-22T23:08:02.519ZINFOcrucible: [0] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) WaitQuorum New New ds_transition to Active
14462023-09-22T23:08:02.519ZINFOcrucible: [0] Transition from WaitQuorum to Active
14472023-09-22T23:08:02.519ZINFOcrucible: [1] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) Active New New ds_transition to WaitActive
14482023-09-22T23:08:02.520ZINFOcrucible: [1] Transition from New to WaitActive
1449 {"msg":"[1] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.520043554Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","test impacted_blocks::test::subregions_are_contained ... pid":ok4291}
1450 
14512023-09-22T23:08:02.520ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
14522023-09-22T23:08:02.520ZINFOcrucible: [1] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) Active WaitQuorum New ds_transition to Active
14532023-09-22T23:08:02.520ZINFOcrucible: [1] Transition from WaitQuorum to Active
14542023-09-22T23:08:02.520ZINFOcrucible: [2] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) Active Active New ds_transition to WaitActive
14552023-09-22T23:08:02.520ZINFOcrucible: [2] Transition from New to WaitActive
14562023-09-22T23:08:02.520ZINFOcrucible: [2] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) Active Active WaitActive ds_transition to WaitQuorum
14572023-09-22T23:08:02.520ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
14582023-09-22T23:08:02.520ZINFOcrucible: [2] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) Active Active WaitQuorum ds_transition to Active
14592023-09-22T23:08:02.520ZINFOcrucible: [2] Transition from WaitQuorum to Active
14602023-09-22T23:08:02.520ZINFOcrucible: [1] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) Active Active Active ds_transition to Faulted
14612023-09-22T23:08:02.520ZINFOcrucible: [1] Transition from Active to Faulted
14622023-09-22T23:08:02.520ZINFOcrucible: [1] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) Active Faulted Active ds_transition to LiveRepairReady
14632023-09-22T23:08:02.520ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
14642023-09-22T23:08:02.520ZINFOcrucible: [2] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) Active LiveRepairReady Active ds_transition to Faulted
14652023-09-22T23:08:02.520ZINFOcrucible: [2] Transition from Active to Faulted
14662023-09-22T23:08:02.520ZINFOcrucible: [2] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) Active LiveRepairReady Faulted ds_transition to LiveRepairReady
14672023-09-22T23:08:02.520ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
14682023-09-22T23:08:02.520ZINFOcrucible: Checking if live repair is needed
14692023-09-22T23:08:02.520ZINFOcrucible: [1] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) Active LiveRepairReady LiveRepairReady ds_transition to LiveRepair
14702023-09-22T23:08:02.520ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
14712023-09-22T23:08:02.520ZINFOcrucible: [2] 25bf7e8b-971c-4e8e-9e32-9d598106e189 (88fb9508-2fa9-4920-81d3-845a3885e04c) Active LiveRepair LiveRepairReady ds_transition to LiveRepair
14722023-09-22T23:08:02.520ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
14732023-09-22T23:08:02.520ZINFOcrucible: Crucible stats registered with UUID: 46e3c056-00ff-4d1a-9e2a-764650276e46
1474 test live_repair::repair_test::test_check_for_repair_do_two_repair ... {ok
1475 "msg":"Crucible 46e3c056-00ff-4d1a-9e2a-764650276e46 has session id: b40b4606-8bb3-454d-9ded-0c9e3424390e","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.520834857Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
14762023-09-22T23:08:02.520ZINFOcrucible: Checking if live repair is needed
14772023-09-22T23:08:02.520ZINFOcrucible: 46e3c056-00ff-4d1a-9e2a-764650276e46 is now active with session: 89be093f-a3e6-4c47-9b32-a64289baafd3
14782023-09-22T23:08:02.520ZINFOcrucible: [0] 46e3c056-00ff-4d1a-9e2a-764650276e46 (89be093f-a3e6-4c47-9b32-a64289baafd3) New New New ds_transition to WaitActive
14792023-09-22T23:08:02.521ZINFOcrucible: [0] Transition from New to WaitActive
14802023-09-22T23:08:02.521ZINFOcrucible: [0] 46e3c056-00ff-4d1a-9e2a-764650276e46 (89be093f-a3e6-4c47-9b32-a64289baafd3) WaitActive New New ds_transition to WaitQuorum
14812023-09-22T23:08:02.521ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
14822023-09-22T23:08:02.521ZINFOcrucible: [0] 46e3c056-00ff-4d1a-9e2a-764650276e46 (89be093f-a3e6-4c47-9b32-a64289baafd3) WaitQuorum New New ds_transition to Active
14832023-09-22T23:08:02.521ZINFOcrucible: [0] Transition from WaitQuorum to Active
14842023-09-22T23:08:02.521ZINFOcrucible: [1] 46e3c056-00ff-4d1a-9e2a-764650276e46 (89be093f-a3e6-4c47-9b32-a64289baafd3) Active New New ds_transition to WaitActive
14852023-09-22T23:08:02.521ZINFOcrucible: [1] Transition from New to WaitActive
14862023-09-22T23:08:02.521ZINFOcrucible: [1] 46e3c056-00ff-4d1a-9e2a-764650276e46 (89be093f-a3e6-4c47-9b32-a64289baafd3) Active WaitActive New ds_transition to WaitQuorum
14872023-09-22T23:08:02.521ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
14882023-09-22T23:08:02.521ZINFOcrucible: [1] 46e3c056-00ff-4d1a-9e2a-764650276e46 (89be093f-a3e6-4c47-9b32-a64289baafd3) Active WaitQuorum New ds_transition to Active
1489 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30{"msg":","time":"Crucible stats registered with UUID: ca3d570b-0142-4846-8b94-8a2df932685d"2023-09-22T23:08:02.521379103Z",,""v"hostname"::"0,"name"ip-10-150-1-74.us-west-2.compute.internal":,""crucible"pid":,4291"level"}:
1490 30{"msg":"[2] 46e3c056-00ff-4d1a-9e2a-764650276e46 (89be093f-a3e6-4c47-9b32-a64289baafd3) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.521411432Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.521426851Z",",pid"":hostname":4291"}
1491 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1492 {{"msg":""msg":"[2] Transition from New to WaitActive","v"Crucible ca3d570b-0142-4846-8b94-8a2df932685d has session id: ed63297a-d753-4422-aa8e-3627007c4617":0,,""v"name"::"0crucible",","name":"level":crucible"30,"level":30,"time":",2023-09-22T23:08:02.521482552Z"","time":hostname"":"2023-09-22T23:08:02.521484396Z"ip-10-150-1-74.us-west-2.compute.internal",,""hostname"pid"::"4291}
1493 ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
1494 "msg":"{"msg":"[2] 46e3c056-00ff-4d1a-9e2a-764650276e46 (89be093f-a3e6-4c47-9b32-a64289baafd3) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible",ca3d570b-0142-4846-8b94-8a2df932685d is now active with session: 4ec111d6-91eb-4bad-893c-0cab18122c49"",level""v:"30:0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.521538546Z","hostname",:""time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.521543293Z"pid":,"4291hostname":"}
1495 ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
1496 "msg":"{[2] Transition from WaitActive to WaitQuorum","v"":msg0",":name"":"crucible","level":30[0] ca3d570b-0142-4846-8b94-8a2df932685d (4ec111d6-91eb-4bad-893c-0cab18122c49) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.521592855Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time}"
1497 :"2023-09-22T23:08:02.521601426Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1498 [2] 46e3c056-00ff-4d1a-9e2a-764650276e46 (89be093f-a3e6-4c47-9b32-a64289baafd3) Active Active WaitQuorum ds_transition to Active","v":{0,"name":"crucible"",msg"":level":"30[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.521642954Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1499 {"msg":","[2] Transition from WaitQuorum to Active"time":,""v":0,"name":"2023-09-22T23:08:02.521650625Zcrucible"","level,":"30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1500 ,"time":"2023-09-22T23:08:02.521672475Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal",""pid":msg"4291:"}
1501 {"msg":"Checking if live repair is needed",[0] ca3d570b-0142-4846-8b94-8a2df932685d (4ec111d6-91eb-4bad-893c-0cab18122c49) WaitActive New New ds_transition to WaitQuorum""v",":v0",":name0":,""crucible",name"":level":"30crucible","level":30,"time":"2023-09-22T23:08:02.521705874Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1502 {"msg":"No Live Repair required at this time",","v":time"0:,""name":"crucible","level":2023-09-22T23:08:02.521708559Z"30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"}time":"
1503 2023-09-22T23:08:02.521728857Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1504 "msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.521755732Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
15052023-09-22T23:08:02.521ZINFOcrucible: [0] ca3d570b-0142-4846-8b94-8a2df932685d (4ec111d6-91eb-4bad-893c-0cab18122c49) WaitQuorum New New ds_transition to Active
15062023-09-22T23:08:02.521ZINFOcrucible: [0] Transition from WaitQuorum to Active
15072023-09-22T23:08:02.521ZINFOcrucible: [1] ca3d570b-0142-4846-8b94-8a2df932685d (4ec111d6-91eb-4bad-893c-0cab18122c49) Active New New ds_transition to WaitActive
15082023-09-22T23:08:02.521ZINFOcrucible: [1] Transition from New to WaitActive
15092023-09-22T23:08:02.521ZINFOcrucible: [1] ca3d570b-0142-4846-8b94-8a2df932685d (4ec111d6-91eb-4bad-893c-0cab18122c49) Active WaitActive New ds_transition to WaitQuorum
15102023-09-22T23:08:02.521ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
15112023-09-22T23:08:02.522ZINFOcrucible: [1] ca3d570b-0142-4846-8b94-8a2df932685d (4ec111d6-91eb-4bad-893c-0cab18122c49) Active WaitQuorum New ds_transition to Active
15122023-09-22T23:08:02.522ZINFOcrucible: [1] Transition from WaitQuorum to Active
1513 test live_repair::repair_test::test_check_for_repair_normal ... {ok"
1514 msg":"[2] ca3d570b-0142-4846-8b94-8a2df932685d (4ec111d6-91eb-4bad-893c-0cab18122c49) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.522122215Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
15152023-09-22T23:08:02.522ZINFOcrucible: [2] Transition from New to WaitActive
15162023-09-22T23:08:02.522ZINFOcrucible: [2] ca3d570b-0142-4846-8b94-8a2df932685d (4ec111d6-91eb-4bad-893c-0cab18122c49) Active Active WaitActive ds_transition to WaitQuorum
15172023-09-22T23:08:02.522ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
15182023-09-22T23:08:02.522ZINFOcrucible: [2] ca3d570b-0142-4846-8b94-8a2df932685d (4ec111d6-91eb-4bad-893c-0cab18122c49) Active Active WaitQuorum ds_transition to Active
15192023-09-22T23:08:02.522ZINFOcrucible: [2] Transition from WaitQuorum to Active
15202023-09-22T23:08:02.522ZINFOcrucible: [1] ca3d570b-0142-4846-8b94-8a2df932685d (4ec111d6-91eb-4bad-893c-0cab18122c49) Active Active Active ds_transition to Faulted
15212023-09-22T23:08:02.522ZINFOcrucible: [1] Transition from Active to Faulted
15222023-09-22T23:08:02.522ZINFOcrucible: [1] ca3d570b-0142-4846-8b94-8a2df932685d (4ec111d6-91eb-4bad-893c-0cab18122c49) Active Faulted Active ds_transition to LiveRepairReady
15232023-09-22T23:08:02.522ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
15242023-09-22T23:08:02.522ZINFOcrucible: Checking if live repair is needed
15252023-09-22T23:08:02.522ZWARNcrucible: Upstairs repair task running, trying again later
15262023-09-22T23:08:02.522ZINFOcrucible: Crucible stats registered with UUID: 799e9fee-e865-4dfc-bb55-9cc2f161f350
15272023-09-22T23:08:02.522ZINFOcrucible: Crucible 799e9fee-e865-4dfc-bb55-9cc2f161f350 has session id: 71e074b3-11f6-4d45-abac-dffe02ff0dc6
1528 {"msg":"[0] 799e9fee-e865-4dfc-bb55-9cc2f161f350 (85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"test live_repair::repair_test::test_check_for_repair_task_running ... 2023-09-22T23:08:02.522840832Zok",
1529 "hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
15302023-09-22T23:08:02.522ZINFOcrucible: [0] Transition from New to WaitActive
15312023-09-22T23:08:02.522ZINFOcrucible: [0] 799e9fee-e865-4dfc-bb55-9cc2f161f350 (85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33) WaitActive New New ds_transition to WaitQuorum
15322023-09-22T23:08:02.522ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
15332023-09-22T23:08:02.523ZINFOcrucible: [0] 799e9fee-e865-4dfc-bb55-9cc2f161f350 (85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33) WaitQuorum New New ds_transition to Active
15342023-09-22T23:08:02.523ZINFOcrucible: [0] Transition from WaitQuorum to Active
15352023-09-22T23:08:02.523ZINFOcrucible: [1] 799e9fee-e865-4dfc-bb55-9cc2f161f350 (85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33) Active New New ds_transition to WaitActive
15362023-09-22T23:08:02.523ZINFOcrucible: [1] Transition from New to WaitActive
15372023-09-22T23:08:02.523ZINFOcrucible: [1] 799e9fee-e865-4dfc-bb55-9cc2f161f350 (85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33) Active WaitActive New ds_transition to WaitQuorum
15382023-09-22T23:08:02.523ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
15392023-09-22T23:08:02.523ZINFOcrucible: [1] 799e9fee-e865-4dfc-bb55-9cc2f161f350 (85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33) Active WaitQuorum New ds_transition to Active
1540 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30{,"time"":"msg":"2023-09-22T23:08:02.523323598Z","hostname":"Crucible stats registered with UUID: 07959179-ab76-4a4b-8cb7-dfeed5f1ae35"ip-10-150-1-74.us-west-2.compute.internal",",pid":"4291v":}0
1541 ,"name":"crucible"{,"level":30"msg":"[2] 799e9fee-e865-4dfc-bb55-9cc2f161f350 (85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.523357752Z",,""hostname"time":":"2023-09-22T23:08:02.523369499Z"ip-10-150-1-74.us-west-2.compute.internal",,""hostname"pid"::"4291ip-10-150-1-74.us-west-2.compute.internal}"
1542 ,"pid":4291}
1543 {{"msg":""msg":"[2] Transition from New to WaitActive"Crucible 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 has session id: 0a0958a3-354c-47a2-a637-e70cf90da86b","v,"":v"0:,"0name":,""crucible"name":","crucible"level":,"30level":30,"time":"2023-09-22T23:08:02.523430224Z",","time":hostname"":"2023-09-22T23:08:02.523431417Z"ip-10-150-1-74.us-west-2.compute.internal,"","hostnamepid""::"4291}
1544 ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
1545 "msg":"{"[2] 799e9fee-e865-4dfc-bb55-9cc2f161f350 (85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33) Active Active WaitActive ds_transition to WaitQuorum"msg":","v":0,"name":"crucible","level":[0] 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 (e64c863e-d0f0-4d25-a523-4286ae993f56) New New New ds_transition to WaitActive"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.52348674Z",,""hostname":"time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.523492412Z,""pid":,4291"hostname"}:
1546 "ip-10-150-1-74.us-west-2.compute.internal","{pid":4291}
1547 "msg":"{[2] Transition from WaitActive to WaitQuorum","v":"0msg,"":"name":"crucible"[0] Transition from New to WaitActive",",level""v"::300,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.523544863Z",,""time":"hostname":"2023-09-22T23:08:02.523548986Z","ip-10-150-1-74.us-west-2.compute.internal"hostname":,""pid":4291ip-10-150-1-74.us-west-2.compute.internal",}"
1548 pid":4291}
1549 {"{msg":""msg":"[2] 799e9fee-e865-4dfc-bb55-9cc2f161f350 (85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33) Active Active WaitQuorum ds_transition to Active","v":0,"[0] 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 (e64c863e-d0f0-4d25-a523-4286ae993f56) WaitActive New New ds_transition to WaitQuorum"name":","crucible"v",:"0level",:"30name":"crucible","level":30,"time":"2023-09-22T23:08:02.523605867Z",","hostname":time"":"ip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:08:02.523609414Z"",",pid"":hostname"4291:"}
1550 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}{
1551 "test impacted_blocks::test::union_with_empty_is_identity ... msg"{:ok
1552 ""msg":"[2] Transition from WaitQuorum to Active","v"[0] Transition from WaitActive to WaitQuorum":,0",v"":name":0","crucible"name":,""cruciblelevel"":,30"level":30,",time":""time":"2023-09-22T23:08:02.523682694Z"2023-09-22T23:08:02.523684257Z,"",hostname":""hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid,""pid"::42914291}}
1553 
1554 {"msg":{""msg":"[0] 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 (e64c863e-d0f0-4d25-a523-4286ae993f56) WaitQuorum New New ds_transition to Active","v":0,"799e9fee-e865-4dfc-bb55-9cc2f161f350 is now active with session: 85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33name"":","crucible"v":,"0level",:"30name":"crucible","level":30,"time":"2023-09-22T23:08:02.523746955Z","hostname,"":"time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.523750497Z",",pid"":hostname"4291:"}
1555 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}{
1556 {"msg":"[1] 799e9fee-e865-4dfc-bb55-9cc2f161f350 (85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33) Active Active Active ds_transition to Faulted","v":0","name":"msgcrucible"",":level"":30[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.523867192Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1557 ,"time":"2023-09-22T23:08:02.523876375Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":[1] Transition from Active to Faulted"4291,"v":}0
1558 ,"name":"crucible","level":30{"msg":"[1] 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 (e64c863e-d0f0-4d25-a523-4286ae993f56) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":,30"time":"2023-09-22T23:08:02.523919903Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1559 {,""time"msg"::""2023-09-22T23:08:02.523935465Z","hostname":"[1] 799e9fee-e865-4dfc-bb55-9cc2f161f350 (85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33) Active Faulted Active ds_transition to LiveRepairReady"ip-10-150-1-74.us-west-2.compute.internal",",v"":pid"0:,"4291name":"}crucible"
1560 ,"level":30{"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.523973926Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1561 ,"time":"2023-09-22T23:08:02.523986307Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291[1] Transition from Faulted to LiveRepairReady"},
1562 "v":0,"name":"{crucible","level":30"msg":"[1] 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 (e64c863e-d0f0-4d25-a523-4286ae993f56) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.5240223Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1563 ,"time":"2023-09-22T23:08:02.52403464Z{","hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1564 [1] 799e9fee-e865-4dfc-bb55-9cc2f161f350 (85cf2e10-e0ba-47f8-95d8-3a05a2ce5d33) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"{name":"crucible",""level"msg"::"30[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.524074124Z","hostname":","time"ip-10-150-1-74.us-west-2.compute.internal":,""pid":42912023-09-22T23:08:02.524081427Z"},
1565 "hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291"msg":}"
1566 [1] Transition from LiveRepairReady to LiveRepair","v":0,"{name":"crucible",""level":msg"30:"[1] 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 (e64c863e-d0f0-4d25-a523-4286ae993f56) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.524124008Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1567 ,"time":"2023-09-22T23:08:02.524135423Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
15682023-09-22T23:08:02.524ZINFOcrucible: [1] Transition from WaitQuorum to Active
1569 {"msg":"[2] 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 (e64c863e-d0f0-4d25-a523-4286ae993f56) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"{time":"2023-09-22T23:08:02.524230385Z","hostname":""msg"ip-10-150-1-74.us-west-2.compute.internal":,""pid":4291}
1570 Crucible stats registered with UUID: 1da14c8a-8793-41f8-a35d-f1956ee49350","v":{0,"name":""cruciblemsg"":,""level":[2] Transition from New to WaitActive"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.524274762Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1571 ,"{time":""msg":2023-09-22T23:08:02.524270377Z"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"[2] 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 (e64c863e-d0f0-4d25-a523-4286ae993f56) Active Active WaitActive ds_transition to WaitQuorum":,"4291v":0,"}name"
1572 :"crucible","level":30{"msg":"Crucible 1da14c8a-8793-41f8-a35d-f1956ee49350 has session id: 29e54210-8611-44f9-9caa-a5f3863d338c","v":0,"name":"crucible",","level":time"30:"2023-09-22T23:08:02.524325888Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1573 {,"time":""msg":2023-09-22T23:08:02.52434194Z"","hostname":"[2] Transition from WaitActive to WaitQuorum","v":ip-10-150-1-74.us-west-2.compute.internal"0,",pid"":name"4291:"crucible"},
1574 "level":30{"msg":"[0] 1da14c8a-8793-41f8-a35d-f1956ee49350 (1a0bf2e2-a8c9-482a-a779-90cd6a9523de) New New New ds_transition to WaitActive","v":0,"nametest live_repair::repair_test::test_live_repair_deps_after_no_overlap ... ":,ok""
1575 crucibletime"":","level":2023-09-22T23:08:02.524377228Z"30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1576 {,"time":""msg":"2023-09-22T23:08:02.524409929Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","[2] 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 (e64c863e-d0f0-4d25-a523-4286ae993f56) Active Active WaitQuorum ds_transition to Activepid"":4291,"v":}0
1577 ,"name":"crucible","{level":30"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.524443363Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1578 ,"time":"{2023-09-22T23:08:02.524453436Z","hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal"[2] Transition from WaitQuorum to Active",",pid"":v"4291:0,}"
1579 name":"crucible","level":{30"msg":"[0] 1da14c8a-8793-41f8-a35d-f1956ee49350 (1a0bf2e2-a8c9-482a-a779-90cd6a9523de) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.524491447Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1580 ,"time":"{2023-09-22T23:08:02.524505994Z",""hostnamemsg":"":"ip-10-150-1-74.us-west-2.compute.internal","pid":429107959179-ab76-4a4b-8cb7-dfeed5f1ae35 is now active with session: e64c863e-d0f0-4d25-a523-4286ae993f56"}
1581 ,"v":0,"name":"{crucible","level":30"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.524543465Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid",:"4291time":"}
1582 2023-09-22T23:08:02.524552289Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":"4291msg":"}
1583 [1] 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 (e64c863e-d0f0-4d25-a523-4286ae993f56) Active Active Active ds_transition to Faulted","v":0{,"name":"crucible",""level"msg"::"30[0] 1da14c8a-8793-41f8-a35d-f1956ee49350 (1a0bf2e2-a8c9-482a-a779-90cd6a9523de) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.524594479Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1584 ,"time":"2023-09-22T23:08:02.524605152Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":[1] Transition from Active to Faulted"4291,"v":}0
1585 ,"name":"crucible","{level":30"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.524642977Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1586 ,"time":"2023-09-22T23:08:02.524652267Z"{,"hostname":""msgip-10-150-1-74.us-west-2.compute.internal"",:""pid":4291}
1587 [1] 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 (e64c863e-d0f0-4d25-a523-4286ae993f56) Active Faulted Active ds_transition to LiveRepairReady","v":0{,"name":"crucible"",msg":""level":30[1] 1da14c8a-8793-41f8-a35d-f1956ee49350 (1a0bf2e2-a8c9-482a-a779-90cd6a9523de) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.524701444Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,}"
1588 time":"2023-09-22T23:08:02.524711114Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291[1] Transition from Faulted to LiveRepairReady",}"
1589 v":0,"name":"{crucible","level":"30msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.524750377Z","hostname":","timeip-10-150-1-74.us-west-2.compute.internal"",:""pid":2023-09-22T23:08:02.524758101Z"4291,"hostname":"}
1590 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1591 {{""msg"msg":":"[1] 1da14c8a-8793-41f8-a35d-f1956ee49350 (1a0bf2e2-a8c9-482a-a779-90cd6a9523de) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":[1] 07959179-ab76-4a4b-8cb7-dfeed5f1ae35 (e64c863e-d0f0-4d25-a523-4286ae993f56) Active LiveRepairReady Active ds_transition to LiveRepair"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.524797039Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1592 {"msg":"[1] Transition from WaitActive to WaitQuorum,"","timev""::0","name":"2023-09-22T23:08:02.524803168Z"crucible",,""level"hostname"::30"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1593 ,"{time":"2023-09-22T23:08:02.524830127Z"",msg"":hostname"":{"[1] Transition from LiveRepairReady to LiveRepairip-10-150-1-74.us-west-2.compute.internal""",",msgpid""::""4291v":}0
1594 Crucible stats registered with UUID: 8bd67f2e-1fde-4913-85f8-577d1b57f202","name":{,"""vmsg""::crucible"0",","level"name"::"30crucible","[1] 1da14c8a-8793-41f8-a35d-f1956ee49350 (1a0bf2e2-a8c9-482a-a779-90cd6a9523de) Active WaitQuorum New ds_transition to Activelevel"":,"v30":0,"name":"crucible","level":30,"time":",2023-09-22T23:08:02.524870791Z""time":,"","time"2023-09-22T23:08:02.524877159Z:""hostname,""2023-09-22T23:08:02.524873977Z:hostname"":,"""hostname":ip-10-150-1-74.us-west-2.compute.internal"","ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internalpid,"",:"4291"pid}pid
1595 ""::{42914291}"}msg
1596 ":
1597 {""[1] Transition from WaitQuorum to Activemsg"":","v":0,"name":"crucible","Crucible 8bd67f2e-1fde-4913-85f8-577d1b57f202 has session id: 9c94dd0d-4edf-440f-b6fa-7bb2f8c5b2e6level"":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.524929557Z",","hostname"time:"":"2023-09-22T23:08:02.52493391Z"ip-10-150-1-74.us-west-2.compute.internal",,""pidhostname""::"4291}
1598 ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}
1599 "msg":"{"msg":"[2] 1da14c8a-8793-41f8-a35d-f1956ee49350 (1a0bf2e2-a8c9-482a-a779-90cd6a9523de) Active Active New ds_transition to WaitActive","v":0,"[0] 8bd67f2e-1fde-4913-85f8-577d1b57f202 (7024e503-4e36-4db3-924a-6f1bc0972c72) New New New ds_transition to WaitActive"name":,""v"crucible:"0,,""levelname""::"30crucible","level":30,"time":,""time":"2023-09-22T23:08:02.524969316Z","2023-09-22T23:08:02.524971256Z"hostname",:""hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internalpid"":,"4291pid":}4291
1600 }
1601 {"{msg":""msg":"[2] Transition from New to WaitActive","v":[0] Transition from New to WaitActive"0,,""vname""::0","crucible"name",:""level"crucible:"30,"level":30,",time"":"time":"2023-09-22T23:08:02.525001901Z"2023-09-22T23:08:02.525003136Z,"","hostname":hostname"":"ip-10-150-1-74.us-west-2.compute.internal",ip-10-150-1-74.us-west-2.compute.internal""pid,"":pid4291":4291}
1602 }
1603 {"{msg":""msg":"[2] 1da14c8a-8793-41f8-a35d-f1956ee49350 (1a0bf2e2-a8c9-482a-a779-90cd6a9523de) Active Active WaitActive ds_transition to WaitQuorum",[0] 8bd67f2e-1fde-4913-85f8-577d1b57f202 (7024e503-4e36-4db3-924a-6f1bc0972c72) WaitActive New New ds_transition to WaitQuorum""v",:"0v",:"0,name"":"name":crucible"",crucible"",level"":level30":30,,""timetime""::""2023-09-22T23:08:02.525039524Z2023-09-22T23:08:02.52503909Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291}}
1604 
1605 {{"msg"":"msg":"[2] Transition from WaitActive to WaitQuorum"[0] Transition from WaitActive to WaitQuorum,""v,"":v"0:,0","name":name"":"crucible"crucible,"","level":level"30:30,,""timetime""::""2023-09-22T23:08:02.525080441Z2023-09-22T23:08:02.525080778Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291}}
1606 
1607 {"{msg":""msg":"[0] 8bd67f2e-1fde-4913-85f8-577d1b57f202 (7024e503-4e36-4db3-924a-6f1bc0972c72) WaitQuorum New New ds_transition to Active","v"[2] 1da14c8a-8793-41f8-a35d-f1956ee49350 (1a0bf2e2-a8c9-482a-a779-90cd6a9523de) Active Active WaitQuorum ds_transition to Active:"0,,""v"name:"0:","crucible"name",:""level"crucible:"30,"level":30,",time"test live_repair::repair_test::test_live_repair_deps_flush_repair_flush ... time""::""2023-09-22T23:08:02.525120608Z2023-09-22T23:08:02.52511934Z""ok,,""
1608 hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291}}
1609 
1610 {"{msg":""msg":"[0] Transition from WaitQuorum to Active","[2] Transition from WaitQuorum to Activev"":,0",v"":name0":,""cruciblename"":,""levelcrucible"":,30"level":30,","time"time:"":"2023-09-22T23:08:02.52516768Z"2023-09-22T23:08:02.525168756Z",","hostname":hostname"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal,"","pid"pid:":42914291}}
1611 
1612 {"{msg":""msg":"[1] 8bd67f2e-1fde-4913-85f8-577d1b57f202 (7024e503-4e36-4db3-924a-6f1bc0972c72) Active New New ds_transition to WaitActive"1da14c8a-8793-41f8-a35d-f1956ee49350 is now active with session: 1a0bf2e2-a8c9-482a-a779-90cd6a9523de",",v"":v0":,"0,name"":"name":"crucible"crucible,"","level":level30":30,,""timetime""::""2023-09-22T23:08:02.525204481Z2023-09-22T23:08:02.525204004Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal,"","pid"pid:":42914291}}
1613 
1614 {"{msg":""msg":"[1] Transition from New to WaitActive","v":0,"[1] 1da14c8a-8793-41f8-a35d-f1956ee49350 (1a0bf2e2-a8c9-482a-a779-90cd6a9523de) Active Active Active ds_transition to Faulted"name":,""v"crucible:"0,,""levelname""::"30crucible","level":30,"time":","time":"2023-09-22T23:08:02.525238878Z","2023-09-22T23:08:02.525240943Z"hostname",:""hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internalpid"":,"4291pid":}4291
1615 }
1616 {"{msg":""msg":"[1] Transition from Active to Faulted","v":0,"name[1] 8bd67f2e-1fde-4913-85f8-577d1b57f202 (7024e503-4e36-4db3-924a-6f1bc0972c72) Active WaitActive New ds_transition to WaitQuorum"":","cruciblev"":,0","level":name"30:"crucible","level":30,"time":","time"2023-09-22T23:08:02.525272924Z:"","hostname":2023-09-22T23:08:02.525275347Z"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291ip-10-150-1-74.us-west-2.compute.internal","}
1617 pid":4291}{
1618 "msg":"{"msg":"[1] Transition from WaitActive to WaitQuorum","v":[1] 1da14c8a-8793-41f8-a35d-f1956ee49350 (1a0bf2e2-a8c9-482a-a779-90cd6a9523de) Active Faulted Active ds_transition to LiveRepairReady0",","name"v:"":0crucible,"","name":level"":crucible30","level":30,"time":","time":"2023-09-22T23:08:02.525307795Z","2023-09-22T23:08:02.525309366Zhostname"":,""hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"ip-10-150-1-74.us-west-2.compute.internal:"4291,"pid}"
1619 :4291}
1620 {"{msg":""msg":"[1] Transition from Faulted to LiveRepairReady","v":0,"[1] 8bd67f2e-1fde-4913-85f8-577d1b57f202 (7024e503-4e36-4db3-924a-6f1bc0972c72) Active WaitQuorum New ds_transition to Active"name":,""v"crucible:"0,,""levelname""::"30crucible","level":30,"time":","time":"2023-09-22T23:08:02.525342218Z","2023-09-22T23:08:02.525344264Zhostname"":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal"4291,"pid}"
1621 :4291}
1622 {{"msg":""msg":"[1] Transition from WaitQuorum to Active","v":0,"name":[1] 1da14c8a-8793-41f8-a35d-f1956ee49350 (1a0bf2e2-a8c9-482a-a779-90cd6a9523de) Active LiveRepairReady Active ds_transition to LiveRepair"",crucible""v",:"0level,"":30name":"crucible","level":30,"time":","2023-09-22T23:08:02.525381386Z"time":,""hostname":"2023-09-22T23:08:02.525384125Z","hostname"ip-10-150-1-74.us-west-2.compute.internal:"","pid":4291ip-10-150-1-74.us-west-2.compute.internal}"
1623 ,"pid":4291{}
1624 "msg":"{"msg":"[1] Transition from LiveRepairReady to LiveRepair","[2] 8bd67f2e-1fde-4913-85f8-577d1b57f202 (7024e503-4e36-4db3-924a-6f1bc0972c72) Active Active New ds_transition to WaitActive"v":,"0v,"":0name",:""name"crucible:"","crucible"level",:"30level":30,,""timetime""::""2023-09-22T23:08:02.525417405Z2023-09-22T23:08:02.525418188Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291}}
1625 
16262023-09-22T23:08:02.525ZINFOcrucible: [2] Transition from New to WaitActive
16272023-09-22T23:08:02.525ZINFOcrucible: [2] 8bd67f2e-1fde-4913-85f8-577d1b57f202 (7024e503-4e36-4db3-924a-6f1bc0972c72) Active Active WaitActive ds_transition to WaitQuorum
16282023-09-22T23:08:02.525ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
16292023-09-22T23:08:02.525ZINFOcrucible: [2] 8bd67f2e-1fde-4913-85f8-577d1b57f202 (7024e503-4e36-4db3-924a-6f1bc0972c72) Active Active WaitQuorum ds_transition to Active
16302023-09-22T23:08:02.525ZINFOcrucible: [2] Transition from WaitQuorum to Active
16312023-09-22T23:08:02.525ZINFOcrucible: 8bd67f2e-1fde-4913-85f8-577d1b57f202 is now active with session: 7024e503-4e36-4db3-924a-6f1bc0972c72
1632 {{"msg"":"msg":"Crucible stats registered with UUID: 53aa6152-9858-456d-bc14-bd6a95d97fdd","[1] 8bd67f2e-1fde-4913-85f8-577d1b57f202 (7024e503-4e36-4db3-924a-6f1bc0972c72) Active Active Active ds_transition to Faulted"v",:"v0":,0","name":"name":"crucible"crucible",","level"level:":3030,"time":","time":"2023-09-22T23:08:02.525615192Z","2023-09-22T23:08:02.525615311Zhostname"":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal4291"test live_repair::repair_test::test_live_repair_deps_mix ... }
1633 ,ok{"
1634 "pid"msg:":4291"}
1635 [1] Transition from Active to Faulted","v":0{,"name":""cruciblemsg"":,""level":30Crucible 53aa6152-9858-456d-bc14-bd6a95d97fdd has session id: 22d33f84-a075-4645-936b-e06708f0564d","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.525665045Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid",":time4291":"}
1636 2023-09-22T23:08:02.525673207Z","hostname{":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1637 [1] 8bd67f2e-1fde-4913-85f8-577d1b57f202 (7024e503-4e36-4db3-924a-6f1bc0972c72) Active Faulted Active ds_transition to LiveRepairReady"{,"v":0","msg":name"":"crucible","level":30[0] 53aa6152-9858-456d-bc14-bd6a95d97fdd (2276e112-0970-4da5-9cd5-228a41ff83b8) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.525700946Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""pidtime""::"4291}
1638 2023-09-22T23:08:02.525708332Z","hostname{":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291[1] Transition from Faulted to LiveRepairReady"},
1639 "v":0,"{name":"crucible"","msg":level"":30[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.525740048Z","hostname":","timeip-10-150-1-74.us-west-2.compute.internal"":","pid":42912023-09-22T23:08:02.525746265Z"},"
1640 hostname":"{ip-10-150-1-74.us-west-2.compute.internal"","msg"pid:"":4291}
1641 {"[1] 8bd67f2e-1fde-4913-85f8-577d1b57f202 (7024e503-4e36-4db3-924a-6f1bc0972c72) Active LiveRepairReady Active ds_transition to LiveRepairmsg"":","v":0,"name":"crucible","level":30[0] 53aa6152-9858-456d-bc14-bd6a95d97fdd (2276e112-0970-4da5-9cd5-228a41ff83b8) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.525776546Z","hostname":","time":ip-10-150-1-74.us-west-2.compute.internal"","pid":2023-09-22T23:08:02.525782203Z4291","}
1642 hostname":"{ip-10-150-1-74.us-west-2.compute.internal""msg,"":"pid":4291}
1643 [1] Transition from LiveRepairReady to LiveRepair","v{":0,""namemsg""::""crucible","level":[0] Transition from WaitActive to WaitQuorum30","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.525809583Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.525814528Zpid"":,"4291hostname"}:
1644 "ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
16452023-09-22T23:08:02.525ZINFOcrucible: [0] 53aa6152-9858-456d-bc14-bd6a95d97fdd (2276e112-0970-4da5-9cd5-228a41ff83b8) WaitQuorum New New ds_transition to Active
16462023-09-22T23:08:02.525ZINFOcrucible: [0] Transition from WaitQuorum to Active
16472023-09-22T23:08:02.525ZINFOcrucible: [1] 53aa6152-9858-456d-bc14-bd6a95d97fdd (2276e112-0970-4da5-9cd5-228a41ff83b8) Active New New ds_transition to WaitActive
16482023-09-22T23:08:02.525ZINFOcrucible: [1] Transition from New to WaitActive
16492023-09-22T23:08:02.526ZINFOcrucible: [1] 53aa6152-9858-456d-bc14-bd6a95d97fdd (2276e112-0970-4da5-9cd5-228a41ff83b8) Active WaitActive New ds_transition to WaitQuorum
16502023-09-22T23:08:02.526ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
1651 {test live_repair::repair_test::test_live_repair_deps_no_overlap ... "okmsg":"
1652 [1] 53aa6152-9858-456d-bc14-bd6a95d97fdd (2276e112-0970-4da5-9cd5-228a41ff83b8) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.526078009Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
16532023-09-22T23:08:02.526ZINFOcrucible: [1] Transition from WaitQuorum to Active
16542023-09-22T23:08:02.526ZINFOcrucible: [2] 53aa6152-9858-456d-bc14-bd6a95d97fdd (2276e112-0970-4da5-9cd5-228a41ff83b8) Active Active New ds_transition to WaitActive
16552023-09-22T23:08:02.526ZINFOcrucible: [2] Transition from New to WaitActive
1656 {"msg":"[2] 53aa6152-9858-456d-bc14-bd6a95d97fdd (2276e112-0970-4da5-9cd5-228a41ff83b8) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"{time":"2023-09-22T23:08:02.526191636Z","hostname":""msgip-10-150-1-74.us-west-2.compute.internal"",":pid"":4291}
1657 Crucible stats registered with UUID: 25f8ca52-9766-4754-b9c9-b871f5033947","v":{0,"name":""crucible"msg":,""level":30[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:08:02.526242788Z"time":,""hostname":"2023-09-22T23:08:02.526237007Z"ip-10-150-1-74.us-west-2.compute.internal",,""hostname"pid":":4291}ip-10-150-1-74.us-west-2.compute.internal"
1658 ,"pid":4291{}
1659 "msg":"{[2] 53aa6152-9858-456d-bc14-bd6a95d97fdd (2276e112-0970-4da5-9cd5-228a41ff83b8) Active Active WaitQuorum ds_transition to Active"","msg"v"::"0,"name":"crucible","level":30Crucible 25f8ca52-9766-4754-b9c9-b871f5033947 has session id: 8a5194ca-fccb-4e81-8cc4-211ed80171e1","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.526300106Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time":"}
1660 2023-09-22T23:08:02.526306562Z","hostname":"{"ip-10-150-1-74.us-west-2.compute.internal"msg",":"pid":4291[2] Transition from WaitQuorum to Active"}
1661 ,"v":0,"name":"{crucible","level":30"msg":"[0] 25f8ca52-9766-4754-b9c9-b871f5033947 (8d12af84-3563-40e3-992b-37f1da12c218) New New New ds_transition to WaitActive","v":0,"name":"crucible",",level"":time"30:"2023-09-22T23:08:02.526347949Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1662 {"msg",:""time":"2023-09-22T23:08:02.526363452Z","53aa6152-9858-456d-bc14-bd6a95d97fdd is now active with session: 2276e112-0970-4da5-9cd5-228a41ff83b8hostname"":,""v":0,"ip-10-150-1-74.us-west-2.compute.internalname"":,""crucible"pid",":level":429130}
1663 {"msg":","[0] Transition from New to WaitActive"time":","v":02023-09-22T23:08:02.526393598Z",",name"":"hostname":crucible"","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1664 {"msg":","time":"[1] 53aa6152-9858-456d-bc14-bd6a95d97fdd (2276e112-0970-4da5-9cd5-228a41ff83b8) Active Active Active ds_transition to Faulted"2023-09-22T23:08:02.52641593Z",",v"":hostname":"0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible",",pid"":level"4291:30}
1665 {"msg":","[0] 25f8ca52-9766-4754-b9c9-b871f5033947 (8d12af84-3563-40e3-992b-37f1da12c218) WaitActive New New ds_transition to WaitQuorumtime"":","v":2023-09-22T23:08:02.526445725Z"0,,""hostnamename""::""crucible","level":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":4291}
1666 {"msg":"[1] Transition from Active to Faulted","v":,0","timename":"":"crucible","2023-09-22T23:08:02.526473807Z"level":,30"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1667 {,""time"msg"::""2023-09-22T23:08:02.526497011Z"[0] Transition from WaitActive to WaitQuorum",","hostnamev""::"0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible",","pid"level:"4291:30}
1668 {"msg":","time":"[1] 53aa6152-9858-456d-bc14-bd6a95d97fdd (2276e112-0970-4da5-9cd5-228a41ff83b8) Active Faulted Active ds_transition to LiveRepairReady","2023-09-22T23:08:02.52652824Zv"{":,0","hostname"name":""msg":"Crucible stats registered with UUID: 4959cad2-ae2a-45ba-8a58-01c25a8aabf8","v":0ip-10-150-1-74.us-west-2.compute.internal",","name":"pidcrucible"":,"4291level":}30
1669 {"msg":":"crucible","level":[0] 25f8ca52-9766-4754-b9c9-b871f5033947 (8d12af84-3563-40e3-992b-37f1da12c218) WaitQuorum New New ds_transition to Active"30,"v":0,"name":",crucible""time,""level"::"302023-09-22T23:08:02.526568664Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time":"}
1670 ,"time":"2023-09-22T23:08:02.526596462Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal"{,"pid":4291}
1671 "{msg":""msg":"[0] Transition from WaitQuorum to Active2023-09-22T23:08:02.526587201Z""Crucible 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 has session id: fc05e5b8-f2e7-4066-b75b-c280c4b7728c","v",,""vhostname":"ip-10-150-1-74.us-west-2.compute.internal"",:"0pid":,4291"name":"}crucible
1672 ",":level":030{,"name":"crucible"",msg"":"level":30[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.526657703Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time":"}
1673 {,"time":"2023-09-22T23:08:02.526671872Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1674 "{msg":""msg":"[0] 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 (5205f651-d65a-4d53-a1c9-0ce1d9b0247f) New New New ds_transition to WaitActive","v":0,"name":"[1] 53aa6152-9858-456d-bc14-bd6a95d97fdd (2276e112-0970-4da5-9cd5-228a41ff83b8) Active LiveRepairReady Active ds_transition to LiveRepaircrucible"","level2023-09-22T23:08:02.526665027Z",":,"hostname":"30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1675 "v":{0,"name":""crucible"msg":,"",level":"30time":"[1] 25f8ca52-9766-4754-b9c9-b871f5033947 (8d12af84-3563-40e3-992b-37f1da12c218) Active New New ds_transition to WaitActive2023-09-22T23:08:02.526753428Z"",","v"hostname:":0","name":"crucible"ip-10-150-1-74.us-west-2.compute.internal",,""pid"level":30,"time":"2023-09-22T23:08:02.526774667Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1676 ,"time":":2023-09-22T23:08:02.526788239Z4291"{,"}hostname"
1677 {"msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":30","time"msg"::""2023-09-22T23:08:02.526844151Z","[0] Transition from New to WaitActive"hostname":","v"ip-10-150-1-74.us-west-2.compute.internal":,"pid0:"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1678 {"msg":"[1] Transition from New to WaitActive","v":0,"name":",crucible""name,"":"level":crucible"30,"level":30":4291}
1679 ,"time":"2023-09-22T23:08:02.526903228Z,"",time"":"hostname":"2023-09-22T23:08:02.526905424Z"ip-10-150-1-74.us-west-2.compute.internal",,""pid"hostname"::"4291}
1680 ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
1681 {"msg":""[0] 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 (5205f651-d65a-4d53-a1c9-0ce1d9b0247f) WaitActive New New ds_transition to WaitQuorum","v":msg"0:,"name":"crucible","level":30"[1] 25f8ca52-9766-4754-b9c9-b871f5033947 (8d12af84-3563-40e3-992b-37f1da12c218) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.527032198Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1682 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.527081284Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,}
1683 {""msg":"time":"2023-09-22T23:08:02.527046487Z","hostname":"[0] 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 (5205f651-d65a-4d53-a1c9-0ce1d9b0247f) WaitQuorum New New ds_transition to Active"ip-10-150-1-74.us-west-2.compute.internal",",pid":"4291v":}0
1684 ,"name":"crucible","level{":30"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.527135156Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,"pid":4291}
1685 {test live_repair::repair_test::test_live_repair_deps_reads ... "ok
1686 msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30"time":"2023-09-22T23:08:02.527142148Z","hostname,"":"time":"2023-09-22T23:08:02.527220072Z"ip-10-150-1-74.us-west-2.compute.internal","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
16872023-09-22T23:08:02.527ZINFOcrucible: [1] 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 (5205f651-d65a-4d53-a1c9-0ce1d9b0247f) Active New New ds_transition to WaitActive
1688 {,""msg":pid"":4291[1] Transition from New to WaitActive","v"}:
1689 0,"name":"crucible","level":30{"msg":"[1] 25f8ca52-9766-4754-b9c9-b871f5033947 (8d12af84-3563-40e3-992b-37f1da12c218) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible",","level":time":30"2023-09-22T23:08:02.527390111Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1690 {"msg":","time":"2023-09-22T23:08:02.527409003Z","[1] 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 (5205f651-d65a-4d53-a1c9-0ce1d9b0247f) Active WaitActive New ds_transition to WaitQuorum"hostname",":v":0,"name":"crucible","level":30"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1691 ,"time":"2023-09-22T23:08:02.527445248Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal"","msg"pid:"":4291}[1] Transition from WaitQuorum to Active"
16922023-09-22T23:08:02.527ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
1693 {"msg":,""time":"{2023-09-22T23:08:02.527541321Z"[1] 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 (5205f651-d65a-4d53-a1c9-0ce1d9b0247f) Active WaitQuorum New ds_transition to Active",",hostname""":v"msg:":"0,"Crucible stats registered with UUID: 42308f37-4093-47ec-8852-915c974792c6name"":"crucible",","v":level":030,"name":"crucible","level":30"ip-10-150-1-74.us-west-2.compute.internal","pid":,4291"time":}"
1694 2023-09-22T23:08:02.527599882Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal"{,"pid",:"4291"time"msg}":"
1695 {:""msg":2023-09-22T23:08:02.52760841Z"",[1] Transition from WaitQuorum to Active","v":0,"name":""crucible"hostname":,[2] 25f8ca52-9766-4754-b9c9-b871f5033947 (8d12af84-3563-40e3-992b-37f1da12c218) Active Active New ds_transition to WaitActive""level":30,"v":0,"name":"crucible",""level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1696 ,"time":"2023-09-22T23:08:02.527671168Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal"",msg""pid":4291:"}
1697 Crucible 42308f37-4093-47ec-8852-915c974792c6 has session id: f9f486be-e34d-455f-a56e-92cc97480032","v":0{,"name":"msg":""crucible","level":30[2] 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 (5205f651-d65a-4d53-a1c9-0ce1d9b0247f) Active Active New ds_transition to WaitActive","v":0,"name,":""crucible"time",:""level":302023-09-22T23:08:02.52767909Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4291time":"}
1698 2023-09-22T23:08:02.527720666Z",","time":hostname"":{"2023-09-22T23:08:02.527730075Z","ip-10-150-1-74.us-west-2.compute.internal"hostname,"pid":4291}
1699 ":"ip-10-150-1-74.us-west-2.compute.internal",{"pid":4291}
17002023-09-22T23:08:02.527ZINFOcrucible: [0] 42308f37-4093-47ec-8852-915c974792c6 (f7a38787-241b-4e71-9b0d-a228c252b4a7) New New New ds_transition to WaitActive
1701 "{msg":"[2] Transition from New to WaitActive""msg,"":v""msg":":[2] Transition from New to WaitActive"0,"name":,""v":crucible"0,,""level"name"::"30crucible",""level":30[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.527858713Z",,""time"hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.527870493Zpid"":4291,"hostname":"}
1702 :"2023-09-22T23:08:02.527862368Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":"4291msg":"}
1703 [2] 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 (5205f651-d65a-4d53-a1c9-0ce1d9b0247f) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30{ip-10-150-1-74.us-west-2.compute.internal",""msg":"pid":4291}
1704 [2] 25f8ca52-9766-4754-b9c9-b871f5033947 (8d12af84-3563-40e3-992b-37f1da12c218) Active Active WaitActive ds_transition to WaitQuorum","v":0,,""time{""msg":":"2023-09-22T23:08:02.527922675Z","hostname":"[0] 42308f37-4093-47ec-8852-915c974792c6 (f7a38787-241b-4e71-9b0d-a228c252b4a7) WaitActive New New ds_transition to WaitQuorum","v"ip-10-150-1-74.us-west-2.compute.internal:"0,","pid"name":name4291":"crucible","level":30}
1705 {:""msg":"crucible","[2] Transition from WaitActive to WaitQuorum"level":,"30v":0,",name"":"time":"crucible","2023-09-22T23:08:02.527962868Zlevel"":30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1706 ,"time":"2023-09-22T23:08:02.527979279Z"{,",hostname":""time":"2023-09-22T23:08:02.527988094Z","hostname":msg"":"ip-10-150-1-74.us-west-2.compute.internal",[2] Transition from WaitActive to WaitQuorum""pid",:"4291v":0},""name":
1707 {ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1708 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30"msg":"[2] 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 (5205f651-d65a-4d53-a1c9-0ce1d9b0247f) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible",,""time""level"crucible","level":30:30:"2023-09-22T23:08:02.52806053Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal",time"","time:pid"""::"42912023-09-22T23:08:02.528088077Z"2023-09-22T23:08:02.528088997Z"},
1709 ","hostname"hostname":":"{ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",",pid""":msg"pid:"":4291[0] 42308f37-4093-47ec-8852-915c974792c6 (f7a38787-241b-4e71-9b0d-a228c252b4a7) WaitQuorum New New ds_transition to Active"},
1710 4291}
1711 {"v":"0msg"{,":name":""crucible""[2] Transition from WaitQuorum to Active","msg"level"::,"30v":0,"name":"crucible","level":30","[2] 25f8ca52-9766-4754-b9c9-b871f5033947 (8d12af84-3563-40e3-992b-37f1da12c218) Active Active WaitQuorum ds_transition to Active"time":",2023-09-22T23:08:02.528177334Z""v":,0",hostname"":"ip-10-150-1-74.us-west-2.compute.internal","timepid""::"42912023-09-22T23:08:02.528185724Z}"
1712 ,"hostname":"{ip-10-150-1-74.us-west-2.compute.internal""msg":","pid,"":4291}
1713 {"msg":"4959cad2-ae2a-45ba-8a58-01c25a8aabf8 is now active with session: 5205f651-d65a-4d53-a1c9-0ce1d9b0247f","v":0,"name":"crucible","level":30name":"crucible","level":30,"time":"2023-09-22T23:08:02.528251224Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1714 ,"time":"2023-09-22T23:08:02.528266483Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1715 {"msg":"[2] Transition from WaitQuorum to Active","v":0,"name":"{crucible","level":30"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible"[1] 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 (5205f651-d65a-4d53-a1c9-0ce1d9b0247f) Active Active Active ds_transition to Faulted","level":,30"v":0,"name":"crucible","level":,"30time":"2023-09-22T23:08:02.528317695Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",","pid":time4291":"}2023-09-22T23:08:02.52832779Z"
1716 ,"hostname":",ip-10-150-1-74.us-west-2.compute.internal"","{pid":time"msg":"25f8ca52-9766-4754-b9c9-b871f5033947 is now active with session: 8d12af84-3563-40e3-992b-37f1da12c218","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.528366909Z",""hostname":":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.528333417Z,""pid":4291,"hostname"}:
1717 "ip-10-150-1-74.us-west-2.compute.internal","{pid":4291}
1718 4291}
1719 {"msg":"[1] 42308f37-4093-47ec-8852-915c974792c6 (f7a38787-241b-4e71-9b0d-a228c252b4a7) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time"{:"2023-09-22T23:08:02.528425833Z","hostname"":"msg":"ip-10-150-1-74.us-west-2.compute.internal","[1] Transition from Active to Faultedpid"":,4291"v"}:
1720 0,"name"{:"crucible""msg":","level"[1] Transition from New to WaitActive:","30"msg":"[1] 25f8ca52-9766-4754-b9c9-b871f5033947 (8d12af84-3563-40e3-992b-37f1da12c218) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.528480675Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1721 ,"time":"2023-09-22T23:08:02.528491919Z",{"hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg":"[1] Transition from Active to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.528531525Z",","hostname":pid":"4291}ip-10-150-1-74.us-west-2.compute.internal"
1722 v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.528568334Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1723 {{"msg":""msg":","pid":[1] 42308f37-4093-47ec-8852-915c974792c6 (f7a38787-241b-4e71-9b0d-a228c252b4a7) Active WaitActive New ds_transition to WaitQuorum"4291,"v":0},
1724 "[1] 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 (5205f651-d65a-4d53-a1c9-0ce1d9b0247f) Active Faulted Active ds_transition to LiveRepairReadyname":""crucible","level",{"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.528619027Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}"
1725 :30,"time":"2023-09-22T23:08:02.528664006Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1726 {{""msgmsg""::"msg":"[1] 25f8ca52-9766-4754-b9c9-b871f5033947 (8d12af84-3563-40e3-992b-37f1da12c218) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.528712856Z"","hostname":"[1] Transition from WaitActive to WaitQuorum","ip-10-150-1-74.us-west-2.compute.internal"v",:"0pid",:"4291name":"crucible","}level":
1727 ,"{time":"2023-09-22T23:08:02.528730605Z"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","msgpid""::"4291[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.528782371Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1728 {"msg":"[1] 25f8ca52-9766-4754-b9c9-b871f5033947 (8d12af84-3563-40e3-992b-37f1da12c218) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"}name
1729 {30,"time":"2023-09-22T23:08:02.52884036Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1730 {"msg":"[1] 42308f37-4093-47ec-8852-915c974792c6 (f7a38787-241b-4e71-9b0d-a228c252b4a7) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30"msg":","time":"[1] 4959cad2-ae2a-45ba-8a58-01c25a8aabf8 (5205f651-d65a-4d53-a1c9-0ce1d9b0247f) Active LiveRepairReady Active ds_transition to LiveRepair"2023-09-22T23:08:02.528869713Z",","hostname"v"::":"crucible","level":30,"0time":","name"2023-09-22T23:08:02.528891074Z":"crucible",","hostname"level"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1731 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,":time"30:"2023-09-22T23:08:02.528934391Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1732 ,"time":"2023-09-22T23:08:02.528947522Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1733 {{""msg":msg"":[1] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":30","time":"2023-09-22T23:08:02.528998909Z"[2] 42308f37-4093-47ec-8852-915c974792c6 (f7a38787-241b-4e71-9b0d-a228c252b4a7) Active Active New ds_transition to WaitActive",",hostname"":v":"0,"name":"crucible","ip-10-150-1-74.us-west-2.compute.internal"level",":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1734 {"msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible",""level":pid"30:4291}
1735 :30,"time":"2023-09-22T23:08:02.529059687Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1736 ,"time":"2023-09-22T23:08:02.52907541Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
17372023-09-22T23:08:02.529ZINFOcrucible: [2] Transition from New to WaitActive
17382023-09-22T23:08:02.529ZINFOcrucible: [2] 42308f37-4093-47ec-8852-915c974792c6 (f7a38787-241b-4e71-9b0d-a228c252b4a7) Active Active WaitActive ds_transition to WaitQuorum
17392023-09-22T23:08:02.529ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
17402023-09-22T23:08:02.529ZINFOcrucible: [2] 42308f37-4093-47ec-8852-915c974792c6 (f7a38787-241b-4e71-9b0d-a228c252b4a7) Active Active WaitQuorum ds_transition to Active
17412023-09-22T23:08:02.529ZINFOcrucible: [2] Transition from WaitQuorum to Active
17422023-09-22T23:08:02.529ZINFOcrucible: 42308f37-4093-47ec-8852-915c974792c6 is now active with session: f7a38787-241b-4e71-9b0d-a228c252b4a7
17432023-09-22T23:08:02.529ZINFOcrucible: [1] 42308f37-4093-47ec-8852-915c974792c6 (f7a38787-241b-4e71-9b0d-a228c252b4a7) Active Active Active ds_transition to Faulted
1744 test live_repair::repair_test::test_live_repair_deps_repair_flush_repair ... {ok"
1745 msg":"[1] Transition from Active to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.529366203Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
17462023-09-22T23:08:02.529ZINFOcrucible: [1] 42308f37-4093-47ec-8852-915c974792c6 (f7a38787-241b-4e71-9b0d-a228c252b4a7) Active Faulted Active ds_transition to LiveRepairReady
17472023-09-22T23:08:02.529ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
1748 test live_repair::repair_test::test_live_repair_deps_repair_flush ... {ok"
1749 msg":"[1] 42308f37-4093-47ec-8852-915c974792c6 (f7a38787-241b-4e71-9b0d-a228c252b4a7) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.52943764Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
17502023-09-22T23:08:02.529ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
17512023-09-22T23:08:02.529ZWARNcrucible: Write to Extent 1:2:9 under repair
1752 test live_repair::repair_test::test_live_repair_deps_repair_kitchen_sink ... ok
17532023-09-22T23:08:02.529ZINFOcrucible: Crucible stats registered with UUID: f60f7c4d-4385-45f0-b753-8c0c6a7e5045
17542023-09-22T23:08:02.529ZINFOcrucible: Crucible f60f7c4d-4385-45f0-b753-8c0c6a7e5045 has session id: 053465c4-04e6-4b4c-89f2-ff90abc10dd1
17552023-09-22T23:08:02.529ZINFOcrucible: [0] f60f7c4d-4385-45f0-b753-8c0c6a7e5045 (428178a5-4e75-4da2-abf3-724c4e0398b8) New New New ds_transition to WaitActive
17562023-09-22T23:08:02.529ZINFOcrucible: [0] Transition from New to WaitActive
17572023-09-22T23:08:02.530ZINFOcrucible: [0] f60f7c4d-4385-45f0-b753-8c0c6a7e5045 (428178a5-4e75-4da2-abf3-724c4e0398b8) WaitActive New New ds_transition to WaitQuorum
17582023-09-22T23:08:02.530ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
17592023-09-22T23:08:02.530ZINFOcrucible: [0] f60f7c4d-4385-45f0-b753-8c0c6a7e5045 (428178a5-4e75-4da2-abf3-724c4e0398b8) WaitQuorum New New ds_transition to Active
17602023-09-22T23:08:02.530ZINFOcrucible: [0] Transition from WaitQuorum to Active
17612023-09-22T23:08:02.530ZINFOcrucible: [1] f60f7c4d-4385-45f0-b753-8c0c6a7e5045 (428178a5-4e75-4da2-abf3-724c4e0398b8) Active New New ds_transition to WaitActive
1762 {"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30{"msg":"Crucible stats registered with UUID: 996915b6-f4ae-4178-8b00-04c24dd99dc3",","time"v"::"0,"2023-09-22T23:08:02.53024077Z"name":","crucible"hostname":","level":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":4291}
1763 {"msg":"[1] f60f7c4d-4385-45f0-b753-8c0c6a7e5045 (428178a5-4e75-4da2-abf3-724c4e0398b8) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible",",level"":time"30:"2023-09-22T23:08:02.530266697Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1764 ,"time":"2023-09-22T23:08:02.530291418Z"{,"hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg,":""pid":4291}
1765 Crucible 996915b6-f4ae-4178-8b00-04c24dd99dc3 has session id: fa2b84fd-a1e1-4774-b83e-eb1039583139","v":0,"{name":"crucible","level""msg"::"30[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.530338047Z","hostname":","ip-10-150-1-74.us-west-2.compute.internaltime":"","pid":42912023-09-22T23:08:02.530345876Z",}"
1766 hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid{":4291}
1767 "msg":"{"msg":"[0] 996915b6-f4ae-4178-8b00-04c24dd99dc3 (17b68f05-acac-4680-865d-7ea2ec0cd8e8) New New New ds_transition to WaitActive","v":0,"name":"crucible","[1] f60f7c4d-4385-45f0-b753-8c0c6a7e5045 (428178a5-4e75-4da2-abf3-724c4e0398b8) Active WaitQuorum New ds_transition to Active"level",:"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.530396693Z",","time":hostname"":"2023-09-22T23:08:02.530401496Z",ip-10-150-1-74.us-west-2.compute.internal"","hostnamepid""::"4291}
1768 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}{
1769 "msg":"[0] Transition from New to WaitActive{","v":0","msg"name"::""crucible","level"[1] Transition from WaitQuorum to Active":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.530451779Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time,"":pid"":42912023-09-22T23:08:02.530457315Z"}
1770 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal{","pid":4291"msg"}:
1771 "{[0] 996915b6-f4ae-4178-8b00-04c24dd99dc3 (17b68f05-acac-4680-865d-7ea2ec0cd8e8) WaitActive New New ds_transition to WaitQuorum"",msg"":{"v":0,"name":"crucible"","level"msg:[2] f60f7c4d-4385-45f0-b753-8c0c6a7e5045 (428178a5-4e75-4da2-abf3-724c4e0398b8) Active Active New ds_transition to WaitActive30"":","v":0,"name":"Crucible stats registered with UUID: 99477887-f8f6-4caf-a2e3-12b105487a29crucible"",",level"":v"30:0,"name":"crucible","level":,"30time":"2023-09-22T23:08:02.530510623Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1772 ,"time":"2023-09-22T23:08:02.530520639Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291[0] Transition from WaitActive to WaitQuorum","},
1773 "time"v":":{02023-09-22T23:08:02.530527117Z","name,"":hostname"":""crucible"msg",:ip-10-150-1-74.us-west-2.compute.internal"","level"":pid"[2] Transition from New to WaitActive:"304291,"v":}0
1774 ,"name":"crucible","level":30{","msg":time"":"2023-09-22T23:08:02.530573717Z","hostname":"Crucible 99477887-f8f6-4caf-a2e3-12b105487a29 has session id: 60461d52-283b-4d0d-a975-c0f76952364d","v":ip-10-150-1-74.us-west-2.compute.internal"0,","pidname""::,"4291"crucible"time}",:
1775 ""level":302023-09-22T23:08:02.530585733Z"{,"hostname":""msg"ip-10-150-1-74.us-west-2.compute.internal":","pid":4291}
1776 [0] 996915b6-f4ae-4178-8b00-04c24dd99dc3 (17b68f05-acac-4680-865d-7ea2ec0cd8e8) WaitQuorum New New ds_transition to Active","v":0,"{name":","crucible"time","":"msg"level"2023-09-22T23:08:02.53061336Z"::30,""hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291[2] f60f7c4d-4385-45f0-b753-8c0c6a7e5045 (428178a5-4e75-4da2-abf3-724c4e0398b8) Active Active WaitActive ds_transition to WaitQuorum","}v"
1777 :0,,""name":{time"":"crucible",""2023-09-22T23:08:02.530638853Zmsg":level"":"30,"hostname":"[0] 99477887-f8f6-4caf-a2e3-12b105487a29 (fb767d23-0921-4b65-9076-40bc0c47b70a) New New New ds_transition to WaitActive"ip-10-150-1-74.us-west-2.compute.internal",","v"pid:"0:,"4291name":"crucible"},
1778 ",level":"30time":"{2023-09-22T23:08:02.530664794Z",""hostname"msg":":"ip-10-150-1-74.us-west-2.compute.internal"[0] Transition from WaitQuorum to Active,""pid,"","v":time":4291":}2023-09-22T23:08:02.530682882Z"
1779 ,"0hostname":","name":{ip-10-150-1-74.us-west-2.compute.internal"","pid":"4291msg"crucible}:
1780 "","{level":[2] Transition from WaitActive to WaitQuorum"30"msg":","v"[0] Transition from New to WaitActive":,"v":00,",name":""crucible"name,"":"level":30crucible","level":30,"time":","2023-09-22T23:08:02.530722168Z"time":","2023-09-22T23:08:02.530732449Zhostname":"","hostname":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",,""pid":pid"4291:}4291
1781 ,}"{
1782 time""msg":":"{2023-09-22T23:08:02.530734966Z",[0] 99477887-f8f6-4caf-a2e3-12b105487a29 (fb767d23-0921-4b65-9076-40bc0c47b70a) WaitActive New New ds_transition to WaitQuorum""","hostname"msg":v"::0",""name":"crucible","level":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":[1] 996915b6-f4ae-4178-8b00-04c24dd99dc3 (17b68f05-acac-4680-865d-7ea2ec0cd8e8) Active New New ds_transition to WaitActive"4291,"v":}
1783 {,"time":"2023-09-22T23:08:02.530780294Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1784 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30"msg":","time":"2023-09-22T23:08:02.530874301Z","[2] f60f7c4d-4385-45f0-b753-8c0c6a7e5045 (428178a5-4e75-4da2-abf3-724c4e0398b8) Active Active WaitQuorum ds_transition to Activehostname":"","ip-10-150-1-74.us-west-2.compute.internal"v,""pid"::42910}
1785 ,{"msg":"[0] 99477887-f8f6-4caf-a2e3-12b105487a29 (fb767d23-0921-4b65-9076-40bc0c47b70a) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.530904524Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1786 {"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible"","name"level"::"30crucible","level":30,"time":"2023-09-22T23:08:02.530928785Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
17872023-09-22T23:08:02.530ZINFOcrucible: ,[1] 99477887-f8f6-4caf-a2e3-12b105487a29 (fb767d23-0921-4b65-9076-40bc0c47b70a) Active New New ds_transition to WaitActive
1788 {"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30"time":"2023-09-22T23:08:02.530932093Z",","hostname":"time":"ip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:08:02.530974766Z"",","hostnamepid""::"4291ip-10-150-1-74.us-west-2.compute.internal","}pid":
1789 4291{}
1790 "msg":"{0[2] Transition from WaitQuorum to Active""msg,"":"v",:"0,"name"name"::"[1] 99477887-f8f6-4caf-a2e3-12b105487a29 (fb767d23-0921-4b65-9076-40bc0c47b70a) Active WaitActive New ds_transition to WaitQuorum"","cruciblev":crucible"0",,","name"level":"":level"crucible":3030,"level":30,"time":"2023-09-22T23:08:02.5310361Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1791 {"msg":","[1] Transition from WaitActive to WaitQuorum",time"":"v":0,",name2023-09-22T23:08:02.531035568Z""":",crucible"time""hostname"::",""level":2023-09-22T23:08:02.531035566Z"30ip-10-150-1-74.us-west-2.compute.internal",,""pid":hostname"4291:"}
1792 ip-10-150-1-74.us-west-2.compute.internal",",time":""{pid"2023-09-22T23:08:02.531068051Z:"4291","msg"hostname":":"}
1793 ip-10-150-1-74.us-west-2.compute.internal"f60f7c4d-4385-45f0-b753-8c0c6a7e5045 is now active with session: 428178a5-4e75-4da2-abf3-724c4e0398b8","pid",:"4291v":}0
1794 {,"{name":""msg"crucible:""","msg"level"::"[1] 99477887-f8f6-4caf-a2e3-12b105487a29 (fb767d23-0921-4b65-9076-40bc0c47b70a) Active WaitQuorum New ds_transition to Active"30,"[1] Transition from New to WaitActive"v":0,,""name":"v"crucible":,"0level":30,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.531118192Z",",hostname":""time"ip-10-150-1-74.us-west-2.compute.internal":,""pid":4291}
1795 2023-09-22T23:08:02.531110966Z"{,""msg":",hostname"":[1] Transition from WaitQuorum to Active"time,""v""::"0,"name":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.531122598Z","crucible",pid","":level"4291hostname"::}"30
1796 ip-10-150-1-74.us-west-2.compute.internal","pid":,4291"time":"{}
1797 2023-09-22T23:08:02.531157819Z"","msghostname":""{:"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}"
1798 msg":"{[1] f60f7c4d-4385-45f0-b753-8c0c6a7e5045 (428178a5-4e75-4da2-abf3-724c4e0398b8) Active Active Active ds_transition to Faulted"",msg":""v":0,"[1] 996915b6-f4ae-4178-8b00-04c24dd99dc3 (17b68f05-acac-4680-865d-7ea2ec0cd8e8) Active WaitActive New ds_transition to WaitQuorum"name[2] 99477887-f8f6-4caf-a2e3-12b105487a29 (fb767d23-0921-4b65-9076-40bc0c47b70a) Active Active New ds_transition to WaitActive,""",v":""v"crucible"::00,",,name""":namelevel""":crucible:30"","crucible"level":30,"level":30,"time":"2023-09-22T23:08:02.531206783Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1799 ,,""{time"time""::""msg":"2023-09-22T23:08:02.531204735Z"2023-09-22T23:08:02.531208731Z"[2] Transition from New to WaitActive",,,"""v"hostname"hostname"::0:",""name":"crucible"ip-10-150-1-74.us-west-2.compute.internal,"ip-10-150-1-74.us-west-2.compute.internal"",,level":""pid"pid"30::42914291}}
1800 ,"
1801 {time":"2023-09-22T23:08:02.53125268Z","hostname":""{msgip-10-150-1-74.us-west-2.compute.internal"","":pid":"4291msg"}:
1802 "[1] Transition from WaitActive to WaitQuorum"{,"[1] Transition from Active to Faultedv""":msg0":,",""name"v"::[2] 99477887-f8f6-4caf-a2e3-12b105487a29 (fb767d23-0921-4b65-9076-40bc0c47b70a) Active Active WaitActive ds_transition to WaitQuorum""0,crucible"",,"v"name":"0level:,""name""crucible":",:"crucible"level30":,"30level":30,"time":"2023-09-22T23:08:02.531309043Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1803 {"msg":"[2] Transition from WaitActive to WaitQuorum",",,v"":"time"0time",:""name":":"crucible"2023-09-22T23:08:02.531307493Z","2023-09-22T23:08:02.531308913Zlevel"":,,"30"hostname":"hostname":"ip-10-150-1-74.us-west-2.compute.internal",ip-10-150-1-74.us-west-2.compute.internal"",pid",":time"":"pid"42912023-09-22T23:08:02.531341832Z":,}"
1804 hostname":"4291ip-10-150-1-74.us-west-2.compute.internal","}pid{
1805 "":4291msg":}"
1806 {{"msg":""[1] f60f7c4d-4385-45f0-b753-8c0c6a7e5045 (428178a5-4e75-4da2-abf3-724c4e0398b8) Active Faulted Active ds_transition to LiveRepairReady"msg":","[2] 99477887-f8f6-4caf-a2e3-12b105487a29 (fb767d23-0921-4b65-9076-40bc0c47b70a) Active Active WaitQuorum ds_transition to Active"v",":v":00,,""[1] 996915b6-f4ae-4178-8b00-04c24dd99dc3 (17b68f05-acac-4680-865d-7ea2ec0cd8e8) Active WaitQuorum New ds_transition to Activename"":name,"":crucible""",crucible""v",:"level0level"",:"30name"::"30crucible","level":30,"time":"2023-09-22T23:08:02.53139852Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1807 {"msg":"[2] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":","time"2023-09-22T23:08:02.531400922Z":","hostname",:2023-09-22T23:08:02.531403555Z""",time":"ip-10-150-1-74.us-west-2.compute.internal""2023-09-22T23:08:02.531417101Z",",hostname"pid"::"4291"hostname":"}ip-10-150-1-74.us-west-2.compute.internal"
1808 ,ip-10-150-1-74.us-west-2.compute.internal"","pid"pid"{::42914291}
1809 }"{
1810 "msg"msg"::""{[1] Transition from Faulted to LiveRepairReady99477887-f8f6-4caf-a2e3-12b105487a29 is now active with session: fb767d23-0921-4b65-9076-40bc0c47b70a""",",msg"":v"v""::00,"[1] Transition from WaitQuorum to Active"name",,:""crucible"","vlevel":name30""::"0crucible",","name"level":,""time":":crucible"302023-09-22T23:08:02.531477252Z,"","level"hostname:":"30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1811 {"msg":"[1] 99477887-f8f6-4caf-a2e3-12b105487a29 (fb767d23-0921-4b65-9076-40bc0c47b70a) Active Active Active ds_transition to Faulted","v":0,"name":","crucible",time"":level":"30,"time":2023-09-22T23:08:02.531487963Z"","hostname"2023-09-22T23:08:02.531492595Z":",,""ip-10-150-1-74.us-west-2.compute.internal"hostname",time":":"pid""2023-09-22T23:08:02.531507762Z":,"4291ip-10-150-1-74.us-west-2.compute.internal"hostname":},"
1812 ip-10-150-1-74.us-west-2.compute.internal"","pidpid"":{4291:}
1813 "4291msg":{"}"
1814 msg":"[1] f60f7c4d-4385-45f0-b753-8c0c6a7e5045 (428178a5-4e75-4da2-abf3-724c4e0398b8) Active LiveRepairReady Active ds_transition to LiveRepair"[1] Transition from Active to Faulted",",{v":"0v"","msg":name"0::",crucible"",name"":""level":30crucible","level":30[2] 996915b6-f4ae-4178-8b00-04c24dd99dc3 (17b68f05-acac-4680-865d-7ea2ec0cd8e8) Active Active New ds_transition to WaitActive","v":0,",time":""name":2023-09-22T23:08:02.531562073Z"","crucible"hostname":","level"ip-10-150-1-74.us-west-2.compute.internal":,"30pid":4291}
1815 ,"{time":""msg":"2023-09-22T23:08:02.531565119Z","hostname":"[1] 99477887-f8f6-4caf-a2e3-12b105487a29 (fb767d23-0921-4b65-9076-40bc0c47b70a) Active Faulted Active ds_transition to LiveRepairReady","v":0ip-10-150-1-74.us-west-2.compute.internal",",,""nametime":""crucible"pid":",":level":4291302023-09-22T23:08:02.531578079Z"}
1816 ,"hostname":","time":"{2023-09-22T23:08:02.531603089Z"ip-10-150-1-74.us-west-2.compute.internal,"",hostname"":pid""":ip-10-150-1-74.us-west-2.compute.internal4291msg""}:"
1817 ,"[1] Transition from LiveRepairReady to LiveRepair"pid":4291,"}v{"
1818 :"0{,msg""msgname":"""::""crucible"[1] Transition from Faulted to LiveRepairReady",,"[2] Transition from New to WaitActive"level"":v"30:,0",v"":name":"0crucible",,""level":name"30:"crucible","level":30,"time":",2023-09-22T23:08:02.531657302Z"",time"":hostname":""ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.531649138Z"pid":4291,}"
1819 hostname":{,"""msg":"time":ip-10-150-1-74.us-west-2.compute.internal"","pid":2023-09-22T23:08:02.531661049Z4291[1] 99477887-f8f6-4caf-a2e3-12b105487a29 (fb767d23-0921-4b65-9076-40bc0c47b70a) Active LiveRepairReady Active ds_transition to LiveRepair""},"
1820 v":,0","namehostname"":":"crucible","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1821 ,"time":"2023-09-22T23:08:02.531698494Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1822 "msg{":""msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":30[2] 996915b6-f4ae-4178-8b00-04c24dd99dc3 (17b68f05-acac-4680-865d-7ea2ec0cd8e8) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible",","level"time":":302023-09-22T23:08:02.531720553Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1823 ,"time":"2023-09-22T23:08:02.531729593Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
18242023-09-22T23:08:02.531ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
18252023-09-22T23:08:02.531ZINFOcrucible: [2] 996915b6-f4ae-4178-8b00-04c24dd99dc3 (17b68f05-acac-4680-865d-7ea2ec0cd8e8) Active Active WaitQuorum ds_transition to Active
18262023-09-22T23:08:02.531ZINFOcrucible: [2] Transition from WaitQuorum to Active
18272023-09-22T23:08:02.531ZINFOcrucible: 996915b6-f4ae-4178-8b00-04c24dd99dc3 is now active with session: 17b68f05-acac-4680-865d-7ea2ec0cd8e8
1828 {"msg":"[1] 996915b6-f4ae-4178-8b00-04c24dd99dc3 (17b68f05-acac-4680-865d-7ea2ec0cd8e8) Active Active Active ds_transition to Faulted","v"test live_repair::repair_test::test_live_repair_deps_repair_other ... :ok0
1829 ,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.53195126Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1830 {"msg":"[1] Transition from Active to Faulted","v":0,"name":"crucible","level":30test live_repair::repair_test::test_live_repair_deps_repair_rafter ... ok
1831 ,"time":"2023-09-22T23:08:02.531991722Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
18322023-09-22T23:08:02.532ZINFOcrucible: [1] 996915b6-f4ae-4178-8b00-04c24dd99dc3 (17b68f05-acac-4680-865d-7ea2ec0cd8e8) Active Faulted Active ds_transition to LiveRepairReady
18332023-09-22T23:08:02.532ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
18342023-09-22T23:08:02.532ZINFOcrucible: [1] 996915b6-f4ae-4178-8b00-04c24dd99dc3 (17b68f05-acac-4680-865d-7ea2ec0cd8e8) Active LiveRepairReady Active ds_transition to LiveRepair
18352023-09-22T23:08:02.532ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
1836 test live_repair::repair_test::test_live_repair_deps_repair_overlappers ... ok
18372023-09-22T23:08:02.532ZINFOcrucible: Crucible stats registered with UUID: 1c3e124b-d8c5-4c0d-9558-10aa827e84d2
18382023-09-22T23:08:02.532ZINFOcrucible: Crucible 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 has session id: 27b5fb3d-5800-4e86-916d-15f076b1c956
18392023-09-22T23:08:02.532ZINFOcrucible: [0] 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 (66cadaf0-27ad-428b-882a-29ad9e6a6b14) New New New ds_transition to WaitActive
18402023-09-22T23:08:02.532ZINFOcrucible: [0] Transition from New to WaitActive
18412023-09-22T23:08:02.532ZINFOcrucible: [0] 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 (66cadaf0-27ad-428b-882a-29ad9e6a6b14) WaitActive New New ds_transition to WaitQuorum
18422023-09-22T23:08:02.532ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
1843 {"msg":"[0] 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 (66cadaf0-27ad-428b-882a-29ad9e6a6b14) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.532669535Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",{"pid":4291}
1844 "msg":"{"Crucible stats registered with UUID: 42df254b-7101-46d2-b33a-70cc63b38d0emsg"":","v":[0] Transition from WaitQuorum to Active"0,",v"":name":0","crucible"name":,""level"crucible":,"30level":30,"time":"2023-09-22T23:08:02.532719121Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal,"","time"pid":":42912023-09-22T23:08:02.532718343Z"}
1845 ,"hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291"msg":"}
1846 [1] 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 (66cadaf0-27ad-428b-882a-29ad9e6a6b14) Active New New ds_transition to WaitActive","v":0,"name":"{crucible","level":30"msg":"Crucible 42df254b-7101-46d2-b33a-70cc63b38d0e has session id: 528bdb1e-dc63-4b25-9f0d-084a49b41225","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.53276976Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1847 {"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":,""time"2023-09-22T23:08:02.532811252Z":","hostname":2023-09-22T23:08:02.532781597Z"{"ip-10-150-1-74.us-west-2.compute.internal",","hostname"pid"":msg":"Crucible stats registered with UUID: 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8"4291,"v":}
1848 0,"name":"crucible"{,"level":30"msg":"[1] 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 (66cadaf0-27ad-428b-882a-29ad9e6a6b14) Active WaitActive New ds_transition to WaitQuorum","v":0,"name:":""crucible","levelip-10-150-1-74.us-west-2.compute.internal"":30,"pid":4291}
1849 ,"time":"2023-09-22T23:08:02.532861447Z"{,","hostname":time"""ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1850 :{"2023-09-22T23:08:02.532872826Z"msg","msg:":""Crucible 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 has session id: f9ed11e1-1134-4e8a-a7b1-a56e65070b2e","v":0,"name":"crucible","[0] 42df254b-7101-46d2-b33a-70cc63b38d0e (4719a10d-d68a-415f-8de5-4a3581ecf476) New New New ds_transition to WaitActive"level":30,"v"":0hostname",":"name":"crucible","ip-10-150-1-74.us-west-2.compute.internal"level":,"30pid":4291}
1851 ,"time":"2023-09-22T23:08:02.53292596Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal"","msg"pid"::"4291}
1852 {[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30"msg":"[0] 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 (db630a11-b750-4e3d-a391-6686a58e1487) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":,"30time":",2023-09-22T23:08:02.53302444Z"","time"hostname"::""2023-09-22T23:08:02.532935962Z"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4291hostname":"}
1853 ip-10-150-1-74.us-west-2.compute.internal,"","time"pid"{:":2023-09-22T23:08:02.53304488Z4291"","msg"hostname}:""[1] 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 (66cadaf0-27ad-428b-882a-29ad9e6a6b14) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30:"ip-10-150-1-74.us-west-2.compute.internal",",pid":4291"time"}:
1854 {
1855 {"msg":""msg":"[0] Transition from New to WaitActive"[0] Transition from New to WaitActive",","vv"":0:,"name":0","crucible"name,""level:"":crucible"30,""level":302023-09-22T23:08:02.533085908Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,"time"4291:"}2023-09-22T23:08:02.533167818Z"
1856 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}{,
1857 "msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30{"msg":","time":"2023-09-22T23:08:02.533204331Z","hostname":"[0] 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 (db630a11-b750-4e3d-a391-6686a58e1487) WaitActive New New ds_transition to WaitQuorum","v":0ip-10-150-1-74.us-west-2.compute.internal,"",name"":"pidcrucible"":,"4291level":30}
1858 {","timemsg":":"[2] 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 (66cadaf0-27ad-428b-882a-29ad9e6a6b14) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30"2023-09-22T23:08:02.533236028Z","hostname":",ip-10-150-1-74.us-west-2.compute.internal"","time"pid:"":42912023-09-22T23:08:02.533260381Z}"
1859 {""msg":"time":[0] Transition from WaitActive to WaitQuorum"","v":0,"name":"2023-09-22T23:08:02.533171419Z"crucible","level":30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1860 ,"time":"2023-09-22T23:08:02.533335293Z"{,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":"4291,msg"}":
1861 {hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1862 "msg":"{[0] 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 (db630a11-b750-4e3d-a391-6686a58e1487) WaitQuorum New New ds_transition to Active"","msg"v:"":0,"name":"crucible"[2] Transition from New to WaitActive","level":,30"v":0,"name":"crucible"","level":30,"time":"[0] 42df254b-7101-46d2-b33a-70cc63b38d0e (4719a10d-d68a-415f-8de5-4a3581ecf476) WaitActive New New ds_transition to WaitQuorum"2023-09-22T23:08:02.533418661Z","hostname":",ip-10-150-1-74.us-west-2.compute.internal"",v"":pid":42910}
1863 ,"time":"2023-09-22T23:08:02.533425542Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1864 {{"msg":""msg":"[0] Transition from WaitQuorum to Active","v":[2] 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 (66cadaf0-27ad-428b-882a-29ad9e6a6b14) Active Active WaitActive ds_transition to WaitQuorum"0,,""name"v":":crucible"0,",level":",30name":""crucible"name",:""level":crucible"30,"level":,"30time":"2023-09-22T23:08:02.533479127Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1865 {"msg":","time":"[1] 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 (db630a11-b750-4e3d-a391-6686a58e1487) Active New New ds_transition to WaitActive","v":02023-09-22T23:08:02.533486207Z","name":","crucible","hostnamelevel""::30","time":"2023-09-22T23:08:02.533510962Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
18662023-09-22T23:08:02.533ZINFOcrucible: [1] Transition from New to WaitActive
1867 {"msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291[1] 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 (db630a11-b750-4e3d-a391-6686a58e1487) Active WaitActive New ds_transition to WaitQuorum"},
1868 "v":0,"name":"crucible","level":30{"msg":","[2] Transition from WaitActive to WaitQuorum"time,,"v":0,"name":"crucible","level":30","timetime":"":"2023-09-22T23:08:02.533574869Z"2023-09-22T23:08:02.533559808Z",","hostname"hostname:":""ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal"4291,"}pid"
1869 {":"2023-09-22T23:08:02.533489972Z""msg":","[1] Transition from WaitActive to WaitQuorum"hostname",:""v":0,"name":"crucible"ip-10-150-1-74.us-west-2.compute.internal,""level":,30"pid":4291}
1870 ,"time":"2023-09-22T23:08:02.533664139Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1871 {:4291}
1872 "msg":"{[1] 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 (db630a11-b750-4e3d-a391-6686a58e1487) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible"","level":msg30":"[2] 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 (66cadaf0-27ad-428b-882a-29ad9e6a6b14) Active Active WaitQuorum ds_transition to Active"{,"v":0,","name":time"":"2023-09-22T23:08:02.53373704Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
18732023-09-22T23:08:02.533ZINFOcrucible: [1] Transition from WaitQuorum to Active
1874 {"msg":"[2] 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 (db630a11-b750-4e3d-a391-6686a58e1487) Active Active New ds_transition to WaitActive"msg":","v":0,"name"[0] Transition from WaitActive to WaitQuorum"crucible","level":30","v":0,,""name":time"":"crucible","2023-09-22T23:08:02.533819725Zlevel"":,30"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1875 {"msg":"[2] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30:"crucible","level",:"30time":"2023-09-22T23:08:02.533847248Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time"}:
1876 ","time":"2023-09-22T23:08:02.533874283Z2023-09-22T23:08:02.533880452Z"",",hostname":""{hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291ip-10-150-1-74.us-west-2.compute.internal"}",
1877 "msg"pid"::4291"{}
1878 [0] 42df254b-7101-46d2-b33a-70cc63b38d0e (4719a10d-d68a-415f-8de5-4a3581ecf476) WaitQuorum New New ds_transition to Active""msg":","v":[2] Transition from New to WaitActive{"0,"v":,0",""msgname"name":"::"""cruciblecrucible"","level":,"30level":1c3e124b-d8c5-4c0d-9558-10aa827e84d2 is now active with session: 66cadaf0-27ad-428b-882a-29ad9e6a6b14"30,"v":0,"name":"crucible","level":,"30time":"2023-09-22T23:08:02.533939541Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1879 {"msg":","[2] 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 (db630a11-b750-4e3d-a391-6686a58e1487) Active Active WaitActive ds_transition to WaitQuorum"time",":v":,"0","timename":""2023-09-22T23:08:02.533941961Z":crucible"",",level"2023-09-22T23:08:02.533948321Z"hostname",:"30"hostname":":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",","pidpid,""time":":":2023-09-22T23:08:02.53397746Z"42914291,"}hostname
1880 ":"}
1881 {ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1882 "{msg{":""msg":""msg"[2] Transition from WaitActive to WaitQuorum":","v":0,"name":"[1] 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 (66cadaf0-27ad-428b-882a-29ad9e6a6b14) Active Active Active ds_transition to Faultedcrucible"",",level"[0] Transition from WaitQuorum to Active"":v30",:"0v",:"0name":,"","cruciblename"time":"":2023-09-22T23:08:02.534019213Z"",,""cruciblelevelhostname":"":"30ip-10-150-1-74.us-west-2.compute.internal",,""pid":level4291":}
1883 30{"msg":"[2] 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 (db630a11-b750-4e3d-a391-6686a58e1487) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.534038211Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:08:02.53405273Z,"","",timehostname""pid":":"":ip-10-150-1-74.us-west-2.compute.internal"4291,"2023-09-22T23:08:02.534044735Z"pid":,4291"}}hostname"
1884 
1885 {:""msg":"ip-10-150-1-74.us-west-2.compute.internal"[2] Transition from WaitQuorum to Active,""{pid,""v"::"04291msg,"":name":"}"crucible"
1886 [1] Transition from Active to Faulted","level":,"30v"{:0,"name":"crucible"","msg":level",":time30":""2023-09-22T23:08:02.534107563Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1887 [1] 42df254b-7101-46d2-b33a-70cc63b38d0e (4719a10d-d68a-415f-8de5-4a3581ecf476) Active New New ds_transition to WaitActive","{v",""msg"time"::0",:""name":"7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 is now active with session: db630a11-b750-4e3d-a391-6686a58e1487"crucible,2023-09-22T23:08:02.534116736Z"""v,"":,0"hostname":,level"""name:":"30crucible"ip-10-150-1-74.us-west-2.compute.internal,""level":,30"pid":4291}
1888 ,"time":"{2023-09-22T23:08:02.534155129Z","hostname":""msg"ip-10-150-1-74.us-west-2.compute.internal":,""pid":,4291"}
1889 time":"[1] 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 (66cadaf0-27ad-428b-882a-29ad9e6a6b14) Active Faulted Active ds_transition to LiveRepairReady"{2023-09-22T23:08:02.534152096Z",","msg":""v"hostname"::"0[1] 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 (db630a11-b750-4e3d-a391-6686a58e1487) Active Active Active ds_transition to Faulted",",ip-10-150-1-74.us-west-2.compute.internal""v":,name":""0pid,""crucible"name"::"4291,crucible"","levellevel":"30}:
1890 30,"time":"2023-09-22T23:08:02.534201702Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1891 "msg{":""msg":"[1] Transition from New to WaitActive"[1] Transition from Active to Faulted",","v":v"0:,"0name",:",crucible"",""level"name"::time""30:"crucible","2023-09-22T23:08:02.534207944Z"level":,30"hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.534234224Z",,""hostname":"pid":4291ip-10-150-1-74.us-west-2.compute.internal","pid":4291}}
1892 
1893 ,"{time{"":msg":"""msg":"2023-09-22T23:08:02.534239249Z","[1] Transition from Faulted to LiveRepairReady"hostname",[1] 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 (db630a11-b750-4e3d-a391-6686a58e1487) Active Faulted Active ds_transition to LiveRepairReady":,""v":"0v",":ip-10-150-1-74.us-west-2.compute.internalname":""crucible",,""0pid"level"::4291,"30name"}:
1894 ","crucible"time":","level{"2023-09-22T23:08:02.534282787Z":,"30hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1895 {"[1] 42df254b-7101-46d2-b33a-70cc63b38d0e (4719a10d-d68a-415f-8de5-4a3581ecf476) Active WaitActive New ds_transition to WaitQuorummsg":"","[1] Transition from Faulted to LiveRepairReady"v,""v"::0,"0,name"",:"time"crucible":,""level":"2023-09-22T23:08:02.534295614Z30"name":","crucible"hostname",:""level":30,"time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.534321142Z"pid",":hostname":4291"}ip-10-150-1-74.us-west-2.compute.internal
1896 ","pid":4291}
1897 {{,"""msg"msg"::""time":"2023-09-22T23:08:02.534327831Z","[1] 7c6b2b7f-d4b5-4ec2-a67b-4f02968436c8 (db630a11-b750-4e3d-a391-6686a58e1487) Active LiveRepairReady Active ds_transition to LiveRepair"hostname",[1] 1c3e124b-d8c5-4c0d-9558-10aa827e84d2 (66cadaf0-27ad-428b-882a-29ad9e6a6b14) Active LiveRepairReady Active ds_transition to LiveRepair":""v":,0"ip-10-150-1-74.us-west-2.compute.internal,"vname":""",:crucible""0pid,,"""level"::304291name":"crucible}"
1898 ,,""time":"level":2023-09-22T23:08:02.534371854Z"30,"{hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291"}
1899 msg":"{"msg":"[1] Transition from WaitActive to WaitQuorum"[1] Transition from LiveRepairReady to LiveRepair",,""v":v"0:,,"0"name"time"::,""cruciblename""":,""level":2023-09-22T23:08:02.534383616Z"30crucible",","hostname":"level":30ip-10-150-1-74.us-west-2.compute.internal","pid":,"4291time":"}2023-09-22T23:08:02.534410964Z"
1900 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
1901 "msg":","time"[1] Transition from LiveRepairReady to LiveRepair":","v":2023-09-22T23:08:02.534415194Z0","name":,""crucible"hostname":,""level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1902 {"msg":","time":"2023-09-22T23:08:02.534443403Z","hostname":[1] 42df254b-7101-46d2-b33a-70cc63b38d0e (4719a10d-d68a-415f-8de5-4a3581ecf476) Active WaitQuorum New ds_transition to Active"","v":0ip-10-150-1-74.us-west-2.compute.internal",",name"":"pid"crucible":,"4291level":30}
1903 ,"time":"2023-09-22T23:08:02.534473845Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
19042023-09-22T23:08:02.534ZINFOcrucible: [1] Transition from WaitQuorum to Active
1905 {test impacted_blocks::test::union_produces_greater_than_or_equal_block_count ... "okmsg"
1906 :"[2] 42df254b-7101-46d2-b33a-70cc63b38d0e (4719a10d-d68a-415f-8de5-4a3581ecf476) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.534576834Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1907 {"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"test live_repair::repair_test::test_live_repair_deps_repair_rspan_left ... 2023-09-22T23:08:02.534618148Zok
1908 ","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
19092023-09-22T23:08:02.534ZINFOcrucible: [2] 42df254b-7101-46d2-b33a-70cc63b38d0e (4719a10d-d68a-415f-8de5-4a3581ecf476) Active Active WaitActive ds_transition to WaitQuorum
1910 {"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.534714472Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":test live_repair::repair_test::test_live_repair_deps_repair_read ... 4291ok
1911 }
19122023-09-22T23:08:02.534ZINFOcrucible: [2] 42df254b-7101-46d2-b33a-70cc63b38d0e (4719a10d-d68a-415f-8de5-4a3581ecf476) Active Active WaitQuorum ds_transition to Active
19132023-09-22T23:08:02.534ZINFOcrucible: [2] Transition from WaitQuorum to Active
19142023-09-22T23:08:02.534ZINFOcrucible: 42df254b-7101-46d2-b33a-70cc63b38d0e is now active with session: 4719a10d-d68a-415f-8de5-4a3581ecf476
19152023-09-22T23:08:02.534ZINFOcrucible: [1] 42df254b-7101-46d2-b33a-70cc63b38d0e (4719a10d-d68a-415f-8de5-4a3581ecf476) Active Active Active ds_transition to Faulted
19162023-09-22T23:08:02.534ZINFOcrucible: [1] Transition from Active to Faulted
1917 {"msg":"[1] 42df254b-7101-46d2-b33a-70cc63b38d0e (4719a10d-d68a-415f-8de5-4a3581ecf476) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30{"msg":","time":"Crucible stats registered with UUID: 40de3eeb-c819-466e-88b4-e1712ed524bc"2023-09-22T23:08:02.535006432Z",",v"":hostname":0","name":"ip-10-150-1-74.us-west-2.compute.internal"crucible,""pid",:"4291level":}30
1918 {"msg":"[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.535037493Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""timepid""::"42912023-09-22T23:08:02.535054287Z"},
1919 "hostname":"ip-10-150-1-74.us-west-2.compute.internal","{pid":4291}
1920 "msg":"{Crucible 40de3eeb-c819-466e-88b4-e1712ed524bc has session id: f1573e91-5db6-4b3a-8324-135a00e99a0d"",msg""v"::"0,"name":"crucible","level":30[1] 42df254b-7101-46d2-b33a-70cc63b38d0e (4719a10d-d68a-415f-8de5-4a3581ecf476) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.535106376Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""time"pid"::"42912023-09-22T23:08:02.535113314Z"}
1921 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal",{"pid":4291}
1922 "msg":"{"[0] 40de3eeb-c819-466e-88b4-e1712ed524bc (5b45a21e-af03-4c26-80d4-f98a411c8304) New New New ds_transition to WaitActivemsg"":","v":0[1] Transition from LiveRepairReady to LiveRepair",",name"":"v":crucible"0,","level"name:":30"crucible","level":30,"time,"":"time":"2023-09-22T23:08:02.53516921Z"2023-09-22T23:08:02.535171953Z",","hostname":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid,"":pid4291":4291}
1923 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30{"msg":"Crucible stats registered with UUID: b734394e-ac55-46b3-89f9-8e132a0e6ae7",","time":v"":02023-09-22T23:08:02.535238186Z","name,"":"hostname"crucible:","level":30,"time":"2023-09-22T23:08:02.535273804Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal"","pid":4291ip-10-150-1-74.us-west-2.compute.internal}"
1924 {"msg":}"{
1925 "Crucible stats registered with UUID: aa7556f5-78ad-4859-b5a3-cb4209a0a400msg":"Crucible b734394e-ac55-46b3-89f9-8e132a0e6ae7 has session id: c70bd9ee-29e2-4ee1-bd38-7ddc38a57989","v":0,,""pidname":"crucible","level":30","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.535345236Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1926 {"msg":",""time":":4291[0] b734394e-ac55-46b3-89f9-8e132a0e6ae7 (c367f5ab-c4f8-42c0-882b-6f763e0900ac) New New New ds_transition to WaitActive2023-09-22T23:08:02.53535437Z"",,"}v
1927 ":0,"name":"crucible","{level":30"msg":"[0] 40de3eeb-c819-466e-88b4-e1712ed524bc (5b45a21e-af03-4c26-80d4-f98a411c8304) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":,"30time":"2023-09-22T23:08:02.535402463Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1928 {"msg":","[0] Transition from New to WaitActivetime"":","v":02023-09-22T23:08:02.535419675Z","name",:""hostname"crucible:"","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1929 {,"time":""msg":"2023-09-22T23:08:02.535451854Z","hostname[0] Transition from WaitActive to WaitQuorum":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1930 "{hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1931 [0] b734394e-ac55-46b3-89f9-8e132a0e6ae7 (c367f5ab-c4f8-42c0-882b-6f763e0900ac) WaitActive New New ds_transition to WaitQuorum","v":0,"{name":"crucible","level":"30msg":"Crucible aa7556f5-78ad-4859-b5a3-cb4209a0a400 has session id: 922807e4-3275-418d-96f2-52bc2d475a21","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.535503085Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1932 ,"time":"2023-09-22T23:08:02.535514244Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid"":msg4291}
1933 ":"{[0] Transition from WaitActive to WaitQuorum","v":"0msg",":name":"crucible"test live_repair::repair_test::test_live_repair_deps_repair_repair_repair ... ,""oklevel
1934 [0] aa7556f5-78ad-4859-b5a3-cb4209a0a400 (df59fbd5-37bd-496c-ad85-1e830208beb9) New New New ds_transition to WaitActive"","v":0,"name":"crucible",""level":30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.535576686Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1935 ,:"time30"{:"2023-09-22T23:08:02.535583651Z""msg",:""hostname":"ip-10-150-1-74.us-west-2.compute.internal[0] Transition from New to WaitActive"",","pid"v:":42910,"name}":
1936 "crucible",",level""{:30time":""msg":"2023-09-22T23:08:02.535604328Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","[0] 40de3eeb-c819-466e-88b4-e1712ed524bc (5b45a21e-af03-4c26-80d4-f98a411c8304) WaitQuorum New New ds_transition to Activepid"":,,"4291v":0,"name":"crucible","level":30}
1937 {,""time":"time":"2023-09-22T23:08:02.535640139Zmsg":"","hostname":"[0] b734394e-ac55-46b3-89f9-8e132a0e6ae7 (c367f5ab-c4f8-42c0-882b-6f763e0900ac) WaitQuorum New New ds_transition to Active"ip-10-150-1-74.us-west-2.compute.internal",,""v"pid"::4291}
1938 0,"name":"{crucible","""level"msg"::2023-09-22T23:08:02.535623832Z30","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1939 {""msg":"[0] Transition from WaitQuorum to Active","v":0,"name":,"[0] aa7556f5-78ad-4859-b5a3-cb4209a0a400 (df59fbd5-37bd-496c-ad85-1e830208beb9) WaitActive New New ds_transition to WaitQuorumcrucible""",,""timevlevel""::0"30,:""name":"crucible"2023-09-22T23:08:02.535681203Z","level,"":30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1940 ,"time":"2023-09-22T23:08:02.535710264Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal"{,"pid":4291},
1941 ""time"msg:{"":"2023-09-22T23:08:02.535704028Z""msg":",[0] Transition from WaitQuorum to Active""hostname,":""ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1942 {"msg":"[1] 40de3eeb-c819-466e-88b4-e1712ed524bc (5b45a21e-af03-4c26-80d4-f98a411c8304) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.53577414Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1943 {"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30[0] Transition from WaitActive to WaitQuorum","v":0,"name":","crucible"time",:""level":302023-09-22T23:08:02.535806691Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1944 {,""timemsg""::""2023-09-22T23:08:02.535819291Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal[1] 40de3eeb-c819-466e-88b4-e1712ed524bc (5b45a21e-af03-4c26-80d4-f98a411c8304) Active WaitActive New ds_transition to WaitQuorum"",",pid"":v"4291:0,"}
1945 name":"crucible","{level":30"msg":"[0] aa7556f5-78ad-4859-b5a3-cb4209a0a400 (df59fbd5-37bd-496c-ad85-1e830208beb9) WaitQuorum New New ds_transition to Active","v":0,"name":",crucible""time,"":"level":302023-09-22T23:08:02.535846134Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1946 {",msg"":"time":"[1] Transition from WaitActive to WaitQuorum2023-09-22T23:08:02.535858245Z"",,""v":hostname"0:","name":"crucible"ip-10-150-1-74.us-west-2.compute.internal",",",pidlevel""::429130"}
1947 time":"{2023-09-22T23:08:02.535854732Z""msg":","hostname":"[0] Transition from WaitQuorum to Active",,""timevip-10-150-1-74.us-west-2.compute.internal"""::0",,"2023-09-22T23:08:02.535879986Z"",name""pid"hostname:""::crucible""4291,"levelip-10-150-1-74.us-west-2.compute.internal"":,30"}pid"
1948 :4291}
1949 ,"{time{":""msg":2023-09-22T23:08:02.535902865Z"",""hostname"msg:"":"[1] 40de3eeb-c819-466e-88b4-e1712ed524bc (5b45a21e-af03-4c26-80d4-f98a411c8304) Active WaitQuorum New ds_transition to Active"ip-10-150-1-74.us-west-2.compute.internal",","v"pid:":04291,"}name
1950 "[1] b734394e-ac55-46b3-89f9-8e132a0e6ae7 (c367f5ab-c4f8-42c0-882b-6f763e0900ac) Active New New ds_transition to WaitActive":{",""cruciblemsg"",":level"":v30":0,"name":"crucible","[1] aa7556f5-78ad-4859-b5a3-cb4209a0a400 (df59fbd5-37bd-496c-ad85-1e830208beb9) Active New New ds_transition to WaitActive"level",:"30v":0,",name"":time"":"crucible","level2023-09-22T23:08:02.535933737Z"":30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1951 {,"time"":"msg":","2023-09-22T23:08:02.535946151Z"time"[1] Transition from WaitQuorum to Active,"":hostname,""":v"":02023-09-22T23:08:02.535941426Z,"ip-10-150-1-74.us-west-2.compute.internal"",name,"""pid:"":hostname"crucible4291":"},
1952 "ip-10-150-1-74.us-west-2.compute.internal"level{",:"30msg"":pid"":4291[1] Transition from New to WaitActive"},
1953 ","v":time"0:","name":"2023-09-22T23:08:02.535977628Z"crucible",",{hostname"":"level":30"ip-10-150-1-74.us-west-2.compute.internal"msg,"":pid"":4291}
1954 ,[1] Transition from New to WaitActive""{,time"":"msg""v":2023-09-22T23:08:02.535994918Z"":,"0hostname,"":name""[2] 40de3eeb-c819-466e-88b4-e1712ed524bc (5b45a21e-af03-4c26-80d4-f98a411c8304) Active Active New ds_transition to WaitActive":",ip-10-150-1-74.us-west-2.compute.internal""crucible",v"",pid:"0:,4291""}name
1955 "level":"{:crucible"",30msg"":level"":30[1] aa7556f5-78ad-4859-b5a3-cb4209a0a400 (df59fbd5-37bd-496c-ad85-1e830208beb9) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible",","level"time:"30:"2023-09-22T23:08:02.536031755Z","hostname":",ip-10-150-1-74.us-west-2.compute.internal"","timepid"":,4291":"}time
1956 "2023-09-22T23:08:02.536030575Z":{",""hostname2023-09-22T23:08:02.536042335Zmsg""",:"":hostname"":[2] Transition from New to WaitActive"","v":ip-10-150-1-74.us-west-2.compute.internal"0ip-10-150-1-74.us-west-2.compute.internal,"","name"pid:"":,"4291crucible"}pid,
1957 "":level{4291":"30msg":}"
1958 [1] Transition from WaitActive to WaitQuorum","v":0,"name":"{crucible,"","time":level"":30"2023-09-22T23:08:02.536077063Zmsg"":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291[1] b734394e-ac55-46b3-89f9-8e132a0e6ae7 (c367f5ab-c4f8-42c0-882b-6f763e0900ac) Active WaitActive New ds_transition to WaitQuorum"},
1959 ","time{"v"::""0msg":,"2023-09-22T23:08:02.536088146Z"",name"":"hostname"crucible":","[2] 40de3eeb-c819-466e-88b4-e1712ed524bc (5b45a21e-af03-4c26-80d4-f98a411c8304) Active Active WaitActive ds_transition to WaitQuorumlevel"ip-10-150-1-74.us-west-2.compute.internal"":,,30""v"pid:":04291,"name}"
1960 :"crucible",{"level":"30msg":"[1] aa7556f5-78ad-4859-b5a3-cb4209a0a400 (df59fbd5-37bd-496c-ad85-1e830208beb9) Active WaitQuorum New ds_transition to Active","v":0,"{name",:"","crucible"time","time"level:":"":"msg30"2023-09-22T23:08:02.536128669Z:2023-09-22T23:08:02.536121796Z""",","hostname":"hostname":Crucible stats registered with UUID: 021f3454-29b2-4ff1-97c2-40bc8a3180a2""ip-10-150-1-74.us-west-2.compute.internal",",pid,ip-10-150-1-74.us-west-2.compute.internal"""":,v"time"4291:pid""::"04291},2023-09-22T23:08:02.536147292Z
1961 }"
1962 ,""{{hostname"name"":msg"":":"ip-10-150-1-74.us-west-2.compute.internal"msg""[2] Transition from WaitActive to WaitQuorum",,crucible""pidv":"0:,4291"":}name
1963 ",:""{levelcrucible"""",msg""[1] Transition from WaitActive to WaitQuorum:":"level30",:[1] Transition from WaitQuorum to Active30"",v"":v"0:0,,""name"name"::""crucible,"",crucible""time,level""::""30level":2023-09-22T23:08:02.536208499Z"30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""time"pid,""::time""4291:"}
1964 2023-09-22T23:08:02.536223646Z"2023-09-22T23:08:02.536207099Z,{""hostname"":msg"",:","ip-10-150-1-74.us-west-2.compute.internal"hostname"",:"time"":[2] 40de3eeb-c819-466e-88b4-e1712ed524bc (5b45a21e-af03-4c26-80d4-f98a411c8304) Active Active WaitQuorum ds_transition to Activepid"":"ip-10-150-1-74.us-west-2.compute.internal",4291"2023-09-22T23:08:02.536225529Z"v}"
1965 :,,0{,""""msgname""::"pid""hostname:crucible"4291,":[2] aa7556f5-78ad-4859-b5a3-cb4209a0a400 (df59fbd5-37bd-496c-ad85-1e830208beb9) Active Active New ds_transition to WaitActive""level,"":v30"}:"
1966 ip-10-150-1-74.us-west-2.compute.internal"0,",time""name"::"",{crucible2023-09-22T23:08:02.536273588Z"","",pid"hostname"""level:"":msg"::ip-10-150-1-74.us-west-2.compute.internal30"4291,""}pid
1967 Crucible 021f3454-29b2-4ff1-97c2-40bc8a3180a2 has session id: f8ec93e4-add2-454d-a49c-7f44bf328c3f"":,"4291time":"},"
1968 2023-09-22T23:08:02.5362981Z"v"{,{""msg:hostname""::""0","[2] Transition from WaitQuorum to Activeip-10-150-1-74.us-west-2.compute.internal"",,""namevpid""::msg""04291,:}"
1969 name"::""{crucible"""crucible",msg"",level:"":"[1] b734394e-ac55-46b3-89f9-8e132a0e6ae7 (c367f5ab-c4f8-42c0-882b-6f763e0900ac) Active WaitQuorum New ds_transition to Active"30[2] Transition from New to WaitActive"level,"",:"vv"30"::00,,""time"name:"",":name2023-09-22T23:08:02.536342853Z""",crucible"":hostname,""level""::"30crucible","level"ip-10-150-1-74.us-west-2.compute.internal":,"30pid":4291},
1970 ,""time"{:time""":msg"2023-09-22T23:08:02.536362516Z"":","2023-09-22T23:08:02.536349838Z"hostname":","hostname":"ip-10-150-1-74.us-west-2.compute.internal"40de3eeb-c819-466e-88b4-e1712ed524bc is now active with session: 5b45a21e-af03-4c26-80d4-f98a411c8304,""ip-10-150-1-74.us-west-2.compute.internal"pid,",:"4291v"}:
1971 0",{"pid""namemsg""::"",:"crucible4291"time",:}"
1972 "[2] aa7556f5-78ad-4859-b5a3-cb4209a0a400 (df59fbd5-37bd-496c-ad85-1e830208beb9) Active Active WaitActive ds_transition to WaitQuorumlevel""2023-09-22T23:08:02.536367263Z",:"30v",:"0{hostname",:""name":"crucible""ip-10-150-1-74.us-west-2.compute.internal,msg,""",time""level""::":"pid"2023-09-22T23:08:02.536407789Z30":,"4291hostname":"}[0] 021f3454-29b2-4ff1-97c2-40bc8a3180a2 (55e767e0-c831-4573-b14d-c7e5c142adea) New New New ds_transition to WaitActive"
1973 ip-10-150-1-74.us-west-2.compute.internal,"",,"timepid"""{::4291"v}"
1974 2023-09-22T23:08:02.536426454Z:"{"0",msg"msg"hostname""::"":,""name"ip-10-150-1-74.us-west-2.compute.internal:"[1] Transition from WaitQuorum to Active"[1] 40de3eeb-c819-466e-88b4-e1712ed524bc (5b45a21e-af03-4c26-80d4-f98a411c8304) Active Active Active ds_transition to Faulted,","",vcrucible""v"pid:"0"",:":name4291"0,}:
1975 "",level""{crucible":",msgname""":30:level""":30crucible"[2] Transition from WaitActive to WaitQuorum",","level"v:":300,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.536489607Z",",hostname"":","time"time":ip-10-150-1-74.us-west-2.compute.internal:"",""2023-09-22T23:08:02.536498468Zpid,2023-09-22T23:08:02.536487138Z""",:"4291"hostname}"
1976 :,time":"{"hostname"":"ip-10-150-1-74.us-west-2.compute.internalmsg"","":pid""2023-09-22T23:08:02.536495066Z":ip-10-150-1-74.us-west-2.compute.internal[1] Transition from Active to Faulted""4291,,""v}"
1977 :pid",0{,:"4291"msg"":"name"}hostname"
1978 :[2] aa7556f5-78ad-4859-b5a3-cb4209a0a400 (df59fbd5-37bd-496c-ad85-1e830208beb9) Active Active WaitQuorum ds_transition to Active"":crucible,"""v",:"0{,level"":name30"ip-10-150-1-74.us-west-2.compute.internal:""",msgcrucible""":"pid",:[0] Transition from New to WaitActive4291",""}timelevel,"
1979 ":"30:v"":0{2023-09-22T23:08:02.536563527Z",",name"""hostname:msg"":,"":crucible""time,"":ip-10-150-1-74.us-west-2.compute.internal"""level"2023-09-22T23:08:02.536583309Z,"","pid:hostname""::4291"[2] b734394e-ac55-46b3-89f9-8e132a0e6ae7 (c367f5ab-c4f8-42c0-882b-6f763e0900ac) Active Active New ds_transition to WaitActive"30}ip-10-150-1-74.us-west-2.compute.internal
1980 ",,"{"v""pid"msg:"4291:"}:
1981 0,"{name":"[1] 40de3eeb-c819-466e-88b4-e1712ed524bc (5b45a21e-af03-4c26-80d4-f98a411c8304) Active Faulted Active ds_transition to LiveRepairReady""msg",:""crucible"v",:[2] Transition from WaitQuorum to Active"0,,"",v""name:"0time,:"""namecrucible"":,:"""levelcrucible"":,30""level"level":2023-09-22T23:08:02.536608176Z"30:,"30hostname":"ip-10-150-1-74.us-west-2.compute.internal",","pid"time:":"4291,"time2023-09-22T23:08:02.536642624Z"":"},
1982 2023-09-22T23:08:02.536646513Z"",hostname"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal"{,ip-10-150-1-74.us-west-2.compute.internal,"",pid""":pid4291""time"}:msg":4291
1983 }:
1984 {"""{msg":2023-09-22T23:08:02.536648831Z"""msg":,""[1] Transition from Faulted to LiveRepairReady"hostname":[0] 021f3454-29b2-4ff1-97c2-40bc8a3180a2 (55e767e0-c831-4573-b14d-c7e5c142adea) WaitActive New New ds_transition to WaitQuorum""aa7556f5-78ad-4859-b5a3-cb4209a0a400 is now active with session: df59fbd5-37bd-496c-ad85-1e830208beb9,"",,v""v:"0:,0"",name"vname""::"""cruciblecrucible"":,,""0levellevel"":ip-10-150-1-74.us-west-2.compute.internal",30:"30,name"":"pid":crucible"4291,"level":}30
1985 ,,""timetime""::""2023-09-22T23:08:02.53671336Z2023-09-22T23:08:02.536712184Z"{",","hostname":hostname"":""msg"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",:",pid""pid:4291",}"
1986 :"4291[2] Transition from New to WaitActive}{time"
1987 ""msg{"::""msg,""":v""2023-09-22T23:08:02.536720344Z":[1] 40de3eeb-c819-466e-88b4-e1712ed524bc (5b45a21e-af03-4c26-80d4-f98a411c8304) Active LiveRepairReady Active ds_transition to LiveRepair",,""0[1] aa7556f5-78ad-4859-b5a3-cb4209a0a400 (df59fbd5-37bd-496c-ad85-1e830208beb9) Active Active Active ds_transition to Faultedv"",hostname":,0""v,"":name0",:"name"name"":":crucible:"",""cruciblelevel"",:ip-10-150-1-74.us-west-2.compute.internal"30",crucible"level","":pid":304291}
1988 ,"time":,""time":"2023-09-22T23:08:02.536780715Z"{2023-09-22T23:08:02.536787268Z,"","hostname":hostname"":""msg":"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291[0] Transition from WaitActive to WaitQuorum"}}
1989 
1990 {level,""{msg""":msg:"":30v"":[1] Transition from LiveRepairReady to LiveRepair"0,[1] Transition from Active to Faulted",v"",:"0",v""name"name:"0::,"""namecrucible"":crucible"","cruciblelevel"",:"30,"level":level,"30:"30time":"2023-09-22T23:08:02.536818646Z",,""hostnametime""::,"""time":2023-09-22T23:08:02.536834851Z"","ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.536838866Zhostname"",:"",hostname"":ip-10-150-1-74.us-west-2.compute.internalpid""",",:ip-10-150-1-74.us-west-2.compute.internalpid"",:"4291pid"4291}"
1991 :}time"
1992 :4291"}
1993 2023-09-22T23:08:02.536840739Z"{{,"hostname":"""msg":"ip-10-150-1-74.us-west-2.compute.internal"msg,""pid":":4291[1] aa7556f5-78ad-4859-b5a3-cb4209a0a400 (df59fbd5-37bd-496c-ad85-1e830208beb9) Active Faulted Active ds_transition to LiveRepairReady"},
1994 [2] b734394e-ac55-46b3-89f9-8e132a0e6ae7 (c367f5ab-c4f8-42c0-882b-6f763e0900ac) Active Active WaitActive ds_transition to WaitQuorum{","v"":msg0":","name":"crucible","level":30[0] 021f3454-29b2-4ff1-97c2-40bc8a3180a2 (55e767e0-c831-4573-b14d-c7e5c142adea) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.536915928Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
1995 ,""time":"v":{02023-09-22T23:08:02.536928533Z",",name"""hostname"msg:":"":"crucible"ip-10-150-1-74.us-west-2.compute.internal",,[2] Transition from WaitActive to WaitQuorum"""level",:"pid"v":304291:0,"}name"
1996 :"crucible","level":30{"msg":"[0] Transition from WaitQuorum to Active","v":,"0time,"":"name":"crucible"2023-09-22T23:08:02.536969915Z","level":,"30,hostname"":"time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.53698014Z",","pid":hostname4291":"}
1997 ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time"{}:"
1998 "2023-09-22T23:08:02.536995771Z"msg":","hostname":"{[1] Transition from Faulted to LiveRepairReady","ip-10-150-1-74.us-west-2.compute.internal"v",:"0pid",":"4291name"msg"}::"
1999 "crucible"[2] b734394e-ac55-46b3-89f9-8e132a0e6ae7 (c367f5ab-c4f8-42c0-882b-6f763e0900ac) Active Active WaitQuorum ds_transition to Active",",level{"":v"30":msg"0:","name":"crucible","level":30[1] 021f3454-29b2-4ff1-97c2-40bc8a3180a2 (55e767e0-c831-4573-b14d-c7e5c142adea) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.537046497Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal,"","time":"pid":42912023-09-22T23:08:02.537053115Z"},
2000 ","hostname":"time":{"ip-10-150-1-74.us-west-2.compute.internal","pid"2023-09-22T23:08:02.537060141Z"":,4291"msg"hostname"::""}
2001 ip-10-150-1-74.us-west-2.compute.internal","[1] aa7556f5-78ad-4859-b5a3-cb4209a0a400 (df59fbd5-37bd-496c-ad85-1e830208beb9) Active LiveRepairReady Active ds_transition to LiveRepairpid"":{4291,"v":}0"
2002 msg":","name":"crucible[2] Transition from WaitQuorum to Active""{,,""level"v"":msg30"::"0,"name":[1] Transition from New to WaitActive"",crucible"",v"":level"0:,"30name":"crucible","level":30,"time":"test live_repair::repair_test::test_live_repair_deps_repair_rspan_right ... 2023-09-22T23:08:02.537117628Z"ok,"
2003 hostname",":",time"ip-10-150-1-74.us-west-2.compute.internal":"",time"":"pid":2023-09-22T23:08:02.537128175Z"2023-09-22T23:08:02.537131316Z"4291,",}hostname""
2004 hostname"::"{""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal""msg":",,"[1] Transition from LiveRepairReady to LiveRepair"pid"":,"4291pidv""::0},"4291name"
2005 :"}crucible"
2006 {,"level":30"{msg":""msg":"b734394e-ac55-46b3-89f9-8e132a0e6ae7 is now active with session: c367f5ab-c4f8-42c0-882b-6f763e0900ac","v":0,"name":"crucible"[1] 021f3454-29b2-4ff1-97c2-40bc8a3180a2 (55e767e0-c831-4573-b14d-c7e5c142adea) Active WaitActive New ds_transition to WaitQuorum",,,"""v"timelevel"::"30:0","name":"2023-09-22T23:08:02.537192267Z"crucible",","hostname"level"::"30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2007 ,"time":"{2023-09-22T23:08:02.53721269Z"","msg"hostname":":","time":ip-10-150-1-74.us-west-2.compute.internal""Write to Extent 0:2:9 under repair,"2023-09-22T23:08:02.53722009Z""pid",:,""4291hostname"v":":}0
2008 ip-10-150-1-74.us-west-2.compute.internal",,""name":"pid"{:crucible"4291,"level""}msg
2009 "::"40{"[1] b734394e-ac55-46b3-89f9-8e132a0e6ae7 (c367f5ab-c4f8-42c0-882b-6f763e0900ac) Active Active Active ds_transition to Faulted"msg":","v":0[1] Transition from WaitActive to WaitQuorum",",name":""v"crucible":,"0level",:,30""name":time"":crucible"","level":2023-09-22T23:08:02.537269921Z"30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2010 ,"time":"2023-09-22T23:08:02.537286276Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time":,""pid":42912023-09-22T23:08:02.537292591Z"},
2011 {"msg":"[1] Transition from Active to Faulted","v":0,""name":"crucible"hostname",":level":"30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2012 {,"time":""msg":"2023-09-22T23:08:02.537380183Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",[1] 021f3454-29b2-4ff1-97c2-40bc8a3180a2 (55e767e0-c831-4573-b14d-c7e5c142adea) Active WaitQuorum New ds_transition to Active""pid",":v4291":0},"
2013 name":"crucible","level":{30"msg":"[1] b734394e-ac55-46b3-89f9-8e132a0e6ae7 (c367f5ab-c4f8-42c0-882b-6f763e0900ac) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.537419191Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"}time
2014 ":"2023-09-22T23:08:02.537433516Z","{hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg",:""pid":4291[1] Transition from WaitQuorum to Active","}v"
2015 :0,"name":"crucible"{,"level":30"msg":"[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.537476549Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"}time
2016 ":"2023-09-22T23:08:02.537486141Z","{hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg":,""pid":4291}
2017 [2] 021f3454-29b2-4ff1-97c2-40bc8a3180a2 (55e767e0-c831-4573-b14d-c7e5c142adea) Active Active New ds_transition to WaitActive","v":0{,"name":"crucible",""level":msg30":"[1] b734394e-ac55-46b3-89f9-8e132a0e6ae7 (c367f5ab-c4f8-42c0-882b-6f763e0900ac) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible","level":30{,"time"":"msg":"2023-09-22T23:08:02.537529744Z","hostname":"Crucible stats registered with UUID: e25c51fe-6bce-4500-9128-4f204e1a9445"ip-10-150-1-74.us-west-2.compute.internal",","pid"v"::42910,"},"
2018 name"time"::""crucible{"2023-09-22T23:08:02.537541991Z",",level"""msghostname"::"30":"ip-10-150-1-74.us-west-2.compute.internal"[2] Transition from New to WaitActive",",pid"":v"4291:0,"}name"
2019 test live_repair::repair_test::test_live_repair_deps_repair_write ... :"{,ok
2020 "crucible""timemsg"",:":""level":302023-09-22T23:08:02.537574802Z"[1] Transition from LiveRepairReady to LiveRepair",","hostname"v"::"0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible",",pid"":level"4291:30}
2021 ,"time":"2023-09-22T23:08:02.537609141Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":,4291"Crucible e25c51fe-6bce-4500-9128-4f204e1a9445 has session id: 15fb88f2-fded-4cce-bdd4-93603c9bc51f"time"}:,"
2022 v"":0,"2023-09-22T23:08:02.53762148Z"name"{:,""crucible"hostname",""msg":":level"":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":4291}
2023 [2] 021f3454-29b2-4ff1-97c2-40bc8a3180a2 (55e767e0-c831-4573-b14d-c7e5c142adea) Active Active WaitActive ds_transition to WaitQuorum","v":0,"{name":"crucible","level"":msg"30,:""time":"Write to Extent 0:2:9 under repair","2023-09-22T23:08:02.537665051Z"v":,"0hostname",:""name":"crucible"ip-10-150-1-74.us-west-2.compute.internal",,""levelpid""::429140,"}
2024 time":"2023-09-22T23:08:02.537681261Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2025 [0] e25c51fe-6bce-4500-9128-4f204e1a9445 (04223c12-1924-4b5a-ac33-ec762a7ea271) New New New ds_transition to WaitActive",","timev"":{":0,"2023-09-22T23:08:02.537697881Z"name":",""msg":"hostname"crucible":",[2] Transition from WaitActive to WaitQuorum""ip-10-150-1-74.us-west-2.compute.internallevel,""":,"v30"pid"::04291,"name":"}crucible"
2026 ,"level":30{",msg"":"time":"Write 1:0:9 past extent under repair 0"2023-09-22T23:08:02.537739789Z","v,":"0hostname",:""name":"crucible",ip-10-150-1-74.us-west-2.compute.internal"",,time"":"pid"level""::4291402023-09-22T23:08:02.537750983Z"}
2027 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal"{,"pid":4291"}msg
2028 ":"[0] Transition from New to WaitActive","v"{:0,"name":""crucible"msg":,""level":30[2] 021f3454-29b2-4ff1-97c2-40bc8a3180a2 (55e767e0-c831-4573-b14d-c7e5c142adea) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.537776463Z",",time"":"hostname":"2023-09-22T23:08:02.537805273Z",ip-10-150-1-74.us-west-2.compute.internal"","hostname":"pid",:"4291ip-10-150-1-74.us-west-2.compute.internal"time,""}:"
2029 2023-09-22T23:08:02.537814609Z"pid":,"4291hostname":"}{
2030 ip-10-150-1-74.us-west-2.compute.internal"","msgpid"":{4291:"}
2031 Write 1:1:9 past extent under repair 0""msg,""v":{":0,"name"":"msg":crucible""[0] e25c51fe-6bce-4500-9128-4f204e1a9445 (04223c12-1924-4b5a-ac33-ec762a7ea271) WaitActive New New ds_transition to WaitQuorum",",level[2] Transition from WaitQuorum to Active""",v""v::"0:,0"40,"name"name"::""crucible"crucible",","level"level"::3030,"time":"2023-09-22T23:08:02.53788561Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",",pid"":time,4291"":}time
2032 "":"2023-09-22T23:08:02.537894132Z"2023-09-22T23:08:02.537894443Z"{,,""hostname"hostname"":msg":"":"ip-10-150-1-74.us-west-2.compute.internal","pid"ip-10-150-1-74.us-west-2.compute.internal"IO Write 1008 on eur 0 Added deps 1,:""4291pid",:"}4291v"
2033 :}0
2034 ,{"name":"crucible"",msg":"{"level[0] Transition from WaitActive to WaitQuorum"":,"40v":"0,"msgname":""crucible":,""level":30021f3454-29b2-4ff1-97c2-40bc8a3180a2 is now active with session: 55e767e0-c831-4573-b14d-c7e5c142adea","v":0,"name":"crucible","level":,","30time":"time":"2023-09-22T23:08:02.537966253Z","hostname":2023-09-22T23:08:02.537959938Z"","ip-10-150-1-74.us-west-2.compute.internal"hostname",":pid":"4291}
2035 ip-10-150-1-74.us-west-2.compute.internal","{pid":"4291msg":"}
2036 [0] e25c51fe-6bce-4500-9128-4f204e1a9445 (04223c12-1924-4b5a-ac33-ec762a7ea271) WaitQuorum New New ds_transition to Active",","v":0time",":name":""crucible","level":302023-09-22T23:08:02.537978013Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4291time":"}2023-09-22T23:08:02.538005773Z"
2037 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}{
2038 "{msg":""msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30[1] 021f3454-29b2-4ff1-97c2-40bc8a3180a2 (55e767e0-c831-4573-b14d-c7e5c142adea) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":,"30time":"2023-09-22T23:08:02.538033115Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2039 {"msg":","time":"2023-09-22T23:08:02.538040588Z","[1] e25c51fe-6bce-4500-9128-4f204e1a9445 (04223c12-1924-4b5a-ac33-ec762a7ea271) Active New New ds_transition to WaitActivehostname"":","v":0,ip-10-150-1-74.us-west-2.compute.internal"","name"pid"::"4291crucible","}level{"
2040 ":30msg":"{Crucible stats registered with UUID: af820581-d317-46fc-9b93-01ba46c1da49",""msgv""::"0,"name"[1] Transition from Active to Faulted":",crucible,"""time,"":level"":v"302023-09-22T23:08:02.538075872Z":,"0hostname",:""name":"crucible","ip-10-150-1-74.us-west-2.compute.internal"level",:"30pid":4291}
2041 ,"{time":""msg":"2023-09-22T23:08:02.53809334Z","[1] Transition from New to WaitActivehostname"":","v":0,"ip-10-150-1-74.us-west-2.compute.internalname"":,""pidcrucible"":,4291"level":}30,"
2042 time":"{2023-09-22T23:08:02.538103523Z","hostname"":msg,"""time:"":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.538119971Z,"",pid"Crucible af820581-d317-46fc-9b93-01ba46c1da49 has session id: 77262da8-9084-4487-8fc8-aafdaf5d60a6"":,hostname"4291v""::"0},
2043 "ip-10-150-1-74.us-west-2.compute.internal"name",:""pid"crucible:"4291,"}{
2044 level""msg"{::30""msg":"[1] 021f3454-29b2-4ff1-97c2-40bc8a3180a2 (55e767e0-c831-4573-b14d-c7e5c142adea) Active Faulted Active ds_transition to LiveRepairReady"[1] e25c51fe-6bce-4500-9128-4f204e1a9445 (04223c12-1924-4b5a-ac33-ec762a7ea271) Active WaitActive New ds_transition to WaitQuorum,""v,"",v"":time:"0:,""0name"2023-09-22T23:08:02.53815581Z":,","name"crucible"":hostname,"":"level"":crucible"30ip-10-150-1-74.us-west-2.compute.internal",,""level"pid:"30:4291}
2045 ,"time":"{2023-09-22T23:08:02.538179384Z"",msg"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2046 [0] af820581-d317-46fc-9b93-01ba46c1da49 (4bd2fdd5-afb2-479f-8091-75ec93e8aa7a) New New New ds_transition to WaitActive",",v{":""0time,msg""name:"":"":"crucible[1] Transition from WaitActive to WaitQuorum"",","levelv""2023-09-22T23:08:02.538184389Z"::030,,""hostname"name":test live_repair::repair_test::test_live_repair_deps_repair_wafter ... ":,""oktimecrucible"":,""
2047 2023-09-22T23:08:02.538211347Zlevel"","hostname":"ip-10-150-1-74.us-west-2.compute.internal",":pid30":4291}
2048 {"ip-10-150-1-74.us-west-2.compute.internalmsg","":"time",:"[0] Transition from New to WaitActive"","2023-09-22T23:08:02.538243789Zv""pid",:"0:hostname,"":name""4291:"crucibleip-10-150-1-74.us-west-2.compute.internal"},"",
2049 level"":pid"30:4291}
2050 {{"msg"":"msg":","time":"[1] Transition from Faulted to LiveRepairReady"2023-09-22T23:08:02.538271832Z",",[1] e25c51fe-6bce-4500-9128-4f204e1a9445 (04223c12-1924-4b5a-ac33-ec762a7ea271) Active WaitQuorum New ds_transition to Active""v,hostname""v"::"0",:"ip-10-150-1-74.us-west-2.compute.internal"name"0:,"",pidcrucible"":,""4291name":level}"
2051 :"30{crucible",""msg"level"::"30,"time[0] af820581-d317-46fc-9b93-01ba46c1da49 (4bd2fdd5-afb2-479f-8091-75ec93e8aa7a) WaitActive New New ds_transition to WaitQuorum"":","v":02023-09-22T23:08:02.538302218Z,""name,"":"hostname"crucible:"","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2052 {","msg":time","":"time":"[1] Transition from WaitQuorum to Active"2023-09-22T23:08:02.538307877Z"2023-09-22T23:08:02.538318214Z,""v,"":,hostname0",:"""name"hostname":ip-10-150-1-74.us-west-2.compute.internal"":,crucible""pid",:"4291"level}"
2053 :ip-10-150-1-74.us-west-2.compute.internal"30{,"pid"":msg"4291:"}
2054 ,[0] Transition from WaitActive to WaitQuorum""time,"":"v":0,"2023-09-22T23:08:02.53834631Z"name{,"":"hostname"crucible:"","level"":ip-10-150-1-74.us-west-2.compute.internal30msg"":","pid":4291}
2055 [1] 021f3454-29b2-4ff1-97c2-40bc8a3180a2 (55e767e0-c831-4573-b14d-c7e5c142adea) Active LiveRepairReady Active ds_transition to LiveRepair"{,"",timemsg"""::""v":02023-09-22T23:08:02.53836423Z",","name":hostname""[2] e25c51fe-6bce-4500-9128-4f204e1a9445 (04223c12-1924-4b5a-ac33-ec762a7ea271) Active Active New ds_transition to WaitActive:crucible""",,""levelvip-10-150-1-74.us-west-2.compute.internal"":,0","pid":name""::4291"30crucible}"
2056 ,"level":30{"msg":"[0] af820581-d317-46fc-9b93-01ba46c1da49 (4bd2fdd5-afb2-479f-8091-75ec93e8aa7a) WaitQuorum New New ds_transition to Active","v":0,"name",:""time"crucible:"","level":302023-09-22T23:08:02.538396979Z",","hostname":time"":"2023-09-22T23:08:02.538393651Zip-10-150-1-74.us-west-2.compute.internal"","pid",:,4291""}time
2057 "hostname":{":""2023-09-22T23:08:02.538409722Zmsg"":",ip-10-150-1-74.us-west-2.compute.internal"",[2] Transition from New to WaitActivehostname"":,"""v"pid":ip-10-150-1-74.us-west-2.compute.internal0",,"":pidname4291""::"4291crucible"},
2058 "}level
2059 {":30"msg":"{[0] Transition from WaitQuorum to Active","v":0,""name"msg"::",""timecrucible"":","[1] Transition from LiveRepairReady to LiveRepairlevel2023-09-22T23:08:02.53844585Z"":,30"","hostname"v"::"0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible",",,pid""time:"4291:"}"
2060 2023-09-22T23:08:02.538458949Z"level":{,"30hostname"":msg"":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2061 [2] e25c51fe-6bce-4500-9128-4f204e1a9445 (04223c12-1924-4b5a-ac33-ec762a7ea271) Active Active WaitActive ds_transition to WaitQuorum","v{":0,""namemsg""::""crucible","level":30[1] af820581-d317-46fc-9b93-01ba46c1da49 (4bd2fdd5-afb2-479f-8091-75ec93e8aa7a) Active New New ds_transition to WaitActive",",v"":time"0:,""name":,""2023-09-22T23:08:02.538476202Zcrucibletime"",:"""level":302023-09-22T23:08:02.53848912Z",","hostname":hostname"":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internalpid"":,,""4291timepid""::"4291}
2062 2023-09-22T23:08:02.53850299Z}"
2063 ,"hostname":"{"msg"ip-10-150-1-74.us-west-2.compute.internal:"","pid":4291[2] Transition from WaitActive to WaitQuorum}"
2064 ,"v":0{,"name":""msg":crucible"","level":30[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.538534993Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","pid"2023-09-22T23:08:02.538540456Z:"4291,"}
2065 hostname":"{"ip-10-150-1-74.us-west-2.compute.internal"msg",:""pid":4291}
2066 {[2] e25c51fe-6bce-4500-9128-4f204e1a9445 (04223c12-1924-4b5a-ac33-ec762a7ea271) Active Active WaitQuorum ds_transition to Active"","msgv""::"0,"name":"crucible","level":30[1] af820581-d317-46fc-9b93-01ba46c1da49 (4bd2fdd5-afb2-479f-8091-75ec93e8aa7a) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.538569113Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal,"","time"pid:"":4291}
2067 2023-09-22T23:08:02.538576057Z","hostname":{""msg"ip-10-150-1-74.us-west-2.compute.internal:"","pid":4291[2] Transition from WaitQuorum to Active"}
2068 ,"v":0{,"name":""msgcrucible"":,""level":30[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.538608779Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.538614376Z"pid":,"4291hostname":}"
2069 ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}
2070 "msg":"{"msg":"e25c51fe-6bce-4500-9128-4f204e1a9445 is now active with session: 04223c12-1924-4b5a-ac33-ec762a7ea271","v":0,"name":"[1] af820581-d317-46fc-9b93-01ba46c1da49 (4bd2fdd5-afb2-479f-8091-75ec93e8aa7a) Active WaitQuorum New ds_transition to Active"crucible",","v":level0":,"30name":"crucible","level":30,"time":","time2023-09-22T23:08:02.538645496Z"":","hostname":"2023-09-22T23:08:02.538648301Z","hostname":ip-10-150-1-74.us-west-2.compute.internal"","pid":4291ip-10-150-1-74.us-west-2.compute.internal"},
2071 "pid":4291{}
2072 "msg":"{"msg":"[1] Transition from WaitQuorum to Active","[1] e25c51fe-6bce-4500-9128-4f204e1a9445 (04223c12-1924-4b5a-ac33-ec762a7ea271) Active Active Active ds_transition to Faultedv"":,0",v"":name0":,""namecrucible"":","crucible"level",:"30level":30,,""timetime""::""2023-09-22T23:08:02.538681242Z2023-09-22T23:08:02.538681975Z"",,""hostnamehostname""::""{ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291}}
2073 
2074 {"{"msgmsg"""msg::"":""[1] Transition from Active to Faulted","Crucible stats registered with UUID: 4b1de70b-0c52-4541-acb2-5a791e8f874cv"":[2] af820581-d317-46fc-9b93-01ba46c1da49 (4bd2fdd5-afb2-479f-8091-75ec93e8aa7a) Active Active New ds_transition to WaitActive0",",",vname""::0"",crucible""name",:""vcruciblelevel"",:"30"level"::300,"name":"crucible","level":30,"time":","time":2023-09-22T23:08:02.538727538Z"","hostname2023-09-22T23:08:02.538729734Z"":"test live_repair::repair_test::test_live_repair_deps_repair_wspan_left ... ,"ip-10-150-1-74.us-west-2.compute.internal"hostname,"":pid""okip-10-150-1-74.us-west-2.compute.internal:"4291,
2075 }"
2076 pid",:{4291"}"
2077 msg"time":{:"""msg":"2023-09-22T23:08:02.5387362Z","[2] Transition from New to WaitActive"hostname",:""v[1] e25c51fe-6bce-4500-9128-4f204e1a9445 (04223c12-1924-4b5a-ac33-ec762a7ea271) Active Faulted Active ds_transition to LiveRepairReady"":,0",v""name:"0:,""ip-10-150-1-74.us-west-2.compute.internalcruciblename"",:"""cruciblelevel"",:,30""levelpid""::304291}
2078 ,,""timetime""::""2023-09-22T23:08:02.538783882Z2023-09-22T23:08:02.538787433Z""{,,""hostnamehostname""::"""msg"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal:"",,"""pidpid""::42914291}}
2079 
2080 {Crucible 4b1de70b-0c52-4541-acb2-5a791e8f874c has session id: d97d444e-ecec-4a18-add5-6352a11d2556","{msg"":""vmsg"":[1] Transition from Faulted to LiveRepairReady"":,"0v":,0","name":"[2] af820581-d317-46fc-9b93-01ba46c1da49 (4bd2fdd5-afb2-479f-8091-75ec93e8aa7a) Active Active WaitActive ds_transition to WaitQuorumname"crucible,"":v"":"0crucible",,"",level"name:"30:""level":crucible"30,"level":30,"time":"2023-09-22T23:08:02.538835163Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.538839343Z"pid",:"4291hostname":}"
2081 ip-10-150-1-74.us-west-2.compute.internal",",pid{":""4291msg"}:
2082 "time":"{2023-09-22T23:08:02.538838308Z""msg",:""[1] e25c51fe-6bce-4500-9128-4f204e1a9445 (04223c12-1924-4b5a-ac33-ec762a7ea271) Active LiveRepairReady Active ds_transition to LiveRepair"hostname",:"[2] Transition from WaitActive to WaitQuorum""v",:"0v",:"0ip-10-150-1-74.us-west-2.compute.internalname,"":"name"crucible:"""crucible,"",,""levellevel""::3030pid":4291}
2083 ,,""timetime""::""2023-09-22T23:08:02.538881836Z2023-09-22T23:08:02.538882064Z""{,,""hostnamehostname""::"""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""msg"pidpid""::42914291:}}
2084 
2085 {"{"msg":""msg":"[1] Transition from LiveRepairReady to LiveRepair"[0] 4b1de70b-0c52-4541-acb2-5a791e8f874c (b63c9270-2f34-4a0f-b0a8-337f9b415156) New New New ds_transition to WaitActive","v",:"[2] af820581-d317-46fc-9b93-01ba46c1da49 (4bd2fdd5-afb2-479f-8091-75ec93e8aa7a) Active Active WaitQuorum ds_transition to Active0",v","":vname":00":,"",name""crucible:"",name"crucible""level,:""level:"":3030crucible","level":30,,""timetime""::""2023-09-22T23:08:02.538938621Z2023-09-22T23:08:02.538938813Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291,"}}
2086 
2087 {time":""msg":"2023-09-22T23:08:02.538941965Z","[2] Transition from WaitQuorum to Active"hostname,"":v"":0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible",,""levelpid":"30:4291}
2088 {,"time"":"msg":"2023-09-22T23:08:02.538977679Z","[0] Transition from New to WaitActive"hostname",":v"":0,"ip-10-150-1-74.us-west-2.compute.internal"name,"":pid"":crucible"4291,"level}"
2089 :30{"msg":"af820581-d317-46fc-9b93-01ba46c1da49 is now active with session: 4bd2fdd5-afb2-479f-8091-75ec93e8aa7a","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.539011136Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2090 ,"time":"2023-09-22T23:08:02.539024964Z"{,"hostname":""msg":ip-10-150-1-74.us-west-2.compute.internal"","pid":4291}
2091 [0] 4b1de70b-0c52-4541-acb2-5a791e8f874c (b63c9270-2f34-4a0f-b0a8-337f9b415156) WaitActive New New ds_transition to WaitQuorum","v":{0,"name":""msg":"crucible","level":30[1] af820581-d317-46fc-9b93-01ba46c1da49 (4bd2fdd5-afb2-479f-8091-75ec93e8aa7a) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.539062031Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.539070084Z"pid",:"4291hostname":"}
2092 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2093 {{"msg":""msg":"[0] Transition from WaitActive to WaitQuorum","v"[1] Transition from Active to Faulted":,"0v",":name"0:,""crucible"name":","crucible"level",:"30level":30,"time":","time":"2023-09-22T23:08:02.539117656Z","2023-09-22T23:08:02.539118659Z"hostname":,""hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"ip-10-150-1-74.us-west-2.compute.internal":,"4291pid":4291}
2094 }
2095 {"{msg":""msg":"[0] 4b1de70b-0c52-4541-acb2-5a791e8f874c (b63c9270-2f34-4a0f-b0a8-337f9b415156) WaitQuorum New New ds_transition to Active","v":0,[1] af820581-d317-46fc-9b93-01ba46c1da49 (4bd2fdd5-afb2-479f-8091-75ec93e8aa7a) Active Faulted Active ds_transition to LiveRepairReady"","name":"v"crucible":,0","level"name"::30"crucible","level":30,"time":"2023-09-22T23:08:02.539174922Z",","time":"hostname":"2023-09-22T23:08:02.539177853Z","ip-10-150-1-74.us-west-2.compute.internal"hostname,""pid"::4291"}
2096 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}{
2097 "msg":{"{[0] Transition from WaitQuorum to Active","v"":msg"":0"msg,"":name"[1] Transition from Faulted to LiveRepairReady":"",crucible""v,"test live_repair::repair_test::test_live_repair_deps_repair_wspan_right ... :0ok,""Crucible stats registered with UUID: b4ddac83-8f02-4759-90d0-81ecbf434d0d"level"name
2098 ",:"30v"::"0crucible",","name"level"::"30crucible","level":30,"time":"2023-09-22T23:08:02.539247304Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",","pid":time4291":"}
2099 2023-09-22T23:08:02.539255365Z","hostname":"{,"ip-10-150-1-74.us-west-2.compute.internal"time":,"""pid":msg42912023-09-22T23:08:02.539258682Z""}:
2100 ,""hostname":"{ip-10-150-1-74.us-west-2.compute.internal",""pid"[1] 4b1de70b-0c52-4541-acb2-5a791e8f874c (b63c9270-2f34-4a0f-b0a8-337f9b415156) Active New New ds_transition to WaitActive:msg"":",4291"v":0},"
2101 [1] af820581-d317-46fc-9b93-01ba46c1da49 (4bd2fdd5-afb2-479f-8091-75ec93e8aa7a) Active LiveRepairReady Active ds_transition to LiveRepair"name":","cruciblev"":{0,,""name":"levelcrucible""",:msg"30:""level":30Crucible b4ddac83-8f02-4759-90d0-81ecbf434d0d has session id: b1858b94-2183-4402-9eab-f3f8e6e8484a","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:08:02.539322395Z"time":","hostname":"2023-09-22T23:08:02.539324592Z","hostnameip-10-150-1-74.us-west-2.compute.internal"",":pid"":4291},
2102 ip-10-150-1-74.us-west-2.compute.internal"","time"pid":{":42912023-09-22T23:08:02.539333557Z""}msg"
2103 ,":"hostname":"[1] Transition from New to WaitActive{"ip-10-150-1-74.us-west-2.compute.internal",",v"":"pid"msg":04291:","}name"
2104 [1] Transition from LiveRepairReady to LiveRepair":",crucible""v",:{"0level",":nameJob is DownstairsIO { ds_id: JobId(1002), guest_id: 1, work: ExtentFlushClose { dependencies: [JobId(1000), JobId(1001)], extent: 1, flush_number: 2, gen_number: 4, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 1, block: 0 }, ImpactedAddr { extent_id: 1, block: 2 }) }
2105 "msg":"[0] b4ddac83-8f02-4759-90d0-81ecbf434d0d (33b3cc45-a768-4d92-ae35-28fdcf2e3cdf) New New New ds_transition to WaitActive","v":0,"name":"crucible","level""::"crucible"30,"level":3030,"time":","time":2023-09-22T23:08:02.539540937Z"",2023-09-22T23:08:02.539542967Z""hostname",:""hostname":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",,",""pidpid"time""::4291":4291}2023-09-22T23:08:02.53954914Z"}
2106 ,
2107 {"hostname":"ip-10-150-1-74.us-west-2.compute.internal"","msg"pid"::"4291[0] Transition from New to WaitActive"}
2108 ,"v":0,"name":"crucible","level"{:30"msg":"[1] 4b1de70b-0c52-4541-acb2-5a791e8f874c (b63c9270-2f34-4a0f-b0a8-337f9b415156) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.53961479Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2109 ,"time":"{2023-09-22T23:08:02.539631253Z","hostname"":"msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2110 [0] b4ddac83-8f02-4759-90d0-81ecbf434d0d (33b3cc45-a768-4d92-ae35-28fdcf2e3cdf) WaitActive New New ds_transition to WaitQuorum","v":0,{"name":"crucible"","msg":"level":30[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30{,"time":"2023-09-22T23:08:02.539669188Z",""hostnamemsg""::,"""time":"ip-10-150-1-74.us-west-2.compute.internal","pid"2023-09-22T23:08:02.53967583Z":Crucible stats registered with UUID: ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c"4291,",hostname""}v"
2111 ::"0,"{nameip-10-150-1-74.us-west-2.compute.internal"","":pid"":msg4291crucible"":",}"
2112 [0] Transition from WaitActive to WaitQuorum"level":,"30v":0{,"name":"crucible",""level"msg"::"30[1] 4b1de70b-0c52-4541-acb2-5a791e8f874c (b63c9270-2f34-4a0f-b0a8-337f9b415156) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30,"time",:""time":"2023-09-22T23:08:02.539724472Z"2023-09-22T23:08:02.539731055Z",",hostname":""hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid,"":pid"4291:,4291}"
2113 time"}:
2114 {"2023-09-22T23:08:02.539742487Z","{"hostname":msg""":"msg"ip-10-150-1-74.us-west-2.compute.internal":,""pid":4291Crucible ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c has session id: 62d9130a-bdf5-43ee-992c-7a4ed3ffed51"}
2115 ,"v":0,"name":"{crucible","level":"30msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"[0] b4ddac83-8f02-4759-90d0-81ecbf434d0d (33b3cc45-a768-4d92-ae35-28fdcf2e3cdf) WaitQuorum New New ds_transition to Active"time":","v"2023-09-22T23:08:02.539796062Z:"0,,""hostname"name",:"":time"":crucible""ip-10-150-1-74.us-west-2.compute.internal",2023-09-22T23:08:02.539804233Z"",,""test live_repair::repair_test::test_live_repair_deps_super_spanner ... pidhostname"level":ok"
2116 ::"304291}ip-10-150-1-74.us-west-2.compute.internal
2117 ","pid":4291{}
2118 "msg":",{"time":"[0] ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c (eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a) New New New ds_transition to WaitActive""2023-09-22T23:08:02.539848645Z"msg",,":v""":hostname":0","name":"ip-10-150-1-74.us-west-2.compute.internal[2] 4b1de70b-0c52-4541-acb2-5a791e8f874c (b63c9270-2f34-4a0f-b0a8-337f9b415156) Active Active New ds_transition to WaitActive""crucible,"",v",:""0level,""pidname:"30"::"4291crucible","}level"
2119 ,Job is DownstairsIO { ds_id: JobId(1002), guest_id: 1, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001)], extent: 1 }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 1, block: 0 }, ImpactedAddr { extent_id: 1, block: 2 }) }
2120 {":30msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.540030065Z","hostname":""ip-10-150-1-74.us-west-2.compute.internal"time",":pid"":42912023-09-22T23:08:02.53989306Z"}
2121 ,"hostname",:""time":"{ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.540050217Z","pid",":"hostname"msg"4291::""}
2122 [2] Transition from New to WaitActive"ip-10-150-1-74.us-west-2.compute.internal",,""v"pid:{"0:,4291"name":"}"cruciblemsg""
2123 :,""level":30[0] Transition from New to WaitActive",{"v":0,"name":""msg"crucible":","level":30[1] b4ddac83-8f02-4759-90d0-81ecbf434d0d (33b3cc45-a768-4d92-ae35-28fdcf2e3cdf) Active New New ds_transition to WaitActive","v":0,"name":"crucible",,""timelevel""::"302023-09-22T23:08:02.540105266Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",",pid"":time"4291:"}
2124 2023-09-22T23:08:02.540114405Z","hostname,"":{"time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.54012654Z""msg,,""":hostname"":pid"":4291ip-10-150-1-74.us-west-2.compute.internal"},
2125 [2] 4b1de70b-0c52-4541-acb2-5a791e8f874c (b63c9270-2f34-4a0f-b0a8-337f9b415156) Active Active WaitActive ds_transition to WaitQuorum""pid,"":{v":42910,""name"msg}:"":
2126 crucible"","level":30{[0] ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c (eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a) WaitActive New New ds_transition to WaitQuorum"","msg"v"::"0,"name":"[1] Transition from New to WaitActive"crucible",,""v"level"::030,"name":",crucible"","time"level"::"302023-09-22T23:08:02.540182806Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2127 ,"time":"2023-09-22T23:08:02.540198043Z"{,","hostname":"time":""ip-10-150-1-74.us-west-2.compute.internal"msg2023-09-22T23:08:02.540204777Z,""":pid,"":"4291hostname":"}[2] Transition from WaitActive to WaitQuorum
2128 {ip-10-150-1-74.us-west-2.compute.internal"",",pid{""v:"":4291"0msg},
2129 msg"""::""{name":"crucible"[0] Transition from WaitActive to WaitQuorum"Crucible stats registered with UUID: d9cc3da9-eb70-4040-97da-092ce8ba4fe2,"""msglevel""::"30,,""v"v"::0[1] b4ddac83-8f02-4759-90d0-81ecbf434d0d (33b3cc45-a768-4d92-ae35-28fdcf2e3cdf) Active WaitActive New ds_transition to WaitQuorum",0",,name"""name:"v":"crucible":,",0""level",:"time"name"::crucible"""302023-09-22T23:08:02.540268619Z",crucible"",,level""":levelhostname"":":3030ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2130 ,"time":"2023-09-22T23:08:02.540297779Z"{,"hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg,"":",pid"":time4291",:""[2] 4b1de70b-0c52-4541-acb2-5a791e8f874c (b63c9270-2f34-4a0f-b0a8-337f9b415156) Active Active WaitQuorum ds_transition to Active"time"2023-09-22T23:08:02.540307616Z"}:,"
2131 ,2023-09-22T23:08:02.540307424Z"""v"hostname{":,:0""msg,hostname""":"name"::""ip-10-150-1-74.us-west-2.compute.internal""ip-10-150-1-74.us-west-2.compute.internalcrucible,"""pid",:,"[0] ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c (eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a) WaitQuorum New New ds_transition to Active""pid"level,4291:"4291}"
2132 :v"30:}0
2133 ,{"name":"crucible","{level"":msg"30:""msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"Crucible d9cc3da9-eb70-4040-97da-092ce8ba4fe2 has session id: de9105d0-8769-44d7-ac42-838041e175c8",name,"""v"time:"::""0crucible",,""time"level":",2023-09-22T23:08:02.540381102Z":"2023-09-22T23:08:02.540393228Z"30,,name"""Job is DownstairsIO { ds_id: JobId(1003), guest_id: 1, work: ExtentLiveNoOp { dependencies: [JobId(1001), JobId(1002)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 1, block: 0 }, ImpactedAddr { extent_id: 1, block: 2 }) }
2134 ,":time":"2023-09-22T23:08:02.540417324Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",""pid":crucible"4291,"}level":
2135 30{"msg":"hostname[1] b4ddac83-8f02-4759-90d0-81ecbf434d0d (33b3cc45-a768-4d92-ae35-28fdcf2e3cdf) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":,"30time":"2023-09-22T23:08:02.5405682Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2136 {"msg":","[0] d9cc3da9-eb70-4040-97da-092ce8ba4fe2 (86fd9d03-bd19-4d98-b8bb-45992b454d2b) New New New ds_transition to WaitActive"time":,""v":02023-09-22T23:08:02.540585243Z",":"","name":hostname""ip-10-150-1-74.us-west-2.compute.internal:"crucible"",","level"pidip-10-150-1-74.us-west-2.compute.internal"",:":pid"30:42914291}}
2137 
2138 ,"{time":"2023-09-22T23:08:02.540623789Z","hostname"":"msg":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291[1] Transition from WaitQuorum to Active"}
2139 ,""{msg":""msg":"[0] Transition from WaitQuorum to Active"[0] Transition from New to WaitActive"v,,"":v"0:","v"0name:,"0name",:""":"namecrucible"",":level":crucible""30,crucible""level":,"30level":30,"time":"2023-09-22T23:08:02.540675359Z",,""time"hostname"::""2023-09-22T23:08:02.540676926Z"ip-10-150-1-74.us-west-2.compute.internal,""time":",,""2023-09-22T23:08:02.540692776Z"pidhostname""hostname,":hostname":""":ip-10-150-1-74.us-west-2.compute.internal4291"ip-10-150-1-74.us-west-2.compute.internal"},
2140 :""pid":,4291"{pidip-10-150-1-74.us-west-2.compute.internal}"
2141 "":msg4291,"{":pid}"""
2142 msg[2] b4ddac83-8f02-4759-90d0-81ecbf434d0d (33b3cc45-a768-4d92-ae35-28fdcf2e3cdf) Active Active New ds_transition to WaitActive:"":"4291,"{v":}0
2143 [0] d9cc3da9-eb70-4040-97da-092ce8ba4fe2 (86fd9d03-bd19-4d98-b8bb-45992b454d2b) WaitActive New New ds_transition to WaitQuorum,""","msg"v:"name":"{:0","name":"crucible""crucible",msg,"[1] ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c (eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a) Active New New ds_transition to WaitActive"levellevel""::"30:""30,"v":[2] Transition from WaitQuorum to Active"0,","name"v"::"0,"crucible"time":",,"2023-09-22T23:08:02.540781246Zlevel""",":hostname30name""::""crucible"ip-10-150-1-74.us-west-2.compute.internal",",pid":"4291level}"
2144 :30{"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.540798201Z",,""hostname":time"",":time":""ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.540813576Z",,2023-09-22T23:08:02.54078014Z"""pid",hostname,""":"time"hostname":::""ip-10-150-1-74.us-west-2.compute.internal"4291,ip-10-150-1-74.us-west-2.compute.internal"",pid":}"2023-09-22T23:08:02.540808819Zpid4291
2145 }""
2146 ,:{"4291{hostname":"}"
2147 ip-10-150-1-74.us-west-2.compute.internal"","msg":"pidmsg""::[0] d9cc3da9-eb70-4040-97da-092ce8ba4fe2 (86fd9d03-bd19-4d98-b8bb-45992b454d2b) WaitQuorum New New ds_transition to Active"","{v":4291[1] Transition from New to WaitActive"0",}"msg
2148 ,"v""::name"0{,"":name"[2] Transition from New to WaitActivecrucible""":msg":"",""cruciblev","":4b1de70b-0c52-4541-acb2-5a791e8f874c is now active with session: b63c9270-2f34-4a0f-b0a8-337f9b4151560level"",:,"30,name"""level"v":::,""30time0crucible"":",,""2023-09-22T23:08:02.540909272Z"level"name,:""30hostname":":"crucible"ip-10-150-1-74.us-west-2.compute.internal",,""pid":level"4291:}
2149 ,30{""timemsg":"":"[0] Transition from WaitQuorum to Active","v":2023-09-22T23:08:02.540918208Z"0,"name,":""crucible",,"hostname"level":":30"time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.540927779Z","pid,"":,,hostname"""4291:time"}time"":":
2150 2023-09-22T23:08:02.540950804Z""ip-10-150-1-74.us-west-2.compute.internal",",hostname2023-09-22T23:08:02.540937283Z{"":"",pidip-10-150-1-74.us-west-2.compute.internal""",msg""hostnamepid":":4291":4291}"}:
2151 [1] ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c (eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a) Active WaitActive New ds_transition to WaitQuorum""ip-10-150-1-74.us-west-2.compute.internal","pid"{:4291}
2152 "msg":"{"[2] b4ddac83-8f02-4759-90d0-81ecbf434d0d (33b3cc45-a768-4d92-ae35-28fdcf2e3cdf) Active Active WaitActive ds_transition to WaitQuorummsg"":","v":0,"name":"crucible",[1] 4b1de70b-0c52-4541-acb2-5a791e8f874c (b63c9270-2f34-4a0f-b0a8-337f9b415156) Active Active Active ds_transition to Faulted"",level""v"::300,"name":"crucible",",level"":v"30:0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.541035309Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time",:""pid":4291,"}time
2153 2023-09-22T23:08:02.541041939Z"
2154 "{:,""{"hostname2023-09-22T23:08:02.541047554Zmsg""":msg"":",""[2] Transition from WaitActive to WaitQuorum":hostname[1] d9cc3da9-eb70-4040-97da-092ce8ba4fe2 (86fd9d03-bd19-4d98-b8bb-45992b454d2b) Active New New ds_transition to WaitActive"",",v"ip-10-150-1-74.us-west-2.compute.internal":v":"",:"0pid,":"0name4291,""name:"}""crucible"
2155 ,:ip-10-150-1-74.us-west-2.compute.internal"""level":crucible",30{,""level"pid"::"30msg":4291"}
2156 ,"[1] Transition from Active to Faulted"time":","v"2023-09-22T23:08:02.541118677Z{":,"0hostname":",""name"msg":,:"""crucible"time"[1] Transition from WaitActive to WaitQuorum,":ip-10-150-1-74.us-west-2.compute.internal""level",":",2023-09-22T23:08:02.541123578Z""pid",:30v":"42910hostname",:}"
2157 name""{:""crucible"msg",ip-10-150-1-74.us-west-2.compute.internal"":",level"":[1] Transition from New to WaitActive"pid",,"v":30"0:time4291",":name":"}crucible"
2158 ","level":302023-09-22T23:08:02.541156128Z","{hostname":","ip-10-150-1-74.us-west-2.compute.internal"",time",""msgtime":"":":pid"2023-09-22T23:08:02.541178299Z""2023-09-22T23:08:02.541190565Z",:,4291""hostname"}:"
2159 [2] b4ddac83-8f02-4759-90d0-81ecbf434d0d (33b3cc45-a768-4d92-ae35-28fdcf2e3cdf) Active Active WaitQuorum ds_transition to Active"ip-10-150-1-74.us-west-2.compute.internal"hostname,,""pid":":{v""4291:}
2160 ip-10-150-1-74.us-west-2.compute.internal"{"msg,""0"pid"msg":":,"":name4291":[1] d9cc3da9-eb70-4040-97da-092ce8ba4fe2 (86fd9d03-bd19-4d98-b8bb-45992b454d2b) Active WaitActive New ds_transition to WaitQuorum}"
2161 "[1] 4b1de70b-0c52-4541-acb2-5a791e8f874c (b63c9270-2f34-4a0f-b0a8-337f9b415156) Active Faulted Active ds_transition to LiveRepairReady","crucible,{""v":,v""0msg":,"":level"0name",":name"30:":""crucible"crucible",","level":level"30:30[1] ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c (eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible",,""time":"level":2023-09-22T23:08:02.541271229Z30","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time":,""pid":4291}
2162 ,2023-09-22T23:08:02.541265558Z""{time",:""msg":"",2023-09-22T23:08:02.541272628Z"[1] Transition from WaitActive to WaitQuorum"hostname":,time""":"hostname"ip-10-150-1-74.us-west-2.compute.internal,"",:"""v":pid"0:,"4291ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.541282086Z,name}":""crucible"
2163 "pid",":,level":4291{30"}hostname"
2164 :",""msg"time":{:""ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.541333721Z"","msg,"hostnamepid[2] Transition from WaitQuorum to Active":"""":,ip-10-150-1-74.us-west-2.compute.internal":,""4291pid":"}v
2165 4291[1] Transition from Faulted to LiveRepairReady""},
2166 {"{v"::0",0",msg":"""namename""msg:[1] d9cc3da9-eb70-4040-97da-092ce8ba4fe2 (86fd9d03-bd19-4d98-b8bb-45992b454d2b) Active WaitQuorum New ds_transition to Active""crucible"":,"":crucible"",v":[1] Transition from WaitQuorum to Active"","level,""0:,level30v":"":name":"0crucible",30,""level":name"30:"crucible","level":30,,""timetime":"":"2023-09-22T23:08:02.541411043Z","hostname"2023-09-22T23:08:02.541401648Z":",,""ip-10-150-1-74.us-west-2.compute.internal"hostname",":time"pid":"4291ip-10-150-1-74.us-west-2.compute.internal"}
2167 ,,{""pid":"msg"":":time2023-09-22T23:08:02.541408641Z4291[1] Transition from WaitQuorum to Active"""},:"
2168 ,v"":"02023-09-22T23:08:02.541414725Z",hostname","{:name":""""crucible"msgip-10-150-1-74.us-west-2.compute.internal",,"":"hostnamelevel"pid":"30"::"4291}ip-10-150-1-74.us-west-2.compute.internal"
2169 ,"[1] 4b1de70b-0c52-4541-acb2-5a791e8f874c (b63c9270-2f34-4a0f-b0a8-337f9b415156) Active LiveRepairReady Active ds_transition to LiveRepair"time":,,"""2023-09-22T23:08:02.541477907Z"{,"pidhostnamev"":"":":msg"ip-10-150-1-74.us-west-2.compute.internal"4291,"0:pid",":"}4291name}b4ddac83-8f02-4759-90d0-81ecbf434d0d is now active with session: 33b3cc45-a768-4d92-ae35-28fdcf2e3cdf"
2170 "
2171 :,"{"crucible"{v"","msg""::levelmsg"0":,"""name":[2] d9cc3da9-eb70-4040-97da-092ce8ba4fe2 (86fd9d03-bd19-4d98-b8bb-45992b454d2b) Active Active New ds_transition to WaitActive30":,""[2] ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c (eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a) Active Active New ds_transition to WaitActive"v":,0",v"crucible"":,name":"0"crucible",,"level""name"level":30,:":"time"crucible":30,"",level""2023-09-22T23:08:02.541541346Z"time":",:"2023-09-22T23:08:02.541559004Z"30,"hostname"hostname":":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291ip-10-150-1-74.us-west-2.compute.internal"}
2172 ,"pid{":"4291msg":",}"
2173 [2] Transition from New to WaitActive"time",":v"",{"2023-09-22T23:08:02.541564398Z"time"::0"",msg",2023-09-22T23:08:02.541572171Z"""namehostname:",":"""hostname":":crucible""ip-10-150-1-74.us-west-2.compute.internal"[1] Transition from LiveRepairReady to LiveRepair,ip-10-150-1-74.us-west-2.compute.internal,"""level,""":pid"v"30:,:42910",pid"":name"4291,":}time"}
2174 ":"crucible"
2175 {2023-09-22T23:08:02.541622041Z",","level"hostname":"{:"ip-10-150-1-74.us-west-2.compute.internal"msg"30:",""pid":msg"4291[2] Transition from New to WaitActive"}
2176 :","{v":"msg":"0,"name":"[1] b4ddac83-8f02-4759-90d0-81ecbf434d0d (33b3cc45-a768-4d92-ae35-28fdcf2e3cdf) Active Active Active ds_transition to Faulted"crucible"[2] d9cc3da9-eb70-4040-97da-092ce8ba4fe2 (86fd9d03-bd19-4d98-b8bb-45992b454d2b) Active Active WaitActive ds_transition to WaitQuorum",,,""v",""time"v":level0:":,"0name":"":,"2023-09-22T23:08:02.541649886Z"30crucible",name"","hostname"::"level":"30ip-10-150-1-74.us-west-2.compute.internal"crucible",",pid"":level"4291:30},"
2177 ,time":""time"2023-09-22T23:08:02.541689102Z":,""hostname":"2023-09-22T23:08:02.541682611Z"ip-10-150-1-74.us-west-2.compute.internal","pid",:"4291hostname"}:,
2178 ""{time":""msg":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.541695034Z",[2] Transition from WaitActive to WaitQuorum"",,pid"""hostname":v4291"::0","}name":ip-10-150-1-74.us-west-2.compute.internal"
2179 ,""crucible"pid",":level":429130{}
2180 ","msg"time":":{"2023-09-22T23:08:02.54174225Z","hostname":""ip-10-150-1-74.us-west-2.compute.internal","msg"pid"::[2] ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c (eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a) Active Active WaitActive ds_transition to WaitQuorum"4291"}
2181 ,[1] Transition from Active to Faulted""{v,""msg"::""0v",:"0name",[2] d9cc3da9-eb70-4040-97da-092ce8ba4fe2 (86fd9d03-bd19-4d98-b8bb-45992b454d2b) Active Active WaitQuorum ds_transition to Active":""name":",crucible"crucible"",v":"0level,,"":level"30name":"crucible","level":30,"time":,""time":"2023-09-22T23:08:02.541789704Z","2023-09-22T23:08:02.54178692Z"hostname":","hostname":"":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":ip-10-150-1-74.us-west-2.compute.internal"4291,"pid":}4291
2182 }
2183 {"msg"{:","[2] Transition from WaitQuorum to Activetime"":"msg",":""2023-09-22T23:08:02.541812753Z"v":,0",[2] Transition from WaitActive to WaitQuorum""hostnamename,"":":v""":crucible"0,,ip-10-150-1-74.us-west-2.compute.internal"",""levelpid""::name"429130:"}crucible"
2184 ,"level":30{"msg":","time":"2023-09-22T23:08:02.541859286Z"[1] b4ddac83-8f02-4759-90d0-81ecbf434d0d (33b3cc45-a768-4d92-ae35-28fdcf2e3cdf) Active Faulted Active ds_transition to LiveRepairReady",",hostname":""v":,"0time,"ip-10-150-1-74.us-west-2.compute.internal":""name",:2023-09-22T23:08:02.541867059Z""",crucible""pid,"hostname""::"4291level":30}ip-10-150-1-74.us-west-2.compute.internal"
2185 ,"pid":4291}
2186 {"msg{":",""time"msg"::"d9cc3da9-eb70-4040-97da-092ce8ba4fe2 is now active with session: 86fd9d03-bd19-4d98-b8bb-45992b454d2b"","v":2023-09-22T23:08:02.541899519Z"0,",name"":[2] ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c (eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a) Active Active WaitQuorum ds_transition to Active""hostname",:""crucible"v":ip-10-150-1-74.us-west-2.compute.internal,"0",,level"""name":pid":30:"4291crucible","}level
2187 ,{"time":"2023-09-22T23:08:02.541942077Z""msg,"":hostname"":"test live_repair::repair_test::test_live_repair_deps_writes ... [1] Transition from Faulted to LiveRepairReady"okip-10-150-1-74.us-west-2.compute.internal",
2188 ",v"":pid0,"name":"crucible","level":30":,"4291time":"}
2189 2023-09-22T23:08:02.542001121Z","hostname":{""ip-10-150-1-74.us-west-2.compute.internal"msg":","pid":4291}
2190 [1] d9cc3da9-eb70-4040-97da-092ce8ba4fe2 (86fd9d03-bd19-4d98-b8bb-45992b454d2b) Active Active Active ds_transition to Faulted",{"v":0,"name":"crucible"","msg":level"":30[1] b4ddac83-8f02-4759-90d0-81ecbf434d0d (33b3cc45-a768-4d92-ae35-28fdcf2e3cdf) Active LiveRepairReady Active ds_transition to LiveRepair","v":,0","timename""::""crucible""2023-09-22T23:08:02.54204875Z",:,""level"hostname30:"30:"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2191 {"msg":"[1] Transition from Active to Faulted","v":0,"name":,,"""time":crucible""time,""2023-09-22T23:08:02.54207645Z"level":,"":hostname"2023-09-22T23:08:02.542075759Z"30:","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal"4291,"pid":}4291
2192 }
2193 {,"time":""2023-09-22T23:08:02.542107023Z"msg":{","hostname":"[1] Transition from LiveRepairReady to LiveRepair""ip-10-150-1-74.us-west-2.compute.internal"msg",:","pid"":v4291[2] Transition from WaitQuorum to Active"":,}0",
2194 "v"name"::{"0crucible",",name""msg""level"::":30"crucible","level":30[1] d9cc3da9-eb70-4040-97da-092ce8ba4fe2 (86fd9d03-bd19-4d98-b8bb-45992b454d2b) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30,"time":",2023-09-22T23:08:02.542158972Z"","time"hostname"::""2023-09-22T23:08:02.542162035Z","ip-10-150-1-74.us-west-2.compute.internal"time",,:"""pid":hostname"4291:2023-09-22T23:08:02.542169525Z"}",
2195 ip-10-150-1-74.us-west-2.compute.internal"","hostname"pid"::"4291}
2196 ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
2197 "msg":"{"msg":ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c is now active with session: eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a"","v":0[1] Transition from Faulted to LiveRepairReady",","name"v":":crucible0",","name":level"":crucible"30,"level":30,"time":",2023-09-22T23:08:02.542235198Z"",time":""hostname":"2023-09-22T23:08:02.542236891Z",ip-10-150-1-74.us-west-2.compute.internal"","hostnamepid""::4291"}
2198 ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
2199 "msg":"{"msg":"[1] ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c (eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a) Active Active Active ds_transition to Faulted","v":0,"name":"crucible",[1] d9cc3da9-eb70-4040-97da-092ce8ba4fe2 (86fd9d03-bd19-4d98-b8bb-45992b454d2b) Active LiveRepairReady Active ds_transition to LiveRepair"",level"":v"30:0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.542291725Z",,""hostnametime":"":"2023-09-22T23:08:02.54229634Z",ip-10-150-1-74.us-west-2.compute.internal""hostname",:""pid":4291}ip-10-150-1-74.us-west-2.compute.internal
2200 ","pid":4291}{
2201 "msg":"{[1] Transition from Active to Faulted",""v":msg"0:","name":"crucible"[1] Transition from LiveRepairReady to LiveRepair",",level"":v"30:0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.542344284Z,""time,":""hostname":"2023-09-22T23:08:02.542348595Z","ip-10-150-1-74.us-west-2.compute.internalhostname"":,""pid":4291ip-10-150-1-74.us-west-2.compute.internal}"
2202 ,"pid":4291}
2203 {"{msg":""msg":"No repair needed for extent 1","v":[1] ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c (eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a) Active Faulted Active ds_transition to LiveRepairReady"0,,""name"v":":crucible"0,","level"name:":30"crucible","level":30,"time":"2023-09-22T23:08:02.542404334Z",","hostname":"time":"{ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.542407511Z",","pid":hostname4291"":",msg"":"ip-10-150-1-74.us-west-2.compute.internal"":,""Crucible stats registered with UUID: bf05128a-1b6d-44ee-a686-2a0f8c44fb12downstairspid""":}4291
2204 ,}"test live_repair::repair_test::test_live_repair_enqueue_close ...
2205 v":{ok0
2206 ","msg"name"::""crucible","[1] Transition from Faulted to LiveRepairReady"level":,"v"30:0,"name":"crucible","level":30,"time":",2023-09-22T23:08:02.542484651Z"",time":""hostname":"2023-09-22T23:08:02.542480088Z","ip-10-150-1-74.us-west-2.compute.internal"hostname",:""pid":4291ip-10-150-1-74.us-west-2.compute.internal"},
2207 "pid":4291{}
2208 "msg":"{[1] ba5b2c47-00db-4ad1-a76f-e40e8bf42e3c (eb4c23a9-6f3c-467c-9a21-2dac1fc4c32a) Active LiveRepairReady Active ds_transition to LiveRepair",""msg"v"::"0,"name":"crucible","level"Crucible bf05128a-1b6d-44ee-a686-2a0f8c44fb12 has session id: a7c67caa-b740-4a8a-abf1-20c5651d758c":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.542547226Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time":}"
2209 2023-09-22T23:08:02.542553586Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid"":msg"4291:"}
2210 [1] Transition from LiveRepairReady to LiveRepair","v":0,"{name":"crucible","level":"30msg":"[0] bf05128a-1b6d-44ee-a686-2a0f8c44fb12 (37037115-02dd-475d-a8b7-528101615ffc) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.542597408Z","hostname"Job is DownstairsIO { ds_id: JobId(1003), guest_id: 1, work: ExtentLiveRepair { dependencies: [JobId(1001), JobId(1002)], extent: 1, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 1, block: 0 }, ImpactedAddr { extent_id: 1, block: 2 }) }
2211 test live_repair::repair_test::test_live_repair_enqueue_repair_noop ... ok
2212 :",ip-10-150-1-74.us-west-2.compute.internal"",time""pid"::"42912023-09-22T23:08:02.542609635Z"}
2213 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
22142023-09-22T23:08:02.542ZINFOcrucible: [0] Transition from New to WaitActive
22152023-09-22T23:08:02.542ZINFOcrucible: [0] bf05128a-1b6d-44ee-a686-2a0f8c44fb12 (37037115-02dd-475d-a8b7-528101615ffc) WaitActive New New ds_transition to WaitQuorum
22162023-09-22T23:08:02.542ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
22172023-09-22T23:08:02.542ZINFOcrucible: [0] bf05128a-1b6d-44ee-a686-2a0f8c44fb12 (37037115-02dd-475d-a8b7-528101615ffc) WaitQuorum New New ds_transition to Active
2218 {"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.542896894Z","hostname"{:"ip-10-150-1-74.us-west-2.compute.internal","pid":4291"}
2219 msg":"test live_repair::repair_test::test_live_repair_enqueue_reopen ... {ok"
2220 msg"Crucible stats registered with UUID: 8d97f7f2-258d-40c3-ad7f-db2027cd44f9":","v":0,"name":"crucible"[1] bf05128a-1b6d-44ee-a686-2a0f8c44fb12 (37037115-02dd-475d-a8b7-528101615ffc) Active New New ds_transition to WaitActive",","level"v"::0,30"name":"crucible","level":30,"time":"2023-09-22T23:08:02.542949584Z",","time"hostname"::""2023-09-22T23:08:02.542946428Zip-10-150-1-74.us-west-2.compute.internal"","pid",:"4291hostname":"}
2221 ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
2222 "msg":"[1] Transition from New to WaitActive","{v":0,"name":"crucible"","msg"level"::"30Crucible 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 has session id: 6a69c769-4563-43e7-8b68-5571fb482e62","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.543000771Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2223 ,"time":"2023-09-22T23:08:02.543010952Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2224 [1] bf05128a-1b6d-44ee-a686-2a0f8c44fb12 (37037115-02dd-475d-a8b7-528101615ffc) Active WaitActive New ds_transition to WaitQuorum","v":0,"name{":"crucible","level":"30msg":"[0] 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 (32d96d38-3745-4ae0-96ff-5afe11fcae22) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.543055415Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2225 ,"time":"2023-09-22T23:08:02.543067649Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":[1] Transition from WaitActive to WaitQuorum"4291,"v":}0
2226 ,"name":"crucible","level{":30"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.543103816Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2227 ,"time":"2023-09-22T23:08:02.54311389Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2228 [1] bf05128a-1b6d-44ee-a686-2a0f8c44fb12 (37037115-02dd-475d-a8b7-528101615ffc) Active WaitQuorum New ds_transition to Active","v":0,{"name":"crucible"","msg"level"::"30[0] 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 (32d96d38-3745-4ae0-96ff-5afe11fcae22) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.543153982Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2229 ,"{time":""msg{2023-09-22T23:08:02.543165393Z"msg":"","hostname":"Crucible stats registered with UUID: 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b"ip-10-150-1-74.us-west-2.compute.internal,""v",":pid":04291,"name"}:"crucible","level":
2230 30{"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.543208652Z",""hostname"::,""[1] Transition from WaitQuorum to Active","vip-10-150-1-74.us-west-2.compute.internal":"0,,""pid"name:":4291"crucible",}"
2231 "time":"{2023-09-22T23:08:02.543222229Z","hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal"level,"":pid"30:Crucible 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b has session id: 12b715e8-407f-4d25-b63b-f62a28f963b8"4291,"v"}:
2232 0,"name":"crucible",{"level":30"msg":","time":"[0] 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 (32d96d38-3745-4ae0-96ff-5afe11fcae22) WaitQuorum New New ds_transition to Active"2023-09-22T23:08:02.543273873Z,""v":,"0hostname":,""name":"crucible"ip-10-150-1-74.us-west-2.compute.internal",,""level,"":time":"2023-09-22T23:08:02.543286687Z","30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2233 pid{":4291}"
2234 msg{":""msg":"[0] 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b (63d034ab-cd7a-4ae2-9a0d-89f39e17085b) New New New ds_transition to WaitActive","v":{0,"[2] bf05128a-1b6d-44ee-a686-2a0f8c44fb12 (37037115-02dd-475d-a8b7-528101615ffc) Active Active New ds_transition to WaitActivename"":","crucible""v,"level":30":0,"name":"crucible",",level"":time"30:"2023-09-22T23:08:02.543309484Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",",pid"":time"4291:"}
2235 2023-09-22T23:08:02.543350477Z",","time":hostname"":{"2023-09-22T23:08:02.543359635Z",""ip-10-150-1-74.us-west-2.compute.internalhostname"":,"pid":4291}
2236 msg":"Crucible stats registered with UUID: 85751bdd-8856-4e5b-8ef6-adad326659c5"{,"v":msg0",":name""msg":"[0] Transition from New to WaitActive",:""v":crucible"0,,"name":"crucible","level":"30level":30"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.543415482Z",","time"hostname"::""2023-09-22T23:08:02.543416734Z"ip-10-150-1-74.us-west-2.compute.internal",,""hostname",:""pid":ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2237 4291"}
2238 time{"":"msg":"2023-09-22T23:08:02.543427481Z","hostname":"[0] 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b (63d034ab-cd7a-4ae2-9a0d-89f39e17085b) WaitActive New New ds_transition to WaitQuorumip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal"",,""pidv":0,"name":"crucible"":,"4291level":30}{
2239 "msg":"{Crucible 85751bdd-8856-4e5b-8ef6-adad326659c5 has session id: 97d2e79d-a968-4554-ac16-de9c428b4ad8"",msg"":v"":,"0time",:[1] 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 (32d96d38-3745-4ae0-96ff-5afe11fcae22) Active New New ds_transition to WaitActive","v":0,""name":"crucible"2023-09-22T23:08:02.543487877Z",",level"",:"30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2240 ,"time":"{2023-09-22T23:08:02.543522774Z","hostname""msg":":"[0] Transition from WaitActive to WaitQuorum","v":ip-10-150-1-74.us-west-2.compute.internal"0,",pid":4291}"
2241 name"{:"crucible",""level":30msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible"pid,""level":":name30"4291:"crucible"},
2242 ,""level"time:":"30{2023-09-22T23:08:02.543567375Z",","hostname":time":"""msg"2023-09-22T23:08:02.543580425Z:ip-10-150-1-74.us-west-2.compute.internal""",,""hostnamepid":4291}
2243 {[2] Transition from New to WaitActive","v":,0","nametime""::""crucible","2023-09-22T23:08:02.543594079Z"level":30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2244 {""msgmsg":"":","time":"2023-09-22T23:08:02.543638046Z"[0] 85751bdd-8856-4e5b-8ef6-adad326659c5 (8cb064bd-4131-4172-9681-8a036ecff523) New New New ds_transition to WaitActive"[0] 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b (63d034ab-cd7a-4ae2-9a0d-89f39e17085b) WaitQuorum New New ds_transition to Active",,,""hostname":""ip-10-150-1-74.us-west-2.compute.internal:""v",:"0ip-10-150-1-74.us-west-2.compute.internalpid",":"4291,name""pid:"":crucible"4291,"level":}30
2245 }
2246 {,"time":"{2023-09-22T23:08:02.543688704Z""msg":",""hostname":"msg"":"ip-10-150-1-74.us-west-2.compute.internalv""[1] 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 (32d96d38-3745-4ae0-96ff-5afe11fcae22) Active WaitActive New ds_transition to WaitQuorum:,"0,[2] bf05128a-1b6d-44ee-a686-2a0f8c44fb12 (37037115-02dd-475d-a8b7-528101615ffc) Active Active WaitActive ds_transition to WaitQuorum""name",:""v"crucible":,0","level"name"::"30crucible","level":30,"time":"pid":2023-09-22T23:08:02.543731132Z"4291,","time":}","hostname"v"::"0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible",","pid"level"::304291}
2247 {"msg":","[0] Transition from New to WaitActive
2248 {""msg":"[0] Transition from WaitQuorum to Active","v":2023-09-22T23:08:02.543733978Z"0,"name":"crucible","timehostname""::"","level":302023-09-22T23:08:02.543769778Zip-10-150-1-74.us-west-2.compute.internal"",",pid"":hostname":"4291}ip-10-150-1-74.us-west-2.compute.internal"
2249 ,"pid":4291{}
2250 "msg":"","{time":"[2] Transition from WaitActive to WaitQuorum,""2023-09-22T23:08:02.543893133Z"msg,",":"hostname":"[1] Transition from WaitActive to WaitQuorum"ip-10-150-1-74.us-west-2.compute.internal,""v",:"0pid",":name"4291:"crucible"},
2251 {""msg"level"::""30v":0,"name":"crucible","level":[1] 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b (63d034ab-cd7a-4ae2-9a0d-89f39e17085b) Active New New ds_transition to WaitActive"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.543946224Z",","hostname"time:"":"2023-09-22T23:08:02.543951616Z"ip-10-150-1-74.us-west-2.compute.internal",","hostnamepid""::"4291}
2252 ip-10-150-1-74.us-west-2.compute.internal",,"{""msg":"v":0,"name":"crucible"[1] 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 (32d96d38-3745-4ae0-96ff-5afe11fcae22) Active WaitQuorum New ds_transition to Active",","levelv":":0,"30name":"crucible","level":30,"time":"2023-09-22T23:08:02.543996016Z","hostname,""time:time"":"":ip-10-150-1-74.us-west-2.compute.internal"","pid":4291}
2253 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30"pid":4291,"time":"}
2254 2023-09-22T23:08:02.544036364Z"2023-09-22T23:08:02.543957737Z","hostname":","{hostname":ip-10-150-1-74.us-west-2.compute.internal"",""msgpid""::"4291ip-10-150-1-74.us-west-2.compute.internal"},
2255 "pid":2023-09-22T23:08:02.543993477Z[2] bf05128a-1b6d-44ee-a686-2a0f8c44fb12 (37037115-02dd-475d-a8b7-528101615ffc) Active Active WaitQuorum ds_transition to Active{"4291",""}vmsg""::"0,,""
2256 {[2] 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 (32d96d38-3745-4ae0-96ff-5afe11fcae22) Active Active New ds_transition to WaitActive","v":0,"name":""crucible"msg",":"level":30[1] Transition from New to WaitActive","v"name:"0:","name":cruciblehostname""",crucible:"ip-10-150-1-74.us-west-2.compute.internal",,""timepid":":4291"2023-09-22T23:08:02.544127547Z}"
2257 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":{4291"},
2258 ""msg":level"":{30"msg":"[0] 85751bdd-8856-4e5b-8ef6-adad326659c5 (8cb064bd-4131-4172-9681-8a036ecff523) WaitActive New New ds_transition to WaitQuorum"[2] Transition from New to WaitActive",,""v"v"::00,,""name"name"::""crucible"crucible",","level"level"::30,30"time":"2023-09-22T23:08:02.544174817Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"":4291level,"":time"}30
2259 ,"time":"2023-09-22T23:08:02.544195456Z","hostname":{":,"""ip-10-150-1-74.us-west-2.compute.internal"time,msg""pid":4291}
2260 {""msg":":"2023-09-22T23:08:02.544207251Z"[0] Transition from WaitActive to WaitQuorum",","hostname"v"::"0,"name":":ip-10-150-1-74.us-west-2.compute.internal"crucible","level":[1] 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b (63d034ab-cd7a-4ae2-9a0d-89f39e17085b) Active WaitActive New ds_transition to WaitQuorum"30,"v":02023-09-22T23:08:02.544193935Z,"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2261 ,"{time":""2023-09-22T23:08:02.544264475Z"msg":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291[2] 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 (32d96d38-3745-4ae0-96ff-5afe11fcae22) Active Active WaitActive ds_transition to WaitQuorum"},
2262 "namev""::0","crucible"name,"level":30","pid":4291}
2263 {,"time":""msg":"2023-09-22T23:08:02.544305403Z","[2] Transition from WaitQuorum to Active"hostname":","v":0,"ip-10-150-1-74.us-west-2.compute.internal"name":"",:""crucible","level":30pid":4291}
2264 crucible","{level":30","msg"time"::""2023-09-22T23:08:02.544337534Z"[1] Transition from WaitActive to WaitQuorum",","{v,"time":"2023-09-22T23:08:02.54435019Z""msg",":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291[0] 85751bdd-8856-4e5b-8ef6-adad326659c5 (8cb064bd-4131-4172-9681-8a036ecff523) WaitQuorum New New ds_transition to Active"}
2265 ,"v":0,{"name":""crucible"msg":,""level":30bf05128a-1b6d-44ee-a686-2a0f8c44fb12 is now active with session: 37037115-02dd-475d-a8b7-528101615ffc","v":0,"name":"crucible"",":level"0:30,"name":"crucible","level":30hostname":","time":ip-10-150-1-74.us-west-2.compute.internal,"""time,"":"pid":2023-09-22T23:08:02.54438621Z"42912023-09-22T23:08:02.544394773Z","},"
2266 hostname"hostname":":"{,"ip-10-150-1-74.us-west-2.compute.internal"time"":msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2267 ,"time":"2023-09-22T23:08:02.544432075Z",""hostname"{:2023-09-22T23:08:02.544399642Z"",""ip-10-150-1-74.us-west-2.compute.internal"msg,""pid"::"hostname[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,""time":":"2023-09-22T23:08:02.544468952Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid":,4291"pid":}
2268 4291{}
2269 {4291"msg":"[1] 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b (63d034ab-cd7a-4ae2-9a0d-89f39e17085b) Active WaitQuorum New ds_transition to Active",,""v":pid":04291,"name":"}
2270 crucible","level":30{"msg":"}
2271 [1] bf05128a-1b6d-44ee-a686-2a0f8c44fb12 (37037115-02dd-475d-a8b7-528101615ffc) Active Active Active ds_transition to Faulted"{,"v":0","msg"name:"":"crucible","level":30[2] 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 (32d96d38-3745-4ae0-96ff-5afe11fcae22) Active Active WaitQuorum ds_transition to Active",,""v"":time"0msg,:""name":"crucible","level":30,"time":"2023-09-22T23:08:02.544606792Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2272 ,"time":"{2023-09-22T23:08:02.544620596Z"","msg":hostname"":"[1] Transition from Active to Faulted"ip-10-150-1-74.us-west-2.compute.internal,"",v"":pid"0:,"4291name":"}
2273 crucible","level"{:30"msg":"2023-09-22T23:08:02.544591187Z"[2] Transition from WaitQuorum to Active",",v":"0,hostname"":name","::"""time""crucibleip-10-150-1-74.us-west-2.compute.internal","pid":[1] 85751bdd-8856-4e5b-8ef6-adad326659c5 (8cb064bd-4131-4172-9681-8a036ecff523) Active New New ds_transition to WaitActive"4291},
2274 ","level":30,"{time":"2023-09-22T23:08:02.544695071Z",""hostname":msg"":"ip-10-150-1-74.us-west-2.compute.internal"[1] Transition from WaitQuorum to Active","pid"::,"4291"v":}2023-09-22T23:08:02.54464569Z0","name":""crucible","v"level"::030,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.54472645Z","hostname":",,""ip-10-150-1-74.us-west-2.compute.internal"time":"2023-09-22T23:08:02.544731603Z","hostname":","pid":ip-10-150-1-74.us-west-2.compute.internal4291
2275 "},"pid":4291{}
2276 "msg":"8d97f7f2-258d-40c3-ad7f-db2027cd44f9 is now active with session: 32d96d38-3745-4ae0-96ff-5afe11fcae22"{,"v":0,"name":"crucible",""msg"level"::30"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.544789323Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2277 ,"time":"{2023-09-22T23:08:02.544797338Z"","msg":hostname"":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291[1] 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 (32d96d38-3745-4ae0-96ff-5afe11fcae22) Active Active Active ds_transition to Faulted}"
2278 ,"v":0,"name":"{crucible","level":30"msg":"[1] 85751bdd-8856-4e5b-8ef6-adad326659c5 (8cb064bd-4131-4172-9681-8a036ecff523) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible",,""
2279 {level":30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2280 {"msg":","time":"2023-09-22T23:08:02.544871349Z"","msg"hostname:[1] bf05128a-1b6d-44ee-a686-2a0f8c44fb12 (37037115-02dd-475d-a8b7-528101615ffc) Active Faulted Active ds_transition to LiveRepairReady"","v":0,"name":"crucible","level":30time":"2023-09-22T23:08:02.544836794Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2281 ,"time":"{2023-09-22T23:08:02.544901131Z","hostname"":"msg":"ip-10-150-1-74.us-west-2.compute.internal"[1] Transition from Active to Faulted,""pid",:"4291v":0}
2282 ,[2] 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b (63d034ab-cd7a-4ae2-9a0d-89f39e17085b) Active Active New ds_transition to WaitActive{"msg":"[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.544938452Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal"","pid":,4291}
2283 "v":{0,""namemsg""::""crucible"","level"::"30[1] bf05128a-1b6d-44ee-a686-2a0f8c44fb12 (37037115-02dd-475d-a8b7-528101615ffc) Active LiveRepairReady Active ds_transition to LiveRepair","v":ip-10-150-1-74.us-west-2.compute.internal"0,","name"pid:"":crucible"4291,"level":30}
2284 ,{,""time":time""msg":"name":"crucible"[1] Transition from WaitActive to WaitQuorum","level":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.545002093Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2285 ",":"time":{"2023-09-22T23:08:02.544973779Z""2023-09-22T23:08:02.545007872Zmsg",","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2286 "2023-09-22T23:08:02.544982589Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2287 [1] 85751bdd-8856-4e5b-8ef6-adad326659c5 (8cb064bd-4131-4172-9681-8a036ecff523) Active WaitQuorum New ds_transition to Active","v":{0,""name"hostname"::"ip-10-150-1-74.us-west-2.compute.internal","pid":"4291"crucible":}",
2288 [1] 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 (32d96d38-3745-4ae0-96ff-5afe11fcae22) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30"level":30,"time":"2023-09-22T23:08:02.545086327Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}{
2289 ","msg"time{"::"""msg":[2] Transition from New to WaitActive"2023-09-22T23:08:02.545091532Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2290 {"msg":"[1] Transition from WaitQuorum to Active","v":0msg,"":"name":"crucible","[1] Transition from LiveRepairReady to LiveRepair",level",:"30v"":v"","time"[1] Transition from Faulted to LiveRepairReady":","v"2023-09-22T23:08:02.545152171Z":0,,""hostnamename""::""crucible","ip-10-150-1-74.us-west-2.compute.internal"level",:"30pid"::04291,"}
2291 name":"crucible","level":{30,"time":""msg":"2023-09-22T23:08:02.545182518Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","[2] 85751bdd-8856-4e5b-8ef6-adad326659c5 (8cb064bd-4131-4172-9681-8a036ecff523) Active Active New ds_transition to WaitActive"pid":,4291"v":}0
2292 ,,""time"{:"2023-09-22T23:08:02.545194208Z""msg",":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"[1] 8d97f7f2-258d-40c3-ad7f-db2027cd44f9 (32d96d38-3745-4ae0-96ff-5afe11fcae22) Active LiveRepairReady Active ds_transition to LiveRepair":,"4291}
2293 name":"crucible","level":300,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.545256216Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid"":msg"4291,":"time"}:
2294 "[2] 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b (63d034ab-cd7a-4ae2-9a0d-89f39e17085b) Active Active WaitActive ds_transition to WaitQuorum",2023-09-22T23:08:02.545268365Z""v{",":hostname"":0msg,"name":"crucible","level":30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.54530405Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time,":""pid""::42912023-09-22T23:08:02.545315816Z}
2295 ""[2] Transition from New to WaitActive"ip-10-150-1-74.us-west-2.compute.internal","pid":,4291"v":}
2296 0,"name":"crucible"{,"level":"30msg":"Repair for extent 1 s:0 d:[ClientId(2)]","v":0,"name":"{crucible","level":30"msg":""[2] Transition from WaitActive to WaitQuorum,"",hostname"":v"":0,"name,"","timetime""::""2023-09-22T23:08:02.545375326Z"2023-09-22T23:08:02.545367602Z","hostname",:""hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid,"":pid4291":,4291"":"}downstairs
2297 "}
2298 {:"crucible",""level"msg":ip-10-150-1-74.us-west-2.compute.internal:"30,"time":"2023-09-22T23:08:02.54542421Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2299 ,"pid":4291{}
2300 "{msg":""msg":"[1] Transition from LiveRepairReady to LiveRepair","[2] 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b (63d034ab-cd7a-4ae2-9a0d-89f39e17085b) Active Active WaitQuorum ds_transition to Activev":"0,,""v"name"::""0crucible",","name"level:[2] 85751bdd-8856-4e5b-8ef6-adad326659c5 (8cb064bd-4131-4172-9681-8a036ecff523) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30"crucible","level":,30"time":"2023-09-22T23:08:02.545499486Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291"}:
2301 30,"time":"2023-09-22T23:08:02.545521708Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal",""pid"msg:"4291,"time":"2023-09-22T23:08:02.545539191Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2302 }
2303 :"{[2] Transition from WaitActive to WaitQuorum"",msg"":v"":0,"[2] Transition from WaitQuorum to Active"name":","crucible"v":,"0level":,"30name"test live_repair::repair_test::test_live_repair_enqueue_repair_repair ... :"crucibleok"
2304 ,"level":30,"time":"2023-09-22T23:08:02.545606296Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2305 ,"time":"2023-09-22T23:08:02.545621756Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal"","msg"pid":4291}
2306 {:""msg":"35eecd9f-6208-42ff-b6c0-c4c02caf8b9b is now active with session: 63d034ab-cd7a-4ae2-9a0d-89f39e17085b"[2] 85751bdd-8856-4e5b-8ef6-adad326659c5 (8cb064bd-4131-4172-9681-8a036ecff523) Active Active WaitQuorum ds_transition to Active","v":0,",v"":name":0","cruciblename"":","crucible"level":,"30level":30,"time":"2023-09-22T23:08:02.545686817Z",","hostname":time"":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2307 {2023-09-22T23:08:02.545688373Z""msg":","hostname":"[1] 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b (63d034ab-cd7a-4ae2-9a0d-89f39e17085b) Active Active Active ds_transition to Faultedip-10-150-1-74.us-west-2.compute.internal"",,""pidv""::42910,"name":}"
2308 crucible","level":30{"msg":"[2] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.545751608Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2309 {,"time":"2023-09-22T23:08:02.545766926Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2310 {"msg":test live_repair::repair_test::test_live_repair_flush_is_flush ... "ok
2311 85751bdd-8856-4e5b-8ef6-adad326659c5 is now active with session: 8cb064bd-4131-4172-9681-8a036ecff523","v":0,"name":"crucible","level":30",msg""time":":"2023-09-22T23:08:02.545820642Z"[1] Transition from Active to Faulted","hostname","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.545883049Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2312 {"msg":"[1] 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b (63d034ab-cd7a-4ae2-9a0d-89f39e17085b) Active Faulted Active ds_transition to LiveRepairReady":,"v"":0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible","level,":"30pid":4291}
2313 {"msg":","time":"2023-09-22T23:08:02.545975161Z","[1] 85751bdd-8856-4e5b-8ef6-adad326659c5 (8cb064bd-4131-4172-9681-8a036ecff523) Active Active Active ds_transition to Faulted"hostname":","v":0ip-10-150-1-74.us-west-2.compute.internal",,""pid"name":":crucible4291","}level"
2314 {":msg":"30[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30{,"time"":","msg"time:2023-09-22T23:08:02.546023976Z""":"Crucible stats registered with UUID: 182bd4a1-263e-4c1e-9802-54d965519b48","v":02023-09-22T23:08:02.546030697Z,""name":,""crucible"hostname":","level":ip-10-150-1-74.us-west-2.compute.internal"30,"pid,":"4291hostname"}:
2315 ,"time":"2023-09-22T23:08:02.546065963Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2316 {{"msg"":"msg":"Crucible 182bd4a1-263e-4c1e-9802-54d965519b48 has session id: 8d0a0614-4017-41fa-99e2-ce0e155fa6af","v":0[1] 35eecd9f-6208-42ff-b6c0-c4c02caf8b9b (63d034ab-cd7a-4ae2-9a0d-89f39e17085b) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible"","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2317 {"msg":","time":"[1] Transition from Active to Faulted","2023-09-22T23:08:02.546131337Z"v":,"0hostname",:","name"ip-10-150-1-74.us-west-2.compute.internal:""crucible","pid":,"4291level":}30
2318 "name{":"crucible",""msg"level:":"30[1] Transition from LiveRepairReady to LiveRepair","v":0,",name"":"time"crucible":","level":2023-09-22T23:08:02.546168776Z"30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2319 ,"time":"2023-09-22T23:08:02.546182556Z"{,",hostname":"""time"msg":ip-10-150-1-74.us-west-2.compute.internal"","pid":4291}
2320 2023-09-22T23:08:02.546193179Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":"4291msg":"}
2321 [1] 85751bdd-8856-4e5b-8ef6-adad326659c5 (8cb064bd-4131-4172-9681-8a036ecff523) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30:"[0] 182bd4a1-263e-4c1e-9802-54d965519b48 (1ab74527-7a63-44a1-a8c0-319deaa158d7) New New New ds_transition to WaitActive","v":0,"name":"crucible",,""timelevel"":":302023-09-22T23:08:02.546241921Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2322 {",msg":""time":"[1] Transition from Faulted to LiveRepairReady"2023-09-22T23:08:02.546262663Z","v",:"0hostname":,""name":"crucible",ip-10-150-1-74.us-west-2.compute.internal"","level"pid"::304291}
2323 {"msg":"[0] Transition from New to WaitActive","v":,"0time",":"name":"crucible2023-09-22T23:08:02.546296012Z"",,""level"hostname"::"30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2324 {"{msg":","time":""msg":2023-09-22T23:08:02.546322089Z"","[1] 85751bdd-8856-4e5b-8ef6-adad326659c5 (8cb064bd-4131-4172-9681-8a036ecff523) Active LiveRepairReady Active ds_transition to LiveRepair"hostname":",Crucible stats registered with UUID: 3b2f825e-b910-4529-8739-ae82806d8b76""v":0,ip-10-150-1-74.us-west-2.compute.internal",""vname""::0",,crucible"""name,"":level""pidcrucible:"",30":level"4291:30}
2325 {"msg":","time":"2023-09-22T23:08:02.546358173Z",",hostname"":"time":[0] 182bd4a1-263e-4c1e-9802-54d965519b48 (1ab74527-7a63-44a1-a8c0-319deaa158d7) WaitActive New New ds_transition to WaitQuorum""ip-10-150-1-74.us-west-2.compute.internal",,""pidv"2023-09-22T23:08:02.546360756Z"":,:"42910hostname},
2326 ":""name":"{ip-10-150-1-74.us-west-2.compute.internal"crucible,""msg"pid:"":"4291,"}[1] Transition from LiveRepairReady to LiveRepair
2327 "level",:"{30v":0","msg"name:"":"crucible","level":30Crucible 3b2f825e-b910-4529-8739-ae82806d8b76 has session id: 51a953b0-5569-419e-a249-0b56135d74c9","v":0,"name":"crucible","level":30,","time":time"":"2023-09-22T23:08:02.546402583Z"2023-09-22T23:08:02.546407325Z,"",time,"":""hostname"hostname:2023-09-22T23:08:02.546413754Z""":,""ip-10-150-1-74.us-west-2.compute.internal"hostname,"":"pid":ip-10-150-1-74.us-west-2.compute.internal4291"ip-10-150-1-74.us-west-2.compute.internal}"
2328 ,,test live_repair::repair_test::test_live_repair_no_repair_yet ... {"""pidpid":msg4291"ok:"}":
2329 4291Create read repair deps for extent 1
2330 "}
2331 {,"v":0","msg":name"":"{crucible","level":40"msg":"[0] 3b2f825e-b910-4529-8739-ae82806d8b76 (2fa4702d-3c78-4506-b8ee-ef9259c4e868) New New New ds_transition to WaitActive","v":0,"[0] Transition from WaitActive to WaitQuorum"name":","crucible"v,"":level"0:,30,""time"name"::""crucible"2023-09-22T23:08:02.546477609Z",","level":hostname"30:,""time":"ip-10-150-1-74.us-west-2.compute.internal",2023-09-22T23:08:02.546488799Z""pid",:"4291hostname":"}
2332 ip-10-150-1-74.us-west-2.compute.internal","pid":{----------------------------------------------------------------
2333 ,"time":"2023-09-22T23:08:02.546497599Z" Crucible gen:0 GIO:true work queues: Upstairs:1 downstairs:1
2334 4291}
2335 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
2336 1 AckReady 1000 Write 1 New New{"msg":","[0] Transition from New to WaitActive"hostname":,""v":0,"ip-10-150-1-74.us-west-2.compute.internal"name":",crucible"",pid"":level":429130 New} false
2337 STATES DS:0 DS:1 DS:2 TOTAL
2338 New 1 1 1
2339 , 3
2340 { Sent 0 0 0 0
2341 Done 0 0 0 " 0
2342 " Skipped 0 0 time" 0 : 0
2343 "msg": Error " 0 0 0 0
2344 msg":"Last Flush: 0 0 0
2345 [0] 182bd4a1-263e-4c1e-9802-54d965519b48 (1ab74527-7a63-44a1-a8c0-319deaa158d7) WaitQuorum New New ds_transition to Active"","v":2023-09-22T23:08:02.546648104Z"0,"name",:""cruciblehostname"":",Downstairs last five completed:"
2346 IO Read 1004 extent 0 added deps 1ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2347 Upstairs last five completed: {
2348 ","v":0,"name":"crucible","level":level40":30"msg":"[0] 3b2f825e-b910-4529-8739-ae82806d8b76 (2fa4702d-3c78-4506-b8ee-ef9259c4e868) WaitActive New New ds_transition to WaitQuorum","v":0,",name"":"time":crucible"","level":2023-09-22T23:08:02.546765873Z"30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2349 ,"time":",2023-09-22T23:08:02.546767696Z""time":,""hostname":"2023-09-22T23:08:02.546792486Z","ip-10-150-1-74.us-west-2.compute.internal"hostname,"":pid"":4291ip-10-150-1-74.us-west-2.compute.internal"},
2350 "pid":4291}
2351 {{"msg":"{[0] Transition from WaitQuorum to Active"",""msgvmsg"":""::"0[0] Transition from WaitActive to WaitQuorum,""name"Crucible stats registered with UUID: 231d0792-4fa5-43d8-bed8-61976ebe4426:",""crucible"v",,:""v0level"",:":030name,"":"name":crucible"","crucible"level":,"30level":30,"time":"2023-09-22T23:08:02.546868264Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","pid"2023-09-22T23:08:02.546875076Z":4291,"hostname}"
2352 ,:""time":"{ip-10-150-1-74.us-west-2.compute.internal",2023-09-22T23:08:02.546876832Z""pid"",:msg""4291:hostname"}:"
2353 "[1] 182bd4a1-263e-4c1e-9802-54d965519b48 (1ab74527-7a63-44a1-a8c0-319deaa158d7) Active New New ds_transition to WaitActive"ip-10-150-1-74.us-west-2.compute.internal,""{,v":"0",pid"msg"::""4291name":"crucible"},
2354 "[0] 3b2f825e-b910-4529-8739-ae82806d8b76 (2fa4702d-3c78-4506-b8ee-ef9259c4e868) WaitQuorum New New ds_transition to Activelevel":"30,"v":0{,"name":"crucible","level":"30msg":"Crucible 231d0792-4fa5-43d8-bed8-61976ebe4426 has session id: daddcc97-92dc-410b-8580-428379cd0b09","v":0,"name":"crucible",,""level":time"30,:""time":"2023-09-22T23:08:02.546940974Z","2023-09-22T23:08:02.546946821Z"hostname":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal4291","}pid"
2355 :4291}
2356 ,{"time":"{"2023-09-22T23:08:02.546960331Z"msg":,"""msg"hostname"[1] Transition from New to WaitActive"::",""v":ip-10-150-1-74.us-west-2.compute.internal"0,[0] Transition from WaitQuorum to Active""pid",,:"4291v":"test live_repair::repair_test::test_live_repair_repair_read_push ... 0}
2357 ok
2358 {"msg":,name""":name"":"cruciblecrucible"",,[0] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) New New New ds_transition to WaitActive""level"level""::,"30v"30:0,"name":"crucible","level":30,"time":","2023-09-22T23:08:02.547047453Z"time":","hostname":",2023-09-22T23:08:02.547048681Z""ip-10-150-1-74.us-west-2.compute.internal,time"","hostname":""pid:"":ip-10-150-1-74.us-west-2.compute.internal"42912023-09-22T23:08:02.547053692Z",}"
2359 ,pid"":hostname"4291:"{}
2360 ip-10-150-1-74.us-west-2.compute.internal"","msgpid""::4291{"}
2361 "[1] 182bd4a1-263e-4c1e-9802-54d965519b48 (1ab74527-7a63-44a1-a8c0-319deaa158d7) Active WaitActive New ds_transition to WaitQuorummsg"":","v"{:0,"name":"crucible""[1] 3b2f825e-b910-4529-8739-ae82806d8b76 (2fa4702d-3c78-4506-b8ee-ef9259c4e868) Active New New ds_transition to WaitActivemsg","",:""v"level"::[0] Transition from New to WaitActive0","30name":,""v"crucible":,"0level",:"30name":"crucible","level":30,"time":"2023-09-22T23:08:02.547128256Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid",:"4291time":"}
2362 2023-09-22T23:08:02.547135768Z",",time"":{hostname"":""2023-09-22T23:08:02.547138944Z"msgip-10-150-1-74.us-west-2.compute.internal","",:hostname"""pid"::"[1] Transition from WaitActive to WaitQuorum"4291,"v"ip-10-150-1-74.us-west-2.compute.internal:"0},
2363 ,""pid"name"::"4291{crucible",}"
2364 level"":msg"30:"{[1] Transition from New to WaitActive","v":0","msg"name"::""crucible","level":30[0] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) WaitActive New New ds_transition to WaitQuorum","v":0,",name"":"time"crucible":","level":2023-09-22T23:08:02.54719662Z"30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid",:"4291time":"}
2365 2023-09-22T23:08:02.547207564Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal,"""time"msg"::"","pid"2023-09-22T23:08:02.547219247Z":4291,"hostname":}"
2366 ip-10-150-1-74.us-west-2.compute.internal"[1] 182bd4a1-263e-4c1e-9802-54d965519b48 (1ab74527-7a63-44a1-a8c0-319deaa158d7) Active WaitQuorum New ds_transition to Active,""pid{,"":v":"0msg"4291,:""}name"
2367 [1] 3b2f825e-b910-4529-8739-ae82806d8b76 (2fa4702d-3c78-4506-b8ee-ef9259c4e868) Active WaitActive New ds_transition to WaitQuorum":","v":crucible"{0,,""name":""crucible"msg","level":level"::30"30[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.547277518Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2368 ,"{time":""msg":"2023-09-22T23:08:02.547278566Z"[1] Transition from WaitActive to WaitQuorum",","hostnamev":",:"0"time",:""ip-10-150-1-74.us-west-2.compute.internal"name"2023-09-22T23:08:02.547286638Z:"",crucible",pid"":",4291"hostname"level"}:
2369 :"30ip-10-150-1-74.us-west-2.compute.internal"{,"pid":4291"}msg"
2370 :","[1] Transition from WaitQuorum to Active"time":",{"v"2023-09-22T23:08:02.547325799Z":,"0"msg":,"hostname"":"name":"crucible"ip-10-150-1-74.us-west-2.compute.internal",,""levelpid"[0] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) WaitQuorum New New ds_transition to Active"":,4291:"30}v
2371 ":0,"name"{:"crucible",""level"msg"::"30,[1] 3b2f825e-b910-4529-8739-ae82806d8b76 (2fa4702d-3c78-4506-b8ee-ef9259c4e868) Active WaitQuorum New ds_transition to Active"",time"":v":"0,"name":"crucible"2023-09-22T23:08:02.547360142Z","level":,"30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"}time"
2372 :,""time":"2023-09-22T23:08:02.547384682Z"2023-09-22T23:08:02.547371939Z{","hostname":",""ip-10-150-1-74.us-west-2.compute.internalhostname""msg":":","pid":4291ip-10-150-1-74.us-west-2.compute.internal"}
2373 ,"pid[2] 182bd4a1-263e-4c1e-9802-54d965519b48 (1ab74527-7a63-44a1-a8c0-319deaa158d7) Active Active New ds_transition to WaitActive""{,:"4291"v"msg":"}:
2374 0[1] Transition from WaitQuorum to Active",,""v":0name":","crucible"name","{level":":crucible","30"level":30msg":"[0] Transition from WaitQuorum to Active","v":0,"name":","crucible"time":","level"2023-09-22T23:08:02.547436503Z":,"30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2375 ,"time"{:""msg":"2023-09-22T23:08:02.547435743Z","hostname":",[2] 3b2f825e-b910-4529-8739-ae82806d8b76 (2fa4702d-3c78-4506-b8ee-ef9259c4e868) Active Active New ds_transition to WaitActive"","timevip-10-150-1-74.us-west-2.compute.internal""":,""pid":2023-09-22T23:08:02.547447982Z0":,4291,""name"hostname"::}""
2376 ip-10-150-1-74.us-west-2.compute.internal"crucible",","pidlevel{"":{:429130"msg"}:""
2377 Crucible stats registered with UUID: 1a8122c3-953b-42de-a373-2c666ddd65a5"msg":","v":[2] Transition from New to WaitActive0"{,,"",nametime""::"""crucible"msg":v"":"2023-09-22T23:08:02.547492901Z",0,""level","hostname:name[1] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) Active New New ds_transition to WaitActive""30:","":vip-10-150-1-74.us-west-2.compute.internal""":,0"crucible","pidname",:"":level"4291"crucible:}",
2378 "30,time""{:level""":msg":2023-09-22T23:08:02.547523165Z""30,"hostname"[2] Transition from New to WaitActive:"","v":0,ip-10-150-1-74.us-west-2.compute.internal""name,"":"pid":crucible"4291,"level"}:
2379 30,"{time":""2023-09-22T23:08:02.54754485Z"msg":","hostname":","time":ip-10-150-1-74.us-west-2.compute.internal"","Crucible 1a8122c3-953b-42de-a373-2c666ddd65a5 has session id: 53637ab1-3cfb-4773-8699-b0612dee3265,""2023-09-22T23:08:02.547550606Ztime,"":v""pid":2023-09-22T23:08:02.547560785Z0",:",""name"hostname:"":,crucible""4291",ip-10-150-1-74.us-west-2.compute.internal"hostname}"level
2380 :"":,""30pid":4291{ip-10-150-1-74.us-west-2.compute.internal"},
2381 ""pid":msg"4291:,{""}"time
2382 "msg:"":[2] 182bd4a1-263e-4c1e-9802-54d965519b48 (1ab74527-7a63-44a1-a8c0-319deaa158d7) Active Active WaitActive ds_transition to WaitQuorum""2023-09-22T23:08:02.547596024Z",,{""v":hostname"0:[2] 3b2f825e-b910-4529-8739-ae82806d8b76 (2fa4702d-3c78-4506-b8ee-ef9259c4e868) Active Active WaitActive ds_transition to WaitQuorum"",",msg"":name"ip-10-150-1-74.us-west-2.compute.internal""v,""::pid0",:""4291name"}:
2383 ""{crucible"crucible,""msg"level:"":"[1] Transition from New to WaitActive30","level",:"[0] 1a8122c3-953b-42de-a373-2c666ddd65a5 (7c39838b-aea6-4a95-a4cb-86170299d0c5) New New New ds_transition to WaitActivev"30:"0,",v":"0name,"",:name"":time"""crucible:""crucible",2023-09-22T23:08:02.547646361Z""level,,"":"hostname30"level":":,30"ip-10-150-1-74.us-west-2.compute.internal"time":,""pid":42912023-09-22T23:08:02.547651541Z"}
2384 ,,""time{hostname"""::msg"":""2023-09-22T23:08:02.54766642Z","ip-10-150-1-74.us-west-2.compute.internal"hostname[2] Transition from WaitActive to WaitQuorum,",pid""":time"4291:",""}:2023-09-22T23:08:02.547670162Z"v"",:
2385 "ip-10-150-1-74.us-west-2.compute.internal0",,""hostnamepidname""::"{4291":"}
2386 crucible"ip-10-150-1-74.us-west-2.compute.internal"{,"""level"msg:,30""msg"pid":::"4291"[0] Transition from New to WaitActive}"
2387 [2] Transition from WaitActive to WaitQuorum",,"",timev"""::0",{"2023-09-22T23:08:02.547715904Z"name",v"":hostname:""msg0":,":"""name":crucible"",ip-10-150-1-74.us-west-2.compute.internal""crucible",level""[1] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) Active WaitActive New ds_transition to WaitQuorum"pid:":304291,,""}level"
2388 v"::{300,"",time"msg""::""name":"2023-09-22T23:08:02.547753507Zcrucible"",","level":hostname"[2] 3b2f825e-b910-4529-8739-ae82806d8b76 (2fa4702d-3c78-4506-b8ee-ef9259c4e868) Active Active WaitQuorum ds_transition to Active:"30,""v":0,"ip-10-150-1-74.us-west-2.compute.internalname"":,""pidcrucible"":,4291"level"}:
2389 ,30"{time":""msg":"2023-09-22T23:08:02.547762515Z","[0] 1a8122c3-953b-42de-a373-2c666ddd65a5 (7c39838b-aea6-4a95-a4cb-86170299d0c5) WaitActive New New ds_transition to WaitQuorum",,,"""vtime""::0",time":"2023-09-22T23:08:02.547781092Z"name",:"""hostnamecrucible"":","2023-09-22T23:08:02.547773487Z"level":ip-10-150-1-74.us-west-2.compute.internal30",","hostname"pid":":hostname4291":ip-10-150-1-74.us-west-2.compute.internal}"
2390 ,ip-10-150-1-74.us-west-2.compute.internal""{,""time",msg""::"""pidpid"2023-09-22T23:08:02.547802108Z[2] Transition from WaitQuorum to Active"":",,"4291hostname""v:""::}0ip-10-150-1-74.us-west-2.compute.internal",,""namepid"
2391 "{"msg":"[1] Transition from WaitActive to WaitQuorum"4291,"v":0,"}name
2392 ":"crucible","level":30{"msg"::"4291}
2393 [2] 182bd4a1-263e-4c1e-9802-54d965519b48 (1ab74527-7a63-44a1-a8c0-319deaa158d7) Active Active WaitQuorum ds_transition to Active","{v":0","msg"name,":"time":":"crucible"","[0] Transition from WaitActive to WaitQuorumlevel""2023-09-22T23:08:02.547873142Z":,,""v"hostname":"30:0,"ip-10-150-1-74.us-west-2.compute.internalname":"crucible","level":30,"time",:""time":"2023-09-22T23:08:02.547911704Z","2023-09-22T23:08:02.547906494Z"hostname":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal"4291,"pid}"
2394 :4291}"
2395 {,"pid"":msg":4291{"}
2396 [0] 1a8122c3-953b-42de-a373-2c666ddd65a5 (7c39838b-aea6-4a95-a4cb-86170299d0c5) WaitQuorum New New ds_transition to Active"","msg"v:""{:[2] Transition from WaitQuorum to Active",0""msg"v:,":"0name","":name":"crucible"","crucible"[1] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) Active WaitQuorum New ds_transition to Active"level",:",level:"""30v":crucible30":0,","level"name"::"30crucible","level":30,"time":","2023-09-22T23:08:02.547985294Z",,""time":hostname""time:"":"2023-09-22T23:08:02.547983739Z",2023-09-22T23:08:02.547990417Zip-10-150-1-74.us-west-2.compute.internal""",,"hostname"pid"":hostname4291",:}"
2397 ip-10-150-1-74.us-west-2.compute.internal"",:time{""pid""":msg:"4291:"ip-10-150-1-74.us-west-2.compute.internal""},
2398 [0] Transition from WaitQuorum to Active"2023-09-22T23:08:02.547993227Z""pid"{,,"""v"msg:"0:,:""name"hostname"4291::""}3b2f825e-b910-4529-8739-ae82806d8b76 is now active with session: 2fa4702d-3c78-4506-b8ee-ef9259c4e868crucible""ip-10-150-1-74.us-west-2.compute.internal,,
2399 """,levelv""::300"{pid",":name":4291""crucible}"msg,
2400 "",level""::time30{"":"2023-09-22T23:08:02.548057734Z""msg",:"182bd4a1-263e-4c1e-9802-54d965519b48 is now active with session: 1ab74527-7a63-44a1-a8c0-319deaa158d7"hostname"":,""[1] Transition from WaitQuorum to Active"timeip-10-150-1-74.us-west-2.compute.internal"":,",,"2023-09-22T23:08:02.548074747Z""pid""v"v"::,:"42910hostname}"
2401 :,"{"0name","ip-10-150-1-74.us-west-2.compute.internalmsg"",:":""pid"name""::"4291crucible"}[1] 1a8122c3-953b-42de-a373-2c666ddd65a5 (7c39838b-aea6-4a95-a4cb-86170299d0c5) Active New New ds_transition to WaitActive"
2402 ,,crucible""{",v""msg:"0:,""levelname"":"":crucible"level"[1] 3b2f825e-b910-4529-8739-ae82806d8b76 (2fa4702d-3c78-4506-b8ee-ef9259c4e868) Active Active Active ds_transition to Faulted,""30:,level""v:"30:300,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.548136235Z",,"",hostnametime""::""",time"ip-10-150-1-74.us-west-2.compute.internal:"2023-09-22T23:08:02.548140417Z,"",pid"":hostname4291"":}
2403 "time""{:"ip-10-150-1-74.us-west-2.compute.internal""2023-09-22T23:08:02.548134121Zmsg,":2023-09-22T23:08:02.548137199Z""""pid",[1] Transition from New to WaitActive:",4291"hostname,}"
2404 v"":{0hostname"",msg"""::""name"::[1] Transition from Active to Faulted""ip-10-150-1-74.us-west-2.compute.internal,crucible""v,"":"0level,"":pid30,"""name":ip-10-150-1-74.us-west-2.compute.internal":4291,""}crucible"pid,,"""leveltime:"":
2405 ":4291302023-09-22T23:08:02.548201056Z",}"
2406 hostname{":","time":"ip-10-150-1-74.us-west-2.compute.internal"{,2023-09-22T23:08:02.548220803Z"""msgpid",:"":msg""4291":}hostname
2407 "":[2] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) Active Active New ds_transition to WaitActive""{,"v":"ip-10-150-1-74.us-west-2.compute.internal0msg"",,:""pid"[1] 182bd4a1-263e-4c1e-9802-54d965519b48 (1ab74527-7a63-44a1-a8c0-319deaa158d7) Active Active Active ds_transition to Faulted":"4291,name""}:"v"
2408 [1] 1a8122c3-953b-42de-a373-2c666ddd65a5 (7c39838b-aea6-4a95-a4cb-86170299d0c5) Active WaitActive New ds_transition to WaitQuorum"crucible"{,,""v"msg:":0"",level":name""::0[1] 3b2f825e-b910-4529-8739-ae82806d8b76 (2fa4702d-3c78-4506-b8ee-ef9259c4e868) Active Faulted Active ds_transition to LiveRepairReady""30,,crucible""v",:"0"level,"":name"30name"::""crucible"crucible",,""level"level"::3030,"time":",2023-09-22T23:08:02.548280296Z"","time":hostname"":","2023-09-22T23:08:02.548274497Z"timeip-10-150-1-74.us-west-2.compute.internal"":,"",pid"2023-09-22T23:08:02.548287802Z""hostname,:""4291:hostname}"
2409 :"","{ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",time""msg"",::pid""""pid":2023-09-22T23:08:02.548288574Z"4291:[1] Transition from WaitActive to WaitQuorum"4291,},
2410 "}"
2411 hostname"{v":"0:,msg""name:""{":"crucible[1] Transition from Faulted to LiveRepairReady"",",ip-10-150-1-74.us-west-2.compute.internal"level""v:":"030,msg"":name"",:""pid"[2] Transition from New to WaitActive"crucible":,,"4291,level"":time30""}:v"":
2412 ,2023-09-22T23:08:02.548354368Z""time",:""0hostname":2023-09-22T23:08:02.548367548Z"",,{"ip-10-150-1-74.us-west-2.compute.internal""name",hostname""":"pidcrucible"::"4291"msg"}ip-10-150-1-74.us-west-2.compute.internal
2413 ":,{,""""msgpid""::4291"level}"
2414 :[1] Transition from Active to Faulted"30{[1] 1a8122c3-953b-42de-a373-2c666ddd65a5 (7c39838b-aea6-4a95-a4cb-86170299d0c5) Active WaitQuorum New ds_transition to Active","",msg""v:v"""::00,","name"name"::[1] 3b2f825e-b910-4529-8739-ae82806d8b76 (2fa4702d-3c78-4506-b8ee-ef9259c4e868) Active LiveRepairReady Active ds_transition to LiveRepair""",crucible"v"",:"0level,"":crucible"30name":,""level"crucible,":",30time":""level":,"30time":2023-09-22T23:08:02.548409167Z"","2023-09-22T23:08:02.548425369Z"hostname",:""hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"ip-10-150-1-74.us-west-2.compute.internal":,,""pid"time:"4291:4291"}
2415 2023-09-22T23:08:02.548434436Z"},{
2416 ""msg",hostname:"":""{time"[1] Transition from WaitQuorum to Active"ip-10-150-1-74.us-west-2.compute.internal:,"",""vpid""::04291,msg}"
2417 name"":{"":crucible""msg,2023-09-22T23:08:02.548431931Z"""":",level":"30[1] Transition from LiveRepairReady to LiveRepairhostname"":","[2] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) Active Active WaitActive ds_transition to WaitQuorum"v"ip-10-150-1-74.us-west-2.compute.internal:,0","",,v"name"time:""""crucible:"",:pid0"2023-09-22T23:08:02.5484838Z:""level,"":4291hostname30","}name
2418 ":":","crucible"timeip-10-150-1-74.us-west-2.compute.internal"":,""{pid,2023-09-22T23:08:02.548508294Z"":,4291""hostname}"level":
2419 ":"30{ip-10-150-1-74.us-west-2.compute.internal"msg",msg""pid:"":"4291:"}
2420 [2] 1a8122c3-953b-42de-a373-2c666ddd65a5 (7c39838b-aea6-4a95-a4cb-86170299d0c5) Active Active New ds_transition to WaitActive","v":0,"[1] 182bd4a1-263e-4c1e-9802-54d965519b48 (1ab74527-7a63-44a1-a8c0-319deaa158d7) Active Faulted Active ds_transition to LiveRepairReady"name":","crucible"v",:"0level",:",30name"":"time":crucible"","level":2023-09-22T23:08:02.54853145Z"30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time"}:
2421 "2023-09-22T23:08:02.548551187Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal,"","msg"time":":""pid":[2] Transition from WaitActive to WaitQuorum4291"2023-09-22T23:08:02.548556881Z"},,
2422 ""v"hostname":{:0",""name"msg"::ip-10-150-1-74.us-west-2.compute.internal"","crucible"",pid[2] Transition from New to WaitActive""":level"4291:,"30}v":
2423 0,"name":"crucible","level":{30"msg":"[1] Transition from Faulted to LiveRepairReady",",v"":time"0:","name":"2023-09-22T23:08:02.548606176Z"crucible,,"""hostname"time":,"":"level":ip-10-150-1-74.us-west-2.compute.internal302023-09-22T23:08:02.548616292Z"",","hostname":"pid":4291ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2424 }
2425 {{,"time":"""msg":"msg2023-09-22T23:08:02.548639017Z"":","hostname":"[2] 1a8122c3-953b-42de-a373-2c666ddd65a5 (7c39838b-aea6-4a95-a4cb-86170299d0c5) Active Active WaitActive ds_transition to WaitQuorum","v":0ip-10-150-1-74.us-west-2.compute.internal",",name[2] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) Active Active WaitQuorum ds_transition to Active"""pid:",""cruciblev:"4291,""level"::30}0
2426 ,"name":"crucible","{time":","level"2023-09-22T23:08:02.548675836Z":,"30hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2427 {"msg":"[1] 182bd4a1-263e-4c1e-9802-54d965519b48 (1ab74527-7a63-44a1-a8c0-319deaa158d7) Active LiveRepairReady Active ds_transition to LiveRepair",[2] Transition from WaitActive to WaitQuorum"","v"v"::0,0"name,":"",namecrucible"","timelevel"""::30:""crucible"2023-09-22T23:08:02.548689608Z",",level"":hostname"30,":time":""2023-09-22T23:08:02.54871486Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid",":pid":42914291}
2428 }
2429 ,{"time":""msg":"{2023-09-22T23:08:02.548721977Z","hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","[2] 1a8122c3-953b-42de-a373-2c666ddd65a5 (7c39838b-aea6-4a95-a4cb-86170299d0c5) Active Active WaitQuorum ds_transition to Activepid"[2] Transition from WaitQuorum to Active"":,,4291""v}v":
2430 "0:,"0name",:{""name":crucible"""crucible"msg",:"","level"level":[1] Transition from LiveRepairReady to LiveRepair"30:,"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.548776723Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2431 ,"{time":""msg":"2023-09-22T23:08:02.548778741Z","hostname":"[2] Transition from WaitQuorum to Active","v":ip-10-150-1-74.us-west-2.compute.internal"0,,""pid"name"::4291"crucible"},
2432 "level":30test live_repair::repair_test::test_live_repair_send_io_write_below ... {ok
2433 ,"",time"":msg"":time""2023-09-22T23:08:02.548783846Z":","hostname":2023-09-22T23:08:02.548818038Z""231d0792-4fa5-43d8-bed8-61976ebe4426 is now active with session: 311603ce-e15d-47ce-bbc6-29c87d03dcc9",",hostname"ip-10-150-1-74.us-west-2.compute.internal"v",":":0pid"",:"4291name"ip-10-150-1-74.us-west-2.compute.internal":},"
2434 pid"":crucible4291","{}level"
2435 :"30msg":"{Write to Extent 0:2:9 under repair","v"":msg"0:","name":"crucible","level":401a8122c3-953b-42de-a373-2c666ddd65a5 is now active with session: 7c39838b-aea6-4a95-a4cb-86170299d0c5","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.548869939Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,}"
2436 ,"time":time":""{2023-09-22T23:08:02.548886389Z"2023-09-22T23:08:02.548880671Z","hostname":",""hostname"ip-10-150-1-74.us-west-2.compute.internalmsg:""",:ip-10-150-1-74.us-west-2.compute.internal""",pid":"4291pid":}
2437 [1] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) Active Active Active ds_transition to Faulted"4291,"{v"}:"0msg","
2438 :"name":"crucible","level"{:[1] 1a8122c3-953b-42de-a373-2c666ddd65a5 (7c39838b-aea6-4a95-a4cb-86170299d0c5) Active Active Active ds_transition to Faulted"30,"v":0,""name":msg"":"crucible","level":30Write 1:0:9 past extent under repair 0","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:08:02.548942143Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"}
2439 time":"{"msg":"2023-09-22T23:08:02.54893712Z"[1] Transition from Active to Faulted",,""v":0hostname",":name",:"""crucible"time","ip-10-150-1-74.us-west-2.compute.internal":level":"30,"pid":2023-09-22T23:08:02.548949333Z"4291,"hostname"}:
2440 ","time":"2023-09-22T23:08:02.548973089Z"ip-10-150-1-74.us-west-2.compute.internal",",{"hostname":pid"":4291"ip-10-150-1-74.us-west-2.compute.internal",msg"}":"pid":
2441 4291[1] Transition from Active to Faulted"}
2442 ,{"{v":"msg0":",""msg"name"::[1] 1a8122c3-953b-42de-a373-2c666ddd65a5 (7c39838b-aea6-4a95-a4cb-86170299d0c5) Active Faulted Active ds_transition to LiveRepairReady"""crucible","v,Write 1:1:9 past extent under repair 0""":,0",v"level""name"::":crucible"300,","level":name"30:"crucible","level":40,"time":"2023-09-22T23:08:02.549029518Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2443 ,"{time":""msg":"2023-09-22T23:08:02.549026445Z"[1] Transition from Faulted to LiveRepairReady",",v":0","timename",""::hostname""":crucible"","2023-09-22T23:08:02.549033148Z"level"ip-10-150-1-74.us-west-2.compute.internal":,30,""pid":hostname":4291"}
2444 ip-10-150-1-74.us-west-2.compute.internal",",time":""pid":2023-09-22T23:08:02.549064822Z{"4291,"hostname":"}
2445 "ip-10-150-1-74.us-west-2.compute.internal",msg"":pid":"4291{}
2446 "{msg":""msg":"[1] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) Active Faulted Active ds_transition to LiveRepairReady"IO Write 1004 on eur 0 Added deps 1",",v""[1] 1a8122c3-953b-42de-a373-2c666ddd65a5 (7c39838b-aea6-4a95-a4cb-86170299d0c5) Active LiveRepairReady Active ds_transition to LiveRepair":,"v"v0":0:,","0name"name",:""name"::"cruciblecrucible""",crucible"",level","level""::level30"30:40,"time":"2023-09-22T23:08:02.549124336Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",","time":pid,""":2023-09-22T23:08:02.549125439Z"4291time","}hostname:""
2447 2023-09-22T23:08:02.549126296Z:""{,"ip-10-150-1-74.us-west-2.compute.internal"hostname","msg""::pid""":4291ip-10-150-1-74.us-west-2.compute.internal"[1] Transition from LiveRepairReady to LiveRepair",}"
2448 pid",:"4291v":0},"{name":
2449 ""crucible"msg":,""level":30[1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.549194119Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2450 ,"time":"2023-09-22T23:08:02.549202312Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
24512023-09-22T23:08:02.549ZINFOcrucible: [1] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) Active LiveRepairReady Active ds_transition to LiveRepair
2452 {{"msg":""[1] Transition from LiveRepairReady to LiveRepair"msg",":"v":0,"name":"crucibleCrucible stats registered with UUID: 6220a0c3-d619-4944-bb16-c06d6f7a401f"","level",:"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.549296486Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2453 ,"time":"{2023-09-22T23:08:02.549302079Z","hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2454 [1] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) Active LiveRepair Active ds_transition to Faulted","v":0,"name"{:"crucible","level":"30msg":"Crucible 6220a0c3-d619-4944-bb16-c06d6f7a401f has session id: e3f8e9de-d6e0-4089-ac3a-675d8ee0da5c","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.549349067Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2455 ,"time":"2023-09-22T23:08:02.549359598Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":[1] Transition from LiveRepair to Faulted"4291,"v":}0
2456 ,"name":"crucible","{level":30"msg":"[0] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"test live_repair::repair_test::test_live_repair_update ... time":"ok
2457 2023-09-22T23:08:02.54939765Z",",time"":"hostname":"2023-09-22T23:08:02.549410787Z","ip-10-150-1-74.us-west-2.compute.internal"hostname,""pid"::"4291}ip-10-150-1-74.us-west-2.compute.internal"
2458 ,"pid":4291}
2459 {{""msg"msg":":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30[1] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.549455454Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2460 {"msg":"[0] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) WaitActive New New ds_transition to WaitQuorum","v":,0","time"name":"test live_repair::repair_test::test_live_repair_repair_write_push ... :2023-09-22T23:08:02.549463638Z"ok
2461 ,""crucible"hostname",:""level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2462 {,"time":""msg"2023-09-22T23:08:02.549498231Z":","hostname":"[1] Transition from Faulted to LiveRepairReady","ip-10-150-1-74.us-west-2.compute.internalv":"0,",pid"":name"4291:"crucible"},
2463 "level":30{"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.549531084Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2464 ,"time":"2023-09-22T23:08:02.549542968Z"{,"hostname":""msg":ip-10-150-1-74.us-west-2.compute.internal"","pid":4291}
2465 [1] 231d0792-4fa5-43d8-bed8-61976ebe4426 (311603ce-e15d-47ce-bbc6-29c87d03dcc9) Active LiveRepairReady Active ds_transition to LiveRepair","v":0{,"name":"crucible"",msg"":"level":30[0] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.549583239Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time"}:"
2466 2023-09-22T23:08:02.549593231Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291"msg}"
2467 :"[1] Transition from LiveRepairReady to LiveRepair{","v":"0msg":","name":"crucible"[0] Transition from WaitQuorum to Active",",level":"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.549644825Z",",time":""hostname":"2023-09-22T23:08:02.549649568Z","ip-10-150-1-74.us-west-2.compute.internal"hostname",:""pid":4291ip-10-150-1-74.us-west-2.compute.internal","}pid
2468 ":4291}
2469 {{"msg":""msg":"Write to Extent 0:2:9 under repair","v":0,"name":"crucible","[1] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) Active New New ds_transition to WaitActive"level",:"40v":0,"name":"crucible","level":30,"time":","time":2023-09-22T23:08:02.549700248Z"","hostname"2023-09-22T23:08:02.549704959Z:"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"ip-10-150-1-74.us-west-2.compute.internal":,4291"pid":}4291
2470 }
2471 {{"msg"":"msg":"Write 1:0:9 past extent under repair 0"[1] Transition from New to WaitActive",",v"":v"0:,0","name":name"":"crucible"crucible",","level"level"::4030,"time":","time":"2023-09-22T23:08:02.54975879Z","2023-09-22T23:08:02.54975902Z"hostname":,""hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internalpid"":,4291"pid":}4291
2472 }
2473 {"{msg":""Write 1:1:9 past extent under repair 0"msg":","v":0,"name":"crucible","level":[1] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) Active WaitActive New ds_transition to WaitQuorum"40,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.549811126Z",","time":hostname"":"2023-09-22T23:08:02.549816109Z"ip-10-150-1-74.us-west-2.compute.internal",,""pid":hostname4291":"}
2474 ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
2475 "msg":"{IO Write 1004 on eur 0 Added deps 1","v":"0msg",":"name":"crucible","[1] Transition from WaitActive to WaitQuorum"level":,"40v":0,"name":"crucible","level":30,"time":","2023-09-22T23:08:02.549865138Z"time":","hostname":"2023-09-22T23:08:02.549870138Z","hostname"ip-10-150-1-74.us-west-2.compute.internal":,""pid":4291ip-10-150-1-74.us-west-2.compute.internal","}pid
2476 "{:4291}
2477 "msg":{""msg":"Crucible stats registered with UUID: fce41f2d-d98f-4004-81f1-42f5f6597ea7","v":0,"name"[1] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) Active WaitQuorum New ds_transition to Active":","crucible","level":30{"msg":"Crucible stats registered with UUID: 4e3cf648-2b3b-4d4d-8069-b689dc174a75","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.549927207Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2478 v":0,"name":{"crucible","level":"30msg":"Crucible fce41f2d-d98f-4004-81f1-42f5f6597ea7 has session id: d91cfece-2dd3-44d1-b808-deff3c07fda2","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.549978944Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""pid":time4291":"}2023-09-22T23:08:02.549989699Z"
2479 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","{pid":4291}
2480 {",msg":""msg"[0] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) New New New ds_transition to WaitActive:"","v":[1] Transition from WaitQuorum to Active"0,",name"":v"":crucible"0,,"level":30"time":"2023-09-22T23:08:02.549949711Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2481 ,"time":"2023-09-22T23:08:02.550066151Z"{,"hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg":,""pid":4291}
2482 {"msg":"[0] Transition from New to WaitActive"Crucible 4e3cf648-2b3b-4d4d-8069-b689dc174a75 has session id: 4f35d8e7-db1f-4b21-be79-84172e32182f,""vtest live_repair::repair_test::test_live_repair_span_write_write ... ",ok":0v",:0,"name":"crucible"",name":""level":crucible"30,"level":"30name":"crucible","level":30
2483 ,"time":"2023-09-22T23:08:02.550143778Z",","hostname":time":""2023-09-22T23:08:02.550150389Z"ip-10-150-1-74.us-west-2.compute.internal",",pid"":hostname":"4291}
2484 {"msg":"[0] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level,"":time30":"2023-09-22T23:08:02.550141553Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2485 ,"time":"{2023-09-22T23:08:02.550206853Z","hostname"ip-10-150-1-74.us-west-2.compute.internal:"""msg":"ip-10-150-1-74.us-west-2.compute.internal",,""pid":4291pid":}
2486 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":42910,"name":"crucible"},
2487 "level"[0] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) New New New ds_transition to WaitActive:"30{,"v":"msg0":","name":"crucible","level":30[2] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":,30"time":"2023-09-22T23:08:02.550264181Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2488 {,"time":"2023-09-22T23:08:02.55028015Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2489 {"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30"msg":","time":"[0] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) WaitQuorum New New ds_transition to Active"2023-09-22T23:08:02.550326127Z",","v":hostname":0","name":"crucible","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2490 {"msg":","time":","[2] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) Active Active WaitActive ds_transition to WaitQuorum"2023-09-22T23:08:02.550272985Z","timev,"":":0"2023-09-22T23:08:02.550352094Z",,""name":"hostname":"crucible","level":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":4291}
2491 hostname"{:""ip-10-150-1-74.us-west-2.compute.internal"msg":,,"""time[0] Transition from WaitQuorum to Active"","v":0,"name":"crucible",pid"":level4291":30}
2492 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level",":time":"2023-09-22T23:08:02.550414176Z","hostname":":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.550386023Z,""pid",:"4291hostname":"}
2493 30{"msg"ip-10-150-1-74.us-west-2.compute.internal:"","pid":4291}
2494 ,"[1] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) Active New New ds_transition to WaitActive"time":,""v":{2023-09-22T23:08:02.550456811Z"0,,"msg":"[2] Transition from WaitActive to WaitQuorum"",name""v":"crucible","level":30"hostname":"ip-10-150-1-74.us-west-2.compute.internal",":pid":04291,"}
2495 name":"crucible","level{":30"msg":","time":"2023-09-22T23:08:02.550490171Z","hostname":[0] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) WaitActive New New ds_transition to WaitQuorum"","v":0,"ip-10-150-1-74.us-west-2.compute.internal"name,"":"pid":crucible"4291,","level"time:"30:}"
2496 ,"time":"2023-09-22T23:08:02.550520884Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2497 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30{,"time":""2023-09-22T23:08:02.550553282Zmsg","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2498 {{"msg":""msg":"Crucible stats registered with UUID: 278316f9-9926-4cf9-908b-c5ecd8db3aac"[0] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) WaitQuorum New New ds_transition to Active",,""vv""::00,","name"name:"":"crucible"crucible",","level"level:":3030":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":,,""30timetime""::""2023-09-22T23:08:02.550590671Z"2023-09-22T23:08:02.550590968Z",","hostname"hostname:"":"2023-09-22T23:08:02.550503853Zip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,"""pidpid""::42914291}}
2499 
2500 {,,"{""msg"":time"":"[0] Transition from WaitQuorum to Active"2023-09-22T23:08:02.550604179Z","v,":"0,"hostnamename"":"msghostnameip-10-150-1-74.us-west-2.compute.internal"",":pid"""::"4291ip-10-150-1-74.us-west-2.compute.internal"},
2501 {:"msg":""crucible","level"[1] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) Active WaitActive New ds_transition to WaitQuorum"Crucible 278316f9-9926-4cf9-908b-c5ecd8db3aac has session id: 6ec92630-da8a-4479-b77d-86eef1bf4579","v":0,,""v"name:":0:30,"name":"crucible","level":30"crucible","level":30,"time"":"pid",":2023-09-22T23:08:02.550751895Z,"4291",timetime""}:
2502 "{"msg"::""2023-09-22T23:08:02.550761825Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":[2] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) Active Active WaitQuorum ds_transition to Active"4291}
2503 ,"v":0,"name":"2023-09-22T23:08:02.550755541Zcrucible{"","level,""msg":"hostname":"[0] 278316f9-9926-4cf9-908b-c5ecd8db3aac (504852f5-4e78-4bd4-9def-dcf505c09e8e) New New New ds_transition to WaitActive""ip-10-150-1-74.us-west-2.compute.internal",hostname""v:"":,0"ip-10-150-1-74.us-west-2.compute.internal,""pid",name:""30:"crucible","level"::429130}
2504 {,"time":"""2023-09-22T23:08:02.550842181Zpid"",:,4291""msg"}time"
2505 hostname"::""{2023-09-22T23:08:02.550838095Z"",msg"":[1] Transition from WaitActive to WaitQuorumhostname"":"ip-10-150-1-74.us-west-2.compute.internal:"",,""ip-10-150-1-74.us-west-2.compute.internal"v,""pid":4291}
2506 ":[1] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) Active New New ds_transition to WaitActive"0,",{"namev"":"0:msg,"":name""":"pid[0] Transition from New to WaitActivecrucible""crucible","level":,"30level":30":4291,"v"}:
2507 ,0"time",:""name":"crucible2023-09-22T23:08:02.550912176Z"",","level":{hostname30":","time":"ip-10-150-1-74.us-west-2.compute.internal"",2023-09-22T23:08:02.550913553Z"msg,"hostname":"ip-10-150-1-74.us-west-2.compute.internal"",:"",pid"":[2] Transition from WaitQuorum to Active4291"}
2508 time":"2023-09-22T23:08:02.550931076Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2509 {"msg":"[0] 278316f9-9926-4cf9-908b-c5ecd8db3aac (504852f5-4e78-4bd4-9def-dcf505c09e8e) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30{"msg",:""v":,0"time"",:"[1] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) Active WaitQuorum New ds_transition to Active"name":"crucible"2023-09-22T23:08:02.550979658Z","",level"":,hostname"30":v""ip-10-150-1-74.us-west-2.compute.internal",":pid":04291,"}name":pid":4291"crucible"}
2510 ,"level":30,"{time"msg":"":[1] Transition from New to WaitActive"","v2023-09-22T23:08:02.551008026Z"":0,,"
2511 name"":"hostname"crucible"{,:","levelmsg""::30""[0] Transition from WaitActive to WaitQuorum","v":0time",:""name":"crucible2023-09-22T23:08:02.551027926Z"","level,"":30hostname"",:""time":ip-10-150-1-74.us-west-2.compute.internal""ip-10-150-1-74.us-west-2.compute.internal",",2023-09-22T23:08:02.551050104Z""pid,pid""::42914291}
2512 ,"hostname"{:"ip-10-150-1-74.us-west-2.compute.internal""msg},""
2513 :time":"2023-09-22T23:08:02.551061933Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal",""pid":4291msg":}"
2514 6220a0c3-d619-4944-bb16-c06d6f7a401f is now active with session: a4be66d9-994e-49cf-8c96-b2599509ea94"{,"v":"0msg":","name":"crucible","[0] 278316f9-9926-4cf9-908b-c5ecd8db3aac (504852f5-4e78-4bd4-9def-dcf505c09e8e) WaitQuorum New New ds_transition to Active"level",":v":030,"name":"crucible","level":30"[1] Transition from WaitQuorum to Active----------------------------------------------------------------
2515 ,"",time"pid:" Crucible gen:0 GIO:true work queues: Upstairs:6 downstairs:6
2516 "GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
2517 2023-09-22T23:08:02.551129591Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291 1 AckReady 1000 Write 1 Done Done Done false
2518 }
2519 2 NotAcked 1001 Read 1 Done Done Done false
2520 3 NotAcked 1002 WriteU 1":4291}
2521 ,"{v":0"msg":,""name":"crucible","level":[1] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) Active WaitActive New ds_transition to WaitQuorum"30,"v":0,"name":"crucible","level":30"time":"2023-09-22T23:08:02.551127669Z","time":, Done Done" Done,2023-09-22T23:08:02.551211437Z"","time"hostname false
2522 4 AckReady 1003 Write 1" Newhostname":"{ip-10-150-1-74.us-west-2.compute.internal"",msg":""pid":[0] Transition from WaitQuorum to Active"4291,"v":0,"}name"
2523 ::"crucible"","level":302023-09-22T23:08:02.551208531Z"{ Skip New, false"msg":"
2524 ",":"time":"2023-09-22T23:08:02.551308397Z""ip-10-150-1-74.us-west-2.compute.internal" 5 NotAcked 1004 Read 1 New Skip[1] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) Active Active Active ds_transition to Faulted","hostnamepid",:"4291":"}v"
2525 :ip-10-150-1-74.us-west-2.compute.internal"0,,""{name":pid""":crucible"4291msg":","}level"[1] Transition from WaitActive to WaitQuorum": New30, false
2526 ,"hostname": 6 NotAcked 1005 WriteU 1 New,
2527 {"msg":"[2] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) Active Active New ds_transition to WaitActive","v":0 Skip, New" falsename":
2528 "v":0, STATES DS:0 DS:1 DS:2 TOTAL
2529 New "crucible"","level":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":4291}
2530 {"msg":""[1] 278316f9-9926-4cf9-908b-c5ecd8db3aac (504852f5-4e78-4bd4-9def-dcf505c09e8e) Active New New ds_transition to WaitActive","v":0time,""name"::","crucible"2023-09-22T23:08:02.551368709Z" 6 ,","level":hostname"30:" 3 6 ip-10-150-1-74.us-west-2.compute.internal" 15
2531 Sent "time":","time"2023-09-22T23:08:02.551436779Z":,"hostname"":"2023-09-22T23:08:02.551463329Z",ip-10-150-1-74.us-west-2.compute.internal"","hostname"pid"::"4291}
2532 ip-10-150-1-74.us-west-2.compute.internal","pid":"4291name}"
2533 :" 0 crucible{","level"":msg":30" 0 0 0
2534 Done 0 { 0 0 , 0
2535 Skipped ,"msg":"[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30 0 3 0 3
2536 "pid":4291 Error 0 0 [1] Transition from New to WaitActive 0 " 0
2537 ,Last Flush: ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.551589278Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2538 {"msg":"[1] 278316f9-9926-4cf9-908b-c5ecd8db3aac (504852f5-4e78-4bd4-9def-dcf505c09e8e) Active WaitActive New ds_transition to WaitQuorum"0 ,0 "time":"0
2539 Downstairs last five completed:"time":"2023-09-22T23:08:02.551507331Z2023-09-22T23:08:02.55154613Z"","hostname":","ip-10-150-1-74.us-west-2.compute.internal"hostname,}""pid
2540 :":"4291}
2541 {
2542 {"msg":""msg":"[1] Transition from Active to Faulted","v":0,""name"v[1] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) Active WaitQuorum New ds_transition to Active"":Upstairs last five completed: ,:"
2543 0,"v":crucible0","name":",crucible"",level""level"::3030"name":"crucible","level":30ip-10-150-1-74.us-west-2.compute.internal","time":,"",time"2023-09-22T23:08:02.551742237Z"":"pid":,2023-09-22T23:08:02.551746509Z"",hostname,"4291""timehostname:"":"}"ip-10-150-1-74.us-west-2.compute.internal"
2544 ,ip-10-150-1-74.us-west-2.compute.internal""pid",:":pid4291":"}4291
2545 }{
2546 {"msg":{""msg"":"msg":"[1] Transition from WaitQuorum to Active"[1] Transition from WaitActive to WaitQuorum,""v,"":v"0:[2] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) Active Active WaitActive ds_transition to WaitQuorum"0,",",name""name:"":v""crucible"crucible,"":level,0"":,30level"":name"30:"crucible","level":30,"time":,""time":"2023-09-22T23:08:02.551811011Z","2023-09-22T23:08:02.551813609Z"hostname",:""hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internalpid"",:"4291pid":}4291
2547 }
2548 {2023-09-22T23:08:02.551742673Z,"""{timemsg"":msg"",""::hostname""":"[2] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) Active Active New ds_transition to WaitActive"2023-09-22T23:08:02.551817566Z","ip-10-150-1-74.us-west-2.compute.internal,""[1] 278316f9-9926-4cf9-908b-c5ecd8db3aac (504852f5-4e78-4bd4-9def-dcf505c09e8e) Active WaitQuorum New ds_transition to Activehostname",""v,"":v:"0:,0",name""name:""crucible:""pid"":,crucible"4291"level,"":level30":}30ip-10-150-1-74.us-west-2.compute.internal"
2549 ,,"",timetime""::"""{pid"2023-09-22T23:08:02.551869234Z2023-09-22T23:08:02.551867573Z"":,,"4291"hostname"":msg""hostname"}:ip-10-150-1-74.us-west-2.compute.internal:"
2550 "","pid"[1] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) Active Faulted Active ds_transition to LiveRepairReady":ip-10-150-1-74.us-west-2.compute.internal"4291,",pid}"
2551 :{4291{"}v""
2552 msg""::"{0msg","[2] Transition from New to WaitActive"":",msg""v:"":name"0:"[2] Transition from WaitActive to WaitQuorum"crucible",[1] Transition from WaitQuorum to Active",,""","vv""name:"0:,""namecrucible""::level",:""level"crucible:"30,"0level30",:"30name":"crucible","level":30,"time":"2023-09-22T23:08:02.551945492Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.551950294Zpid"":,4291"hostname}"
2553 :"{ip-10-150-1-74.us-west-2.compute.internal"",,msg""pid:"":",4291"time"time":}[2] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) Active Active WaitActive ds_transition to WaitQuorum
2554 "":,{""2023-09-22T23:08:02.551948544Z""v"msg:"0:,,""2023-09-22T23:08:02.551954107Z""hostname"name",::"[2] 278316f9-9926-4cf9-908b-c5ecd8db3aac (504852f5-4e78-4bd4-9def-dcf505c09e8e) Active Active New ds_transition to WaitActive"""crucible,"",v"ip-10-150-1-74.us-west-2.compute.internal"hostname"level:,""::030,"""name":pid"":ip-10-150-1-74.us-west-2.compute.internal"crucible4291",",pid"}",level""
2555 :time:"30:"4291}2023-09-22T23:08:02.552002631Z"{
2556 ,",""msgtimehostname"{:"":"":""2023-09-22T23:08:02.552020306Z"ip-10-150-1-74.us-west-2.compute.internal",,""[1] Transition from Faulted to LiveRepairReady"pidhostname""::4291"msg,""}ip-10-150-1-74.us-west-2.compute.internal
2557 ":,{""pid""msg:"4291:v"}"
2558 [2] Transition from WaitActive to WaitQuorum"[2] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) Active Active WaitQuorum ds_transition to Active":{,0"",msg""vv""::"0:,,""name[2] Transition from New to WaitActive"":0,,"""vcrucible"":name0,,"""name:"namelevel""::"30crucible"crucible"",":level,""":level"30:,30"crucible"time":","level":2023-09-22T23:08:02.552083104Z"30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal",","pid"time:":4291"}
2559 2023-09-22T23:08:02.552092085Z","{hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal",,""pid":time"4291:"}[2] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) Active Active WaitQuorum ds_transition to Active
2560 ",,{"""vmsg""::0",time2023-09-22T23:08:02.552091582Z""":,""name":hostname2023-09-22T23:08:02.552096834Z[2] 278316f9-9926-4cf9-908b-c5ecd8db3aac (504852f5-4e78-4bd4-9def-dcf505c09e8e) Active Active WaitActive ds_transition to WaitQuorum"""",crucible"",v"",:"level:"0"ip-10-150-1-74.us-west-2.compute.internal:,30""name"hostname"::"",crucible"",pid"ip-10-150-1-74.us-west-2.compute.internal","leveltime""::"":302023-09-22T23:08:02.552139346Z"4291,,""}hostname
2561 pid","":time""::4291"ip-10-150-1-74.us-west-2.compute.internal"{2023-09-22T23:08:02.55215359Z,""}pid,"":
2562 "hostname4291":}"
2563 ip-10-150-1-74.us-west-2.compute.internal"{msg"{,":"pidmsg""::"4291""msg"[2] Transition from WaitQuorum to Active}":,
2564 ""v"[1] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) Active LiveRepairReady Active ds_transition to LiveRepair":{0[2] Transition from WaitQuorum to Active"",msg"",name:""",v":[2] Transition from WaitActive to WaitQuorum:"",0",vcrucible""v,"":""level0",:name"":name30"0::"",cruciblecrucible"""name,,""level,"":time30"level":""::"302023-09-22T23:08:02.552214305Z"crucible",","hostname"level",:"":time":"30ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.552225558Z,""pid,"":4291hostname":"}
2565 ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}
2566 ",msg{"":""time"msg":":"2023-09-22T23:08:02.552230633Z"4e3cf648-2b3b-4d4d-8069-b689dc174a75 is now active with session: e740b9e2-acbd-4260-8e40-b3b2459d8372",,"",v""hostname"[2] 278316f9-9926-4cf9-908b-c5ecd8db3aac (504852f5-4e78-4bd4-9def-dcf505c09e8e) Active Active WaitQuorum ds_transition to Active:"0:time,,"""namev"""::0":crucible,"",ip-10-150-1-74.us-west-2.compute.internal"name"""2023-09-22T23:08:02.552238062Z",:,"level"crucible"":hostname"30,:"""pidlevel"":ip-10-150-1-74.us-west-2.compute.internal:"429130,"},
2567 pid"",:time"4291time""::""}{
2568 "2023-09-22T23:08:02.552280733Z2023-09-22T23:08:02.552288301Z"",msg","":{hostnamehostname""::""""msg":"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::4291fce41f2d-d98f-4004-81f1-42f5f6597ea7 is now active with session: 76c19d0f-1c30-4091-9c4d-921b4ab004584291}"
2569 },
2570 {"[1] Transition from LiveRepairReady to LiveRepair"v""{msg",":msg""":v"::"00,"[2] Transition from WaitQuorum to Active[1] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) Active Active Active ds_transition to Faulted""name,,"",""namevv""::00,,"""namename"::"":""cruciblecrucible"":,,""level"cruciblecrucible"level""::3030",","level"level"::3030,,""timetime""::""2023-09-22T23:08:02.552369459Z2023-09-22T23:08:02.552369576Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291}}
2571 
2572 ,{,{""""msgmsg"":time""::""time":"2023-09-22T23:08:02.552374457Z"[1] Transition from Active to Faulted"278316f9-9926-4cf9-908b-c5ecd8db3aac is now active with session: 504852f5-4e78-4bd4-9def-dcf505c09e8e",,""vv""::0,,0",name""name":":hostname"2023-09-22T23:08:02.552374589Z""":,"cruciblecrucible"",,""level"level:"30":hostname":ip-10-150-1-74.us-west-2.compute.internal""30,"pid":ip-10-150-1-74.us-west-2.compute.internal"4291,"pid":}4291
2573 ,,""timetime""::""}{
2574 2023-09-22T23:08:02.552423047Z2023-09-22T23:08:02.552426814Z""",,""msg"hostnamehostname""::""{:"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pid""pid:"4291:msg"4291}:}
2575 
2576 {"[1] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) Active Active Active ds_transition to Faulted""{msg",":msg""":"[0] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) Active LiveRepair Active ds_transition to Faulted"v":,"0v",:"0[1] 278316f9-9926-4cf9-908b-c5ecd8db3aac (504852f5-4e78-4bd4-9def-dcf505c09e8e) Active Active Active ds_transition to Faulted[1] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) Active Faulted Active ds_transition to LiveRepairReady"",,,"""vvname""::00"name"::,,""name"name:"":""cruciblecrucible"crucible","",level"crucible"":"30,,""level"level:"30:level"30:30,,""timetime""::"","2023-09-22T23:08:02.552503334Z2023-09-22T23:08:02.552502151Z""time,,"",hostnamehostname"":"":"time":ip-10-150-1-74.us-west-2.compute.internal"":,"""pidip-10-150-1-74.us-west-2.compute.internal"":,4291"2023-09-22T23:08:02.552504549Z}pid
2577 "2023-09-22T23:08:02.552499948Z""{:,4291""}msghostname","
2578 ::""{"hostname":""ip-10-150-1-74.us-west-2.compute.internalmsg[1] Transition from Active to Faulted""ip-10-150-1-74.us-west-2.compute.internal":,"",v""[1] Transition from Faulted to LiveRepairReady:"0,pid",v""name:"0:,"""",:namecrucible"",:"""levelcrucible"":,30"4291level"pid":}:
2579 4291,30"time":}"{
2580 "2023-09-22T23:08:02.55257189Z"msg,,"""timehostname""::"":{"2023-09-22T23:08:02.552581626Zip-10-150-1-74.us-west-2.compute.internal"",,""pid"hostname:"4291:"[1] Transition from Active to Faulted""}ip-10-150-1-74.us-west-2.compute.internal
2581 "msg,{",":""pidmsg""::4291"[0] Transition from Active to Faulted"}"
2582 ,"vv""{[1] 278316f9-9926-4cf9-908b-c5ecd8db3aac (504852f5-4e78-4bd4-9def-dcf505c09e8e) Active Faulted Active ds_transition to LiveRepairReady":,":msg""00:v"",:"0name",:,"[1] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) Active LiveRepairReady Active ds_transition to LiveRepair""name",:""v"crucible:"0,,""name"cruciblelevel""name"::"30crucible"",:",level"":level"30:"30crucible",",time"":level"":302023-09-22T23:08:02.552643459Z",,""time"hostname:"":"2023-09-22T23:08:02.552650352Z",ip-10-150-1-74.us-west-2.compute.internal"",hostname"":pid"":4291}
2583 ip-10-150-1-74.us-west-2.compute.internal,,"{"","timemsg""pid"time":"":::2023-09-22T23:08:02.552657773Z"4291""},
2584 [1] Transition from Faulted to LiveRepairReady""2023-09-22T23:08:02.552652134Zhostname,{""v""msg:"0:,""",:name[1] Transition from LiveRepairReady to LiveRepair"":,"""cruciblev"",:"0"hostname,level""":ip-10-150-1-74.us-west-2.compute.internal:name""30:""ip-10-150-1-74.us-west-2.compute.internal",crucible,"",pid"""level"pid:",:"304291time":}:4291
2585 "},
2586 2023-09-22T23:08:02.552713054Z""time",:""{hostname":"2023-09-22T23:08:02.552724386Z"{,"ip-10-150-1-74.us-west-2.compute.internalhostname"",:""pid"""msg:ip-10-150-1-74.us-west-2.compute.internal4291""msg",}"
2587 pid:"{::"4291""msg}"
2588 :"{[2] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) Faulted LiveRepair Active ds_transition to Faulted""[1] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) Active Faulted Active ds_transition to LiveRepairReady"msg",[1] 278316f9-9926-4cf9-908b-c5ecd8db3aac (504852f5-4e78-4bd4-9def-dcf505c09e8e) Active LiveRepairReady Active ds_transition to LiveRepair,:"",""Write to Extent 0:0:9 under repair""v",:v"0",v"":name:"0:,""v"crucible0name:"",:,""level"crucible:"30,0",level"":name"40:"crucible","level":30,"time":"2023-09-22T23:08:02.552788191Z","hostname",:""time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.552792721Z,"",pid"":hostname4291":"}
2589 ip-10-150-1-74.us-west-2.compute.internal""{,"pid"":,"msg4291"name"time":::}"
2590 "[1] Transition from LiveRepairReady to LiveRepair"{"crucible",""msgv""::0",,2023-09-22T23:08:02.552796235Z""Write to Extent 0:0:9 under repairname"",:""v"crucible:,0",,"""levelname"""::30"level"hostname"crucible":,:""30level":40ip-10-150-1-74.us-west-2.compute.internal","pid,"":time":4291"}
2591 ,2023-09-22T23:08:02.552840179Z""time",:""hostname":"2023-09-22T23:08:02.552847131Z"{,"ip-10-150-1-74.us-west-2.compute.internal"hostname",:""pid":"4291msg",}ip-10-150-1-74.us-west-2.compute.internal
2592 "","timepid{":":"4291"}msg
2593 ":[1] Transition from Faulted to LiveRepairReady{:"""msg""2023-09-22T23:08:02.552846792Z",[1] 0 final dependency list []:""",v",:[1] client skip 3 in process jobs because fault"""v,""hostname"0:v:"0:,0",name"",:name""""crucible:"name","crucible:ip-10-150-1-74.us-west-2.compute.internal"",""crucible""level,,"pid"level"""level:"30::30:429130}
2594 ,",time"":time"":"2023-09-22T23:08:02.55291739Z"2023-09-22T23:08:02.552918617Z,""{hostname,"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal""ip-10-150-1-74.us-west-2.compute.internalmsg","",pid"":pid4291":",:,4291""[2] Transition from Active to Faulted"time",:"",:""":"v"downstairs""downstairs":}}
2595 
2596 {02023-09-22T23:08:02.552920144Z",",""name":msg{"hostname":"msg""":[1] job 1000 middle: AckReady""crucible,:[1] 0 final dependency list []""v",:"0v,"":""0name,"":,ip-10-150-1-74.us-west-2.compute.internal"name""level"":crucible"",crucible,"""pid:",level""level::"4291:303030}
2597 {,,"""timetime""::""msg":"2023-09-22T23:08:02.552998024Z2023-09-22T23:08:02.552997908Z"",,""hostnamehostname""::"","time"[1] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) Active LiveRepairReady Active ds_transition to LiveRepair:ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pid"pid:"4291:"4291,",""2023-09-22T23:08:02.5529991Z""::"",""downstairsdownstairs"",}}hostname"":
2598 
2599 {"v":"{0ip-10-150-1-74.us-west-2.compute.internal","msg"msg:":""",name"":[1] changed 3 jobs to fault skippedpid"""[1] 0 final dependency list []",,""vv""::00,,""crucible":namename""::""4291,cruciblecrucible"","},""level"level:"30:level"
2600 :3030,"time"{:"2023-09-22T23:08:02.553073971Z",""hostname":","msg"time"::ip-10-150-1-74.us-west-2.compute.internal""","2023-09-22T23:08:02.55308023Z"pid":,"4291hostname",:"""Write to Extent 0:0:9 under repair",ip-10-150-1-74.us-west-2.compute.internal,:"",""downstairspid"":"}time"
2601 4291:v",{":"""msg:""2023-09-22T23:08:02.553080944Z"downstairs:""}0
2602 ,,""name"[1] 4e3cf648-2b3b-4d4d-8069-b689dc174a75 (e740b9e2-acbd-4260-8e40-b3b2459d8372) Active LiveRepair Active ds_transition to Faulted":,hostname"""crucible":v":,""0level,"ip-10-150-1-74.us-west-2.compute.internal""name",:":"crucible"40pid",:"4291level":30}
2603 ,"{time":","2023-09-22T23:08:02.553143692Z"time,"""msg"::hostname""":"2023-09-22T23:08:02.553140658Z",[1] Transition from LiveRepairReady to LiveRepair"ip-10-150-1-74.us-west-2.compute.internal"",hostname""pid"::"4291,"v":ip-10-150-1-74.us-west-2.compute.internal"}0,,
2604 ""pid"name"::4291{"crucible"}",
2605 "msg"level":":30{[1] Transition from LiveRepair to Faulted","v":0","msg"name:":""crucible","levelWrite to Extent 0:0:9 under repair"":,"30v":0,"name":"crucible","level":,"40time":"2023-09-22T23:08:02.55319907Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time"}:"
2606 2023-09-22T23:08:02.553210278Z","hostname",:""{time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.553216017Z""pid":,msg""4291:hostname"}:
2607 ""Write to Extent 0:0:9 under repair"ip-10-150-1-74.us-west-2.compute.internal"{,,""pid":"4291v"msg"}::"
2608 Extent 0 Aborting repair"0,",v":"{name"0:,"""name"msg":":crucible"",crucible""[1] client skip 3 in process jobs because fault,level""level""::,4040"v":0,"name":"crucible","level":30,"time":","2023-09-22T23:08:02.553282532Z"time":","hostname":"2023-09-22T23:08:02.553282931Z",",time":ip-10-150-1-74.us-west-2.compute.internal""",hostname"":"2023-09-22T23:08:02.5532883Z"pid":4291ip-10-150-1-74.us-west-2.compute.internal","},
2609 hostname":""pid":4291{ip-10-150-1-74.us-west-2.compute.internal","}pid""
2610 msg"::"4291,Extent 0 Create and send noop jobs""{,"":v""":downstairs0",msg}"
2611 :""name":"Write to Extent 0:0:9 under repair"crucible"{,,""v""levelmsg":"0:,:40""name":"[1] job 1000 middle: AckReady"crucible",,""v"level:":040,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.553362379Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",",pid":"4291time":"}
2612 ,"2023-09-22T23:08:02.553369719Z"time":","{test live_repair::repair_test::test_repair_dep_cleanup_done ... 2023-09-22T23:08:02.553373652Z"hostname":,"""msg"hostname"ip-10-150-1-74.us-west-2.compute.internal"::ok""ip-10-150-1-74.us-west-2.compute.internalAbort repair on extent 0 starting with id 1003 deps:[JobId(1002), JobId(1001), JobId(1000)]",
2613 ,"v""":,pid0",pid"":"4291name":":crucible,""":","4291downstairs"level":}}
2614 
2615 {40{"msg":""[1] notify = true for 1001"msg":","v":0,[1] client skip 3 in process jobs because fault,"""time":,"name":v""crucible:"0",,""2023-09-22T23:08:02.55344461Zlevel"":,"name30hostname":":""crucible","level":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":4291}
2616 ,"time":"2023-09-22T23:08:02.553468431Z",","time"hostname:"":"2023-09-22T23:08:02.553474358Z"ip-10-150-1-74.us-west-2.compute.internal,"","hostname"pid:"":4291,"ip-10-150-1-74.us-west-2.compute.internal"":,""pid"downstairs:"4291}
2617 ,"":"downstairs"}
2618 {"{msg":"[1] notify = true for 1002"",msg"":v"":0[1] job 1000 middle: AckReady",",name"":v"":crucible"0,,""name"level"::"30crucible","level":30,,""time":"time":"2023-09-22T23:08:02.55354077Z"2023-09-22T23:08:02.553538593Z","hostname",:""hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"ip-10-150-1-74.us-west-2.compute.internal:"4291,"pid,":"4291":","downstairs"":"}
2619 downstairs"}
2620 {"{msg":""[1] changed 3 jobs to fault skipped"msg":","v":0[1] changed 3 jobs to fault skipped",","namev""::"0crucible",","namelevel""::30"crucible","level":30,,""time":time":""2023-09-22T23:08:02.55360386Z"2023-09-22T23:08:02.553601089Z",","hostname":hostname"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",",pid""pid"::42914291,,""":"":"downstairs"downstairs"}
2621 }
2622 {"msg":"{"msg":"[1] 6220a0c3-d619-4944-bb16-c06d6f7a401f (a4be66d9-994e-49cf-8c96-b2599509ea94) Faulted LiveRepair Faulted ds_transition to Faulted","v":0,"name":"crucible","[1] fce41f2d-d98f-4004-81f1-42f5f6597ea7 (76c19d0f-1c30-4091-9c4d-921b4ab00458) Active LiveRepair Active ds_transition to Faulted"level":,"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.553663312Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","pid"2023-09-22T23:08:02.553668636Z":4291,"hostname"}:
2623 "ip-10-150-1-74.us-west-2.compute.internal","pid"{:4291}
2624 "msg":"{[1] Transition from LiveRepair to Faulted","v":"0msg",:""name":"crucible"[1] Transition from LiveRepair to Faulted",","level":v":300,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.553724737Z,"","time":hostname"":"2023-09-22T23:08:02.553728729Z"ip-10-150-1-74.us-west-2.compute.internal,"","hostname":pid"":4291}ip-10-150-1-74.us-west-2.compute.internal"
2625 ,"pid":4291}{
2626 test live_repair::repair_test::test_repair_abort_reserved_jobs ... "msgok{
2627 "":"msg":"Extent 0 Aborting repair"Extent 0 Aborting repair",",v""v":0,"name":"crucible","level":40,"time":"2023-09-22T23:08:02.553791812Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2628 {"msg":"Crucible stats registered with UUID: b0e3c5c3-cd28-4727-a05c-bad7123639b4","v"::0,0"name":,""crucible"name",":"level":crucible"30,"level":40,"time":","time"2023-09-22T23:08:02.553869769Z":","hostname2023-09-22T23:08:02.553871771Z"":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"ip-10-150-1-74.us-west-2.compute.internal":,"4291pid":4291}
2629 }
2630 {"msg":"{"msg":"Crucible b0e3c5c3-cd28-4727-a05c-bad7123639b4 has session id: 3db7f790-4d53-4e7a-ade8-e826a5d38e48","v":0Abort repair on extent 0: All downstairs are Faulted",",name"":v"":crucible"0,,""name":level"":crucible"30,"level":50,"time":"2023-09-22T23:08:02.553936916Z",,""time":"hostname":"2023-09-22T23:08:02.553938703Z","ip-10-150-1-74.us-west-2.compute.internal"hostname,"":"pid":4291ip-10-150-1-74.us-west-2.compute.internal}"
2631 ,"pid":4291{}
2632 "msg":"[0] b0e3c5c3-cd28-4727-a05c-bad7123639b4 (3c586b7d-7750-46c5-8dd5-c7e1fc78978a) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.553988783Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2633 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30test live_repair::repair_test::test_repair_abort_basic ... ok
2634 ,"time":"2023-09-22T23:08:02.554030144Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
26352023-09-22T23:08:02.554ZINFOcrucible: [0] b0e3c5c3-cd28-4727-a05c-bad7123639b4 (3c586b7d-7750-46c5-8dd5-c7e1fc78978a) WaitActive New New ds_transition to WaitQuorum
26362023-09-22T23:08:02.554ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
26372023-09-22T23:08:02.554ZINFOcrucible: [0] b0e3c5c3-cd28-4727-a05c-bad7123639b4 (3c586b7d-7750-46c5-8dd5-c7e1fc78978a) WaitQuorum New New ds_transition to Active
26382023-09-22T23:08:02.554ZINFOcrucible: [0] Transition from WaitQuorum to Active
2639 {"msg":"test live_repair::repair_test::test_repair_abort_all_failed_reserved_jobs ... [1] b0e3c5c3-cd28-4727-a05c-bad7123639b4 (3c586b7d-7750-46c5-8dd5-c7e1fc78978a) Active New New ds_transition to WaitActiveok"
2640 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.554245758Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2641 {"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level{":30"msg":"Crucible stats registered with UUID: a67c3619-5a95-4827-9e5d-6fcdacafd685","v":0,"name":"crucible","level":,"30time":"2023-09-22T23:08:02.55428634Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2642 {"msg":","time":"2023-09-22T23:08:02.554302254Z"[1] b0e3c5c3-cd28-4727-a05c-bad7123639b4 (3c586b7d-7750-46c5-8dd5-c7e1fc78978a) Active WaitActive New ds_transition to WaitQuorum",","hostname"v"::"0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible",",pid"":level":429130}
2643 {"msg":","time":"Crucible a67c3619-5a95-4827-9e5d-6fcdacafd685 has session id: ea4e2b43-abd0-4b45-9a7a-b02971269ea52023-09-22T23:08:02.554336473Z"",",v":"0hostname,"":"name":"crucible","ip-10-150-1-74.us-west-2.compute.internal"level":,"30pid":4291}
2644 {,"time":""msg":"2023-09-22T23:08:02.554365936Z","[1] Transition from WaitActive to WaitQuorumhostname":"","v":ip-10-150-1-74.us-west-2.compute.internal"0,","pidname""::4291"crucible"},
2645 "level":30{"msg":"[0] a67c3619-5a95-4827-9e5d-6fcdacafd685 (29dc6a8c-0001-4949-b734-4fa4c3de49de) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.554404672Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2646 ,"time":{"2023-09-22T23:08:02.554421125Z"","msg"hostname":":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2647 [1] b0e3c5c3-cd28-4727-a05c-bad7123639b4 (3c586b7d-7750-46c5-8dd5-c7e1fc78978a) Active WaitQuorum New ds_transition to Active","v":0,"{name":"crucible","level"":msg"30:"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.554459478Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",",pid"":time4291":"}
2648 2023-09-22T23:08:02.554467216Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid"":msg"4291:"}
2649 [1] Transition from WaitQuorum to Active","v":0,"{name":"crucible",""levelmsg"":":30[0] a67c3619-5a95-4827-9e5d-6fcdacafd685 (29dc6a8c-0001-4949-b734-4fa4c3de49de) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.554511435Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291},
2650 "time":"{2023-09-22T23:08:02.554521353Z",""hostname"msg"::""ip-10-150-1-74.us-west-2.compute.internal","pid":4291}[2] b0e3c5c3-cd28-4727-a05c-bad7123639b4 (3c586b7d-7750-46c5-8dd5-c7e1fc78978a) Active Active New ds_transition to WaitActive"
2651 ,"v":0,"name":"{crucible","level":30"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.554563299Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2652 ,"time":"{2023-09-22T23:08:02.554572026Z","hostname"":msg"":"[2] Transition from New to WaitActiveip-10-150-1-74.us-west-2.compute.internal"",",pid"":v4291":0},
2653 "name":"crucible","level{":30"msg":"{"msg"[0] a67c3619-5a95-4827-9e5d-6fcdacafd685 (29dc6a8c-0001-4949-b734-4fa4c3de49de) WaitQuorum New New ds_transition to Active":","v":0,"name":"Crucible stats registered with UUID: f238b930-a5e0-4425-a639-165303326233crucible"","level,,""time""v":0,"name":"crucible","level":30:"2023-09-22T23:08:02.554613521Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2654 {"msg":","time":"2023-09-22T23:08:02.554635552Z","hostname":"[2] b0e3c5c3-cd28-4727-a05c-bad7123639b4 (3c586b7d-7750-46c5-8dd5-c7e1fc78978a) Active Active WaitActive ds_transition to WaitQuorum","v"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2655 :{30"msg":"Crucible f238b930-a5e0-4425-a639-165303326233 has session id: ea6a7783-cfdd-4381-839d-ebe9eec602ed","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.554673341Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,}"
2656 time":"{2023-09-22T23:08:02.554685028Z",""hostname"msg:"":"ip-10-150-1-74.us-west-2.compute.internal"[0] Transition from WaitQuorum to Active",","pid"v:":42910,"}
2657 {"msg":":0,"name":"crucible","level"[0] f238b930-a5e0-4425-a639-165303326233 (45eb49ee-4fdc-4ffa-8697-e0375c977186) New New New ds_transition to WaitActive","v":0,"name":"crucible","namelevel""::"30crucible","level":30,,""timetime""::""2023-09-22T23:08:02.554729383Z2023-09-22T23:08:02.554727364Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2658 {:30"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.554761074Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal,"","time"pid:"":4291"}2023-09-22T23:08:02.554768817Z"
2659 ,,""{hostname":"hostname""msg"ip-10-150-1-74.us-west-2.compute.internal":"ip-10-150-1-74.us-west-2.compute.internal",",pid""pid"::42914291}
2660 :"{[2] Transition from WaitActive to WaitQuorum"","msg"v:"":0,"name":"crucible","level":30[0] f238b930-a5e0-4425-a639-165303326233 (45eb49ee-4fdc-4ffa-8697-e0375c977186) WaitActive New New ds_transition to WaitQuorum","}v":0
2661 {,"time":"2023-09-22T23:08:02.554811544Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2662 {"msg":"[2] b0e3c5c3-cd28-4727-a05c-bad7123639b4 (3c586b7d-7750-46c5-8dd5-c7e1fc78978a) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible","level":30"msg":"[1] a67c3619-5a95-4827-9e5d-6fcdacafd685 (29dc6a8c-0001-4949-b734-4fa4c3de49de) Active New New ds_transition to WaitActive","v":0,,""name":name,"":"time"crucible"":crucible","","level"2023-09-22T23:08:02.554905557Zlevel":"30,:"30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2663 {,"time"":msg"":"2023-09-22T23:08:02.554929777Z",[2] Transition from WaitQuorum to Active""hostname,"":"v":0,"name"ip-10-150-1-74.us-west-2.compute.internal:"","cruciblepid"":,"4291level":}30
2664 ,"time":"{2023-09-22T23:08:02.55493174Z"",msg"":"hostname":","time"[0] Transition from WaitActive to WaitQuorum:""ip-10-150-1-74.us-west-2.compute.internal",2023-09-22T23:08:02.554951994Z""v":,0",,""hostnamename""::""pid":crucible"ip-10-150-1-74.us-west-2.compute.internal,"",4291level"":pid"30:4291}
2665 }
2666 ,"time":{{""2023-09-22T23:08:02.554975317Zmsg"":",""hostname"msg":":"b0e3c5c3-cd28-4727-a05c-bad7123639b4 is now active with session: 3c586b7d-7750-46c5-8dd5-c7e1fc78978a","ip-10-150-1-74.us-west-2.compute.internal[1] Transition from New to WaitActive""v",,""pid:"0:v"4291,":name}"0:
2667 ","crucible"{name":",""crucible"msglevel""::30","level":30[0] f238b930-a5e0-4425-a639-165303326233 (45eb49ee-4fdc-4ffa-8697-e0375c977186) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible",","time"level:"":302023-09-22T23:08:02.555009776Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2668 ,","time"{:time"""msg"::2023-09-22T23:08:02.555020899Z""","hostname":2023-09-22T23:08:02.555012098Z"","hostname":"ip-10-150-1-74.us-west-2.compute.internal[1] b0e3c5c3-cd28-4727-a05c-bad7123639b4 (3c586b7d-7750-46c5-8dd5-c7e1fc78978a) Active Active Active ds_transition to Faulted"",","pid"v:"4291:ip-10-150-1-74.us-west-2.compute.internal"}0,,
2669 ""name"pid":{":crucible""4291,msg"":level""}:
2670 [0] Transition from WaitQuorum to Active30","v":0,"name":"{crucible","level":30"msg":","time":"2023-09-22T23:08:02.555059959Z","[1] a67c3619-5a95-4827-9e5d-6fcdacafd685 (29dc6a8c-0001-4949-b734-4fa4c3de49de) Active WaitActive New ds_transition to WaitQuorum"hostname":",","time"v:ip-10-150-1-74.us-west-2.compute.internal""",2023-09-22T23:08:02.555065316Z"":,pid"":hostname4291"0:}"
2671 ip-10-150-1-74.us-west-2.compute.internal",,"{"name"pid""::msg4291"":}"
2672 [1] Transition from Active to Faultedcrucible""{,,"""level"msgv""::"0:,"30name":"crucible","level":30[1] f238b930-a5e0-4425-a639-165303326233 (45eb49ee-4fdc-4ffa-8697-e0375c977186) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.555106996Z",","time,hostname""::"""time":2023-09-22T23:08:02.555111999Z"ip-10-150-1-74.us-west-2.compute.internal"",,""2023-09-22T23:08:02.555104233Z"pidhostname""::4291",}"
2673 ip-10-150-1-74.us-west-2.compute.internal"hostname"{,:"""pidmsg""::"4291ip-10-150-1-74.us-west-2.compute.internal"},
2674 "pid":{[1] b0e3c5c3-cd28-4727-a05c-bad7123639b4 (3c586b7d-7750-46c5-8dd5-c7e1fc78978a) Active Faulted Active ds_transition to LiveRepairReady4291""msg",:""v}"
2675 [1] Transition from New to WaitActive:"0,,""v":name"0:","cruciblename"":,""{levelcrucible"":,30"level":30"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible",,""time"time,"::"""level":2023-09-22T23:08:02.555156146Z2023-09-22T23:08:02.555157254Z""30,,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291}}
2676 
2677 {"{msg":""msg":"[1] Transition from Faulted to LiveRepairReady","v":,0","time":[1] f238b930-a5e0-4425-a639-165303326233 (45eb49ee-4fdc-4ffa-8697-e0375c977186) Active WaitActive New ds_transition to WaitQuorumname"":","crucible""v",:"02023-09-22T23:08:02.555172631Zlevel,"":"30name":","crucible"hostname",:""level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291},
2678 ,""timetime""::""2023-09-22T23:08:02.555198398Z2023-09-22T23:08:02.555203362Z""{,,""hostname"hostname:"":""msg"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"":,,""pidpid":"4291:"}4291
2679 }
2680 {[1] a67c3619-5a95-4827-9e5d-6fcdacafd685 (29dc6a8c-0001-4949-b734-4fa4c3de49de) Active WaitQuorum New ds_transition to Active""{,msg""":msg"":"v":0,[1] Transition from WaitActive to WaitQuorum"",name""v"::[1] b0e3c5c3-cd28-4727-a05c-bad7123639b4 (3c586b7d-7750-46c5-8dd5-c7e1fc78978a) Active LiveRepairReady Active ds_transition to LiveRepair0",",""crucible"namev""::"0,crucible,"",""namelevel"level":"::"3030crucible","level":30,"time":","time2023-09-22T23:08:02.55526151Z"":","hostname":2023-09-22T23:08:02.555263922Z"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291ip-10-150-1-74.us-west-2.compute.internal",}"
2681 pid",:{"4291"time"msg}"
2682 :":"{2023-09-22T23:08:02.555262513Z""msg":","hostname":"[1] f238b930-a5e0-4425-a639-165303326233 (45eb49ee-4fdc-4ffa-8697-e0375c977186) Active WaitQuorum New ds_transition to Active"[1] Transition from LiveRepairReady to LiveRepair",","vv""::00,,""ip-10-150-1-74.us-west-2.compute.internal"namename""::"",cruciblecrucible"",,"""levellevel""::3030pid":4291}
2683 ,,""timetime""::""{2023-09-22T23:08:02.555309599Z2023-09-22T23:08:02.555309376Z"",,"""hostnamehostname""::""msg":"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",",pid""pid:"4291:[1] Transition from WaitQuorum to Active}"
2684 4291,"v}{
2685 ""msg:{":0"",msg"":[1] Transition from WaitQuorum to Activename""":","Write to Extent 0:1:9 under repairv""crucible":,",v0","":level"name:"030,:""name"crucible:"","crucible"level",:"30level":40,,""timetime""::""2023-09-22T23:08:02.555360428Z2023-09-22T23:08:02.555359687Z"",,""hostnamehostname""::"","timeip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal""",,""pidpid""::42914291:"}}
2686 
2687 {2023-09-22T23:08:02.555356841Z"{{,"hostname":"ip-10-150-1-74.us-west-2.compute.internal",""pid"msg""msg":"::"4291}
2688 Crucible stats registered with UUID: 47522d2c-b387-4d7d-a896-16a922aa39ba{","v":0,"name":"crucible","level":30[2] f238b930-a5e0-4425-a639-165303326233 (45eb49ee-4fdc-4ffa-8697-e0375c977186) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time,":"""time2023-09-22T23:08:02.555432394Zmsg""":,"":"hostname":"2023-09-22T23:08:02.555423295Z"ip-10-150-1-74.us-west-2.compute.internal,"hostname"[2] a67c3619-5a95-4827-9e5d-6fcdacafd685 (29dc6a8c-0001-4949-b734-4fa4c3de49de) Active Active New ds_transition to WaitActive":","v":ip-10-150-1-74.us-west-2.compute.internal"0,","pid"name":4291}
2689 {:"crucible""msg",":"level":"30,"pid":4291Crucible 47522d2c-b387-4d7d-a896-16a922aa39ba has session id: 05d7f4c3-b11e-4eed-a687-3a04e8a5e796"},"
2690 v":0,"name":"crucible"{,"level":30"msg":"[2] Transition from New to WaitActive","v":0,"name":",""crucible"time,msg""level"::"30":"Write to Extent 0:1:9 under repair"2023-09-22T23:08:02.555489878Z",",v":"0,,""time":"2023-09-22T23:08:02.555503876Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time",:""pid":42912023-09-22T23:08:02.555515227Z","}hostname
2691 {"msg":"":"ip-10-150-1-74.us-west-2.compute.internal"name",:""[0] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) New New New ds_transition to WaitActive"pidcrucible"",:,""levelv":0,"name":"":crucible"40,"level":304291}
2692 {,"time":","2023-09-22T23:08:02.555562656Ztime":"2023-09-22T23:08:02.555564084Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"hostname:"4291:"}
2693 {"msg":"[0] Transition from New to WaitActiveip-10-150-1-74.us-west-2.compute.internal"","v",":"0,,pid""hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2694 {"msg":"Write to Extent 0:2:9 under repair","v":0,"name":"crucible","level":40"msg":"[2] f238b930-a5e0-4425-a639-165303326233 (45eb49ee-4fdc-4ffa-8697-e0375c977186) Active Active WaitActive ds_transition to WaitQuorum","v":,0","time"name:"":name"2023-09-22T23:08:02.5556352Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2695 {"msg":""Write 1:0:9 past extent under repair 0":,"v4291":0,"name"}:
2696 ":"crucible"crucible"{""crucible"msg":","level":[2] Transition from New to WaitActive"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.55569827Z",","hostname":"level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2697 ,{""msg":"time":"[2] Transition from WaitActive to WaitQuorum"2023-09-22T23:08:02.555704878Z","v":,0,,""name"level:"":,40crucible"",time":"2023-09-22T23:08:02.55571875Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2698 {"msg":","time":""level2023-09-22T23:08:02.555741178Z"":30[0] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) WaitActive New New ds_transition to WaitQuorum",","hostname"v":":0,"nameip-10-150-1-74.us-west-2.compute.internal"":""crucible",","timelevel"":,30"pid":4291}
2699 {"msg":"IO Write 1013 on eur 0 Added deps 1","v":,0","time"name:":2023-09-22T23:08:02.55579352Z"hostname",:""hostname":""ip-10-150-1-74.us-west-2.compute.internal",ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.555773116Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}"
2700 :"crucible","level":40{"msg":"[2] f238b930-a5e0-4425-a639-165303326233 (45eb49ee-4fdc-4ffa-8697-e0375c977186) Active Active WaitQuorum ds_transition to Active",",pid"":v4291}
2701 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":""crucible"pid",":level"4291:30}
2702 {":0","msg":name":""crucible","level":30,"[2] a67c3619-5a95-4827-9e5d-6fcdacafd685 (29dc6a8c-0001-4949-b734-4fa4c3de49de) Active Active WaitActive ds_transition to WaitQuorumtime"":","v":02023-09-22T23:08:02.555884256Z","name,",:"""hostname"time":"2023-09-22T23:08:02.555844646Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid,"":time4291":"}
2703 2023-09-22T23:08:02.555900152Z","{hostname":""msg":ip-10-150-1-74.us-west-2.compute.internal"","pid":[1] 1007 final dependency list []"4291,"v":0},
2704 ":name"":"crucible","ip-10-150-1-74.us-west-2.compute.internal{""msg":"crucible[2] Transition from WaitQuorum to Active"",,""level"v"::300,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.555959165Z",","pid"hostname"::"4291,"timeip-10-150-1-74.us-west-2.compute.internal"},
2705 {"msg":"[0] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) WaitQuorum New New ds_transition to Active","v":level0",:"30name":"crucible","level":30"pid":4291}
2706 ,"time":"2023-09-22T23:08:02.556004207Z"{",,:""time":"2023-09-22T23:08:02.556007587Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291"}
2707 {hostname""msg":":"ip-10-150-1-74.us-west-2.compute.internal"[0] Transition from WaitQuorum to Active",","pid"v"2023-09-22T23:08:02.555963769Z",":hostname0:4291,"":"downstairs"}
2708 {"msg":"[1] 1008 final dependency list [JobId(1004)]","v":0,"name":"crucible","level":30","msg"name:","time":"2023-09-22T23:08:02.556092635Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs"}
2709 {"msg":""[1] 1013 final dependency list [JobId(1008), JobId(1004), JobId(1012)]":,"":v":"0ip-10-150-1-74.us-west-2.compute.internal,"crucible",""name",pid"":4291}
2710 {""msg":[2] Transition from WaitActive to WaitQuorum"","v":0,"name":"cruciblef238b930-a5e0-4425-a639-165303326233 is now active with session: 45eb49ee-4fdc-4ffa-8697-e0375c977186"",","level"v:"30:level"0:,"30name":"crucible","level":30:"crucible",","level"time:":30"2023-09-22T23:08:02.556166399Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""timepid""::4291,"}"
2711 time,"2023-09-22T23:08:02.55616924Z{:"""2023-09-22T23:08:02.556181095Z"msg":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291[2] a67c3619-5a95-4827-9e5d-6fcdacafd685 (29dc6a8c-0001-4949-b734-4fa4c3de49de) Active Active WaitQuorum ds_transition to Active","",:""v":downstairs0","}
2712 name":"crucible","level":30,"time":"2023-09-22T23:08:02.556215827Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""pid":hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2713 {"time":""msg"2023-09-22T23:08:02.556172897Z":","hostname":"4291ip-10-150-1-74.us-west-2.compute.internal[1] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) Active New New ds_transition to WaitActive"","},pid"":4291}
2714 {"msg":"[1] f238b930-a5e0-4425-a639-165303326233 (45eb49ee-4fdc-4ffa-8697-e0375c977186) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30v":0,"name":"crucible","level":30
2715 ,"time":"{2023-09-22T23:08:02.556343206Z","hostname":"",msg""ip-10-150-1-74.us-west-2.compute.internal":,""timepid"":[2] Transition from WaitQuorum to Active"4291:",}"2023-09-22T23:08:02.55635025Z
2716 "v":0,,"{"msg":"[1] Transition from Active to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.556395633Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2717 hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":"4291msg":"}
2718 [1] f238b930-a5e0-4425-a639-165303326233 (45eb49ee-4fdc-4ffa-8697-e0375c977186) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.556438302Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2719 {{"msg":"""msg"[1] Transition from New to WaitActive:"","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.556481286Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2720 {"msg":"[1] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30name":,""timecrucible"":",[1] Transition from Faulted to LiveRepairReady2023-09-22T23:08:02.556533757Z"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2721 "level":{30"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,",time"":"v":2023-09-22T23:08:02.55660298Z"0,","hostname"name"::""crucibletest live_repair::repair_test::test_repair_dep_cleanup_repair ... ,","oklevel"
2722 :30"time":"2023-09-22T23:08:02.55661369Z",","time"hostname":":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.556643946Z","pid":,"4291hostname":"}
2723 {"msg":"ip-10-150-1-74.us-west-2.compute.internal[1] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible"","level",:"30pid":4291ip-10-150-1-74.us-west-2.compute.internal}"
2724 ,"pid":4291}{
2725 ","msg"time":"{2023-09-22T23:08:02.556704205Z"","msg":hostname":""ip-10-150-1-74.us-west-2.compute.internal","pid"::4291"[1] f238b930-a5e0-4425-a639-165303326233 (45eb49ee-4fdc-4ffa-8697-e0375c977186) Active LiveRepairReady Active ds_transition to LiveRepair"}
2726 {a67c3619-5a95-4827-9e5d-6fcdacafd685 is now active with session: 29dc6a8c-0001-4949-b734-4fa4c3de49de","v":0,"name":"crucible","level":30"msg":"[1] Transition from WaitQuorum to Active","v":,"0time":","name"2023-09-22T23:08:02.556799686Z:"","crucible"hostname":","level"ip-10-150-1-74.us-west-2.compute.internal":,"v":0,"name":"crucible","level":3030,"time":"2023-09-22T23:08:02.556832004Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2727 ,"time":",2023-09-22T23:08:02.556851418Z""{,pid":""msg":"hostname":[1] Transition from LiveRepairReady to LiveRepair"","v":0ip-10-150-1-74.us-west-2.compute.internal",,"pid":4291}
2728 4291}
2729 {"msg":"[1] a67c3619-5a95-4827-9e5d-6fcdacafd685 (29dc6a8c-0001-4949-b734-4fa4c3de49de) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.556909698Z"{,"hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg,"":pid":""name":"crucible","level":30[2] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"4291time":"}
2730 2023-09-22T23:08:02.556965994Z,""time{,"":"hostnamemsg"":"ip-10-150-1-74.us-west-2.compute.internal","pid":42912023-09-22T23:08:02.556975141Z"},
2731 "hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":"4291msg":"}[1] 0 final dependency list []","v":0,"
2732 {":"[1] Transition from Active to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.557053286Z","hostname":""ip-10-150-1-74.us-west-2.compute.internal","msg"pid":4291:"}
2733 [2] Transition from New to WaitActive","v":{0,"name"":msg":""namecrucible"",":level"[1] a67c3619-5a95-4827-9e5d-6fcdacafd685 (29dc6a8c-0001-4949-b734-4fa4c3de49de) Active Faulted Active ds_transition to LiveRepairReady"",:crucible30,"time":"2023-09-22T23:08:02.557096468Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2734 {"msg":""[2] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) Active Active WaitActive ds_transition to WaitQuorum",",""vlevel"v"::0,"name":"crucible","level":030,"name":"crucible","level":30":30,"time":"2023-09-22T23:08:02.557155027Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,4291"time":}"
2735 {2023-09-22T23:08:02.557159392Z","{,hostname"""msgtime"":""::"2023-09-22T23:08:02.557164099Z","[1] Transition from Faulted to LiveRepairReady"hostname"",:""v"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internalmsg"",":"","pid":4291}
2736 :Crucible stats registered with UUID: ddd0b289-599b-47c8-9e88-afd221589469"0,","name"v:"":{crucible0",","name":level"":"30crucible"msg,":"[2] Transition from WaitActive to WaitQuorum"","level":v"30:0,"name":"crucible","level"pid:"30:,"4291time":",2023-09-22T23:08:02.557218759Z"","":"hostname":"downstairs","}ip-10-150-1-74.us-west-2.compute.internaltime"",:""pid
2737 2023-09-22T23:08:02.557226531Z"":,4291,"}"
2738 hostname"time"{"msg:"":":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.557232588Z,""pid":,4291[1] a67c3619-5a95-4827-9e5d-6fcdacafd685 (29dc6a8c-0001-4949-b734-4fa4c3de49de) Active LiveRepairReady Active ds_transition to LiveRepair"}",
2739 "{hostname"{"msg":"":"msg":"ip-10-150-1-74.us-west-2.compute.internal"[1] 0 final dependency list [JobId(1006)]"Crucible ddd0b289-599b-47c8-9e88-afd221589469 has session id: 6162f099-e560-430c-abf9-296ea61a199b",,,""vpid":4291":0,"name}"
2740 v":0,"{name":"crucible","level"":msg"30:":"crucible","time":","2023-09-22T23:08:02.557310463Z"level",:"hostname"30:"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2741 {"msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":30,"[2] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) Active Active WaitQuorum ds_transition to Active"time":","v"2023-09-22T23:08:02.557328946Z":0,","hostname"name,:"""time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.557344011Z":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"crucible:4291}
2742 ","level"{:30"msg":""pidWrite to Extent 0:1:9 under repairv""":,":v0":42910,","name"name:"",:crucible""",crucible""",level"":":level":30downstairs40"},
2743 "time":"2023-09-22T23:08:02.557377513Z","hostname":,{",ip-10-150-1-74.us-west-2.compute.internal"",""timepid":4291}
2744 "time":"2023-09-22T23:08:02.557393803Z","hostname":"{msgip-10-150-1-74.us-west-2.compute.internal"":msg"":"","pid":[1] 0 final dependency list [JobId(1007), JobId(1006)]"4291[2] Transition from WaitQuorum to Active"},
2745 ,"v":0{,"name":""crucible"msg":","level":30[1] 1004 final dependency list [JobId(1001), JobId(1002), JobId(1003)]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.557454546Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":,""time":"downstairs""2023-09-22T23:08:02.557449957Z"}v",:0,"name":"crucible"","hostnamelevel"":":"30ip-10-150-1-74.us-west-2.compute.internal":","pid":42912023-09-22T23:08:02.55739335Z,"":"downstairs""}
2746 ,"hostname":","ip-10-150-1-74.us-west-2.compute.internal"time,""pid":":
2747 42912023-09-22T23:08:02.557485605Z"}
2748 {,"hostname":"{"msg":"ip-10-150-1-74.us-west-2.compute.internal"","msgpid[1] 1005 final dependency list [JobId(1004), JobId(1001)]"":","":v"4291:0,"}name[0] ddd0b289-599b-47c8-9e88-afd221589469 (9d011751-8e3e-40ec-9a26-e2c509116656) New New New ds_transition to WaitActive"":",
2749 {"msg":"crucible"",v"47522d2c-b387-4d7d-a896-16a922aa39ba is now active with session: 704620be-2e8a-473a-8f41-8d5ed8675af2level":30","v":0,"name":"crucible","level":30",":time":"02023-09-22T23:08:02.557585526Z",","hostname":name"":"crucible"ip-10-150-1-74.us-west-2.compute.internal,",,""pid":4291,"":"downstairs"}
2750 time":"2023-09-22T23:08:02.557594145Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2751 {"msg":"[0] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30"level":30,"time":"2023-09-22T23:08:02.557678645Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2752 {,"time"":"msg":"2023-09-22T23:08:02.557694139Z"[0] Transition from Active to Faulted",",hostname":""v":0,ip-10-150-1-74.us-west-2.compute.internal"",name""pid":"crucible","level":30:4291}
2753 ,"time":"2023-09-22T23:08:02.557734801Z"{,"hostname":"ip-10-150-1-74.us-west-2.compute.internal""msg",":pid":4291}
2754 "{[0] Transition from New to WaitActive""msg":,""v":0,"name":"crucible","level":30[0] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) Faulted Active Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.557783399Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2755 test live_repair::repair_test::test_repair_dep_cleanup_some ... ,ok"
2756 time":"2023-09-22T23:08:02.557790645Z"test live_repair::repair_test::test_repair_dep_cleanup_sk_repair ... ,"ok
2757 hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}
2758 {"msg":""msg":"[0] ddd0b289-599b-47c8-9e88-afd221589469 (9d011751-8e3e-40ec-9a26-e2c509116656) WaitActive New New ds_transition to WaitQuorum"[0] Transition from Faulted to LiveRepairReady",",v":"0v",:"name0,"name":"crucible","level":30":"crucible","level":30,"time":"2023-09-22T23:08:02.557901515Z",,""hostname"time":":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.557897532Z,""pid":,"4291hostname":"}
2759 ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
2760 {"msg":""msg":"[0] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) LiveRepairReady Active Active ds_transition to LiveRepair","v"[0] Transition from WaitActive to WaitQuorum":0,","namev":":"0crucible",","name"level":":crucible"30,"level":30,"time":","2023-09-22T23:08:02.557992933Z"time":","hostname":"2023-09-22T23:08:02.557995131Z","ip-10-150-1-74.us-west-2.compute.internalhostname"":","pid":4291ip-10-150-1-74.us-west-2.compute.internal","}pid"
2761 {"msg":"[0] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":30,"time":":2023-09-22T23:08:02.558073613Z"4291,"hostname":"}
2762 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2763 {"msg":"{Waiting for Close + ReOpen jobs","v":0,""name":"msg"crucible:"","level":30[0] ddd0b289-599b-47c8-9e88-afd221589469 (9d011751-8e3e-40ec-9a26-e2c509116656) WaitQuorum New New ds_transition to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.558127041Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2764 ,"time":"2023-09-22T23:08:02.558136732Z{","hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pidRE:0 repair extent with ids 1000,1001,1002,1003 deps:[]"":,4291"v":}0
2765 ,"name":"crucible","level":{30"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.558176542Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"}time":
2766 {"msg":"RE:0 close id:1000 queued, notify DS","v":{0,"name":""crucible"msg":,""level":30Crucible stats registered with UUID: 6cc6b842-623b-4ef4-82a7-54fed8104c50","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.558224792Z",""hostname":","2023-09-22T23:08:02.558187001Ztime""ip-10-150-1-74.us-west-2.compute.internal,"hostname":"","ip-10-150-1-74.us-west-2.compute.internalpid"":,"4291pid":}
2767 :"{2023-09-22T23:08:02.558232248Z",""msg"hostname"::""ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2768 RE:0 Wait for result from close command 1000:1"4291,"v":0,"{name"}:"
2769 "crucible"msg":","level":30{Crucible 6cc6b842-623b-4ef4-82a7-54fed8104c50 has session id: 9828f64b-5ac8-46a1-a32f-54d5107bf3f1","v":0,""name":"msg"crucible":","level":30[1] ddd0b289-599b-47c8-9e88-afd221589469 (9d011751-8e3e-40ec-9a26-e2c509116656) Active New New ds_transition to WaitActive","v":,0","time"name:"":"crucible"2023-09-22T23:08:02.558296568Z",",level"":hostname"30:"ip-10-150-1-74.us-west-2.compute.internal,"","time"pid:"":42912023-09-22T23:08:02.558306885Z"}
2770 ,"time":"2023-09-22T23:08:02.558322383Z","hostname":{"ip-10-150-1-74.us-west-2.compute.internal"","msg"pid"::"4291}
2771 Crucible stats registered with UUID: 4d2d5af5-94fb-4532-8477-38add7c548e4","v":0,"{name":"crucible",""level":msg"30:"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.558369873Z",,""hostname":"hostname":",ip-10-150-1-74.us-west-2.compute.internal"",ip-10-150-1-74.us-west-2.compute.internal""time"pid",::4291""}pid2023-09-22T23:08:02.558378202Z"
2772 ",:"4291hostname":"{}
2773 ip-10-150-1-74.us-west-2.compute.internal"","msg"pid":{:"4291}
2774 "Crucible 4d2d5af5-94fb-4532-8477-38add7c548e4 has session id: 46d4e137-4411-47b8-a10b-3e746949a248"msg":","v":0{,"name":"crucible",""[0] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) New New New ds_transition to WaitActive"levelmsg"":",:"30v":0,"name":"crucible"[1] ddd0b289-599b-47c8-9e88-afd221589469 (9d011751-8e3e-40ec-9a26-e2c509116656) Active WaitActive New ds_transition to WaitQuorum",","level"v"::300,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.558440566Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2775 ,"time":"{,"2023-09-22T23:08:02.558449185Z"time":"",msg"2023-09-22T23:08:02.558453357Z""hostname",:":""hostname":"ip-10-150-1-74.us-west-2.compute.internal",ip-10-150-1-74.us-west-2.compute.internal"",pid[0] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) New New New ds_transition to WaitActive"""pid,:"4291":v"4291:}0
2776 },
2777 {"name":"crucible","level"{"msg"::"30"msg":"[0] Transition from New to WaitActive","v":[1] Transition from WaitActive to WaitQuorum"0,,""namev""::"0crucible",","name"level"::"30crucible",","level"time:":30"2023-09-22T23:08:02.558508174Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2778 ,"time":"2023-09-22T23:08:02.558522034Z"{,",hostname""":time"":msg"":ip-10-150-1-74.us-west-2.compute.internal"",2023-09-22T23:08:02.558526836Z""[0] Transition from New to WaitActive"pid",:,""4291hostname"v":}":
2779 ip-10-150-1-74.us-west-2.compute.internal"0,","pidname"{:":"4291crucible","level":"30}msg"
2780 :"{[0] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) WaitActive New New ds_transition to WaitQuorum",,""time"msg":"":v"":0,"2023-09-22T23:08:02.558571574Z"name":"[1] ddd0b289-599b-47c8-9e88-afd221589469 (9d011751-8e3e-40ec-9a26-e2c509116656) Active WaitQuorum New ds_transition to Active"crucible",,"",hostname"v":""level"::0ip-10-150-1-74.us-west-2.compute.internal"30,,""name"pid":":crucible"4291,"level"}:
2781 30{","msg":time":""2023-09-22T23:08:02.558605809Z","hostname":"[0] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) WaitActive New New ds_transition to WaitQuorum",ip-10-150-1-74.us-west-2.compute.internal""v":,"0pid",:"4291,name"":}time""
2782 :crucible"","level":{2023-09-22T23:08:02.558616995Z30",""hostname"msg":":"ip-10-150-1-74.us-west-2.compute.internal"[0] Transition from WaitActive to WaitQuorum",",pid"":v":42910,"name"}:"
2783 crucible",,""time"level:"{":302023-09-22T23:08:02.558649103Z",""hostname"msg"::""ip-10-150-1-74.us-west-2.compute.internal"[1] Transition from WaitQuorum to Active,""pid":,"4291v":0},
2784 ,""name"time:""{crucible":","level""msg"::302023-09-22T23:08:02.558673183Z"","hostname":"[0] Transition from WaitActive to WaitQuorum","v":ip-10-150-1-74.us-west-2.compute.internal0",","namepid""::"4291crucible","}
2785 level":,"30time":"{2023-09-22T23:08:02.558698748Z",""hostname"msg":":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2786 [0] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) WaitQuorum New New ds_transition to Active",","time":v{"":0,"2023-09-22T23:08:02.558716408Zname""":,"msg"":hostname"":crucible"","level":ip-10-150-1-74.us-west-2.compute.internal"30,"[2] ddd0b289-599b-47c8-9e88-afd221589469 (9d011751-8e3e-40ec-9a26-e2c509116656) Active Active New ds_transition to WaitActive"pid":,"4291v":0},
2787 ",name":""time"crucible{:"","level2023-09-22T23:08:02.558749307Z""":msg",30":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291[0] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) WaitQuorum New New ds_transition to Active"}
2788 ,"v":0,"name":"{crucible","level":30"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.558791323Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2789 ,"{,"time":"time""msg2023-09-22T23:08:02.558774307Z":"":","hostname2023-09-22T23:08:02.558800315Z"[0] Transition from WaitQuorum to Active"",:","hostname"v:"":"0,"ip-10-150-1-74.us-west-2.compute.internalname":"ip-10-150-1-74.us-west-2.compute.internal""crucible,",""pid",:"4291level":pid}30
2790 ":4291}
2791 {,"time":"2023-09-22T23:08:02.558843638Z","hostname"":"msg{":ip-10-150-1-74.us-west-2.compute.internal"","pid":4291"}
2792 msg":"{[1] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) Active New New ds_transition to WaitActive""msg[2] Transition from New to WaitActive":,"""v",:"0v",:[1] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) Active New New ds_transition to WaitActive""0name",:,"""cruciblename"v"::""0,,crucible""name","":level"":crucible"level",30":level":3030,"time":"2023-09-22T23:08:02.558896292Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2793 {"msg":","[1] Transition from New to WaitActive"time","v,"::0"",time""2023-09-22T23:08:02.558895262Z"name":",:crucible"","hostnamelevel"":30"2023-09-22T23:08:02.558897073Z":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal"4291,,""time":"pid"}2023-09-22T23:08:02.558927248Z"
2794 ,":hostname":"4291ip-10-150-1-74.us-west-2.compute.internal"},
2795 "{pid":4291}
2796 {"{msg":""msg":""msg"[1] Transition from New to WaitActive":","v":[1] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) Active WaitActive New ds_transition to WaitQuorum"0,",v":"0,name""name":":"[2] ddd0b289-599b-47c8-9e88-afd221589469 (9d011751-8e3e-40ec-9a26-e2c509116656) Active Active WaitActive ds_transition to WaitQuorumcrucible"crucible"",","v,""level"::level030",":name30":"crucible","level":30,"time":"2023-09-22T23:08:02.558979251Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2797 {"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level,"":30time":"2023-09-22T23:08:02.558982039Z",","time":hostname"":,""time":"2023-09-22T23:08:02.558985357Z"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.558999189Z",,"",pid"hostname"hostname""::":"4291ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"},
2798 ""pid":,"4291pid"}{:
2799 "4291msg"{}:""msg":"
2800 [1] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) Active WaitQuorum New ds_transition to Active"[1] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) Active WaitActive New ds_transition to WaitQuorum","v",:0"{,"v":name":""0msgcrucible,""","name:level":"":30"[2] Transition from WaitActive to WaitQuorumcrucible"",","level"v:":,"30time":"0,"2023-09-22T23:08:02.559057093Z"name":,""hostname":"crucible","ip-10-150-1-74.us-west-2.compute.internal","level"pid"::429130}
2801 {"msg":","[1] Transition from WaitQuorum to Active"time","v":0:","name":"crucible",2023-09-22T23:08:02.559065171Z""level":30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,4291","time":"time"}2023-09-22T23:08:02.559087442Z"
2802 :","hostname":"2023-09-22T23:08:02.559075749Z"ip-10-150-1-74.us-west-2.compute.internal",",{pid":"4291}
2803 "hostname"msg"::"{""msg":"[1] Transition from WaitActive to WaitQuorum"ip-10-150-1-74.us-west-2.compute.internal,"","[2] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) Active Active New ds_transition to WaitActive"pid","v"v":0::,"42910name":",crucible""},
2804 name{"msg":"[2] ddd0b289-599b-47c8-9e88-afd221589469 (9d011751-8e3e-40ec-9a26-e2c509116656) Active Active WaitQuorum ds_transition to Active","v":0,"name":"crucible","level":30,""time"::""crucible"2023-09-22T23:08:02.55915953Z",","level":hostname":30"ip-10-150-1-74.us-west-2.compute.internal","pid":4291"}
2805 level":30{"msg":"[2] Transition from WaitQuorum to Active",","time":v,""":time":"02023-09-22T23:08:02.559182408Z"2023-09-22T23:08:02.559192565Z",,""name",:hostname":"""hostnamecrucible""ip-10-150-1-74.us-west-2.compute.internal",":,"level"":pid"30ip-10-150-1-74.us-west-2.compute.internal":4291,"}pid"
2806 :4291{}
2807 "msg":"[2] Transition from New to WaitActive","v":0,"name,":"{crucible"","leveltime"""::"30msg":"2023-09-22T23:08:02.559221817Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","[1] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) Active WaitQuorum New ds_transition to Activepid"":,,4291""time"}:"
2808 v"2023-09-22T23:08:02.559244963Z:","0hostname":",{"ip-10-150-1-74.us-west-2.compute.internal"name",""pid":msg":"crucible:"4291"},
2809 "level"ddd0b289-599b-47c8-9e88-afd221589469 is now active with session: 9d011751-8e3e-40ec-9a26-e2c509116656"{:,"30msg":""v":0,"name":"crucible[2] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) Active Active WaitActive ds_transition to WaitQuorum"","v":0,,""name"level":":crucible"30,"level":30,"time":","2023-09-22T23:08:02.559285503Z"time":","2023-09-22T23:08:02.559297392Z"hostname,"":hostname":""ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal4291,}"
2810 ,""{pid"time"":msg"::"4291"[2] Transition from WaitActive to WaitQuorum"},"
2811 2023-09-22T23:08:02.559296874Z"v":0,",name"":"hostnamecrucible""{,":level":"30"msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":[1] Transition from WaitQuorum to Active"4291,"v"}:
2812 0,",time":""name"{2023-09-22T23:08:02.559338336Z":","crucible"hostname":"","msglevelip-10-150-1-74.us-west-2.compute.internal"",":"30pid"::4291"}
2813 {"msg":"[0] ddd0b289-599b-47c8-9e88-afd221589469 (9d011751-8e3e-40ec-9a26-e2c509116656) Active Active Active ds_transition to Faulted","v":0[2] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) Active Active WaitQuorum ds_transition to Active",","namev":":0","crucible"name":",","crucible"timelevel",""::30"level":302023-09-22T23:08:02.559362311Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid,""time"::"42912023-09-22T23:08:02.559386034Z",}"
2814 hostname":","time"ip-10-150-1-74.us-west-2.compute.internal":","{pid2023-09-22T23:08:02.55938561Z"":4291},"
2815 "msghostname"{":":msg":"""[2] Transition from WaitQuorum to Active","v":ip-10-150-1-74.us-west-2.compute.internal"0,",name":""crucible"pid,"[2] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) Active Active New ds_transition to WaitActive""level"::,4291"30v"}:
2816 0,","time":name"":{"2023-09-22T23:08:02.559430161Z","crucible"hostname"",:msg"":level""":ip-10-150-1-74.us-west-2.compute.internal"30,"[0] Transition from Active to Faulted"pid":4291,"}
2817 v":{0,""msg":"name":"crucible","level"4d2d5af5-94fb-4532-8477-38add7c548e4 is now active with session: b46ad456-c0c5-43cb-8b84-fa6016b7a39b":,"30v":0,"name":"crucible","level":,"30time":"2023-09-22T23:08:02.559449698Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",","time":"pid":2023-09-22T23:08:02.55946625Z"4291,"hostname":"}
2818 ip-10-150-1-74.us-west-2.compute.internal",,""pid":4291{time"}:
2819 ""{msg":""msg":"2023-09-22T23:08:02.55946281Z"[2] Transition from New to WaitActive",,""v[0] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) Active Active Active ds_transition to Faultedhostname":"":,""v":00,",ip-10-150-1-74.us-west-2.compute.internal"namename":""crucible"":,","cruciblelevel":"30",pid"":level"4291:30}
2820 ,"time":"2023-09-22T23:08:02.559514406Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2821 "msg"{:,"""time":msg":""[0] Transition from Active to Faulted","2023-09-22T23:08:02.559519119Z"v":0[0] ddd0b289-599b-47c8-9e88-afd221589469 (9d011751-8e3e-40ec-9a26-e2c509116656) Faulted Active Active ds_transition to LiveRepairReady",,,"""hostname"v"::name"":"0crucible",",ip-10-150-1-74.us-west-2.compute.internallevel":"30"name",:""pid":crucible"4291,"}level":,"30time":"
2822 2023-09-22T23:08:02.559553402Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}
2823 ,""time"msg"{::""msg":""2023-09-22T23:08:02.559562215Z","hostname":"[0] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) Faulted Active Active ds_transition to LiveRepairReady","v":0,"ip-10-150-1-74.us-west-2.compute.internal"name"[2] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) Active Active WaitActive ds_transition to WaitQuorum",:""crucible",pid"",":level4291":v"}30:
2824 0,"name":","time":"{crucible"2023-09-22T23:08:02.559602681Z",,""hostname""level"msg"::":"30ip-10-150-1-74.us-west-2.compute.internal"[0] Transition from Faulted to LiveRepairReady","pid":,4291"}v"
2825 :0{,""name"msg:":""crucible"[0] Transition from Faulted to LiveRepairReady",,","level"v":0:,""30name"time"::""crucible","level":302023-09-22T23:08:02.559618411Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4291time":"}2023-09-22T23:08:02.559644024Z"
2826 ,",hostname":""{time"ip-10-150-1-74.us-west-2.compute.internal":,"""msg"pid"::2023-09-22T23:08:02.559640049Z"4291"},
2827 "[2] Transition from WaitActive to WaitQuorum{hostname""":msg":",""v":0,ip-10-150-1-74.us-west-2.compute.internal"",[0] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) LiveRepairReady Active Active ds_transition to LiveRepairname""":pid"","crucible":,v4291"":level0},
2828 ""name":":crucible","{level":3030"msg":","time":"2023-09-22T23:08:02.559700189Z","[0] ddd0b289-599b-47c8-9e88-afd221589469 (9d011751-8e3e-40ec-9a26-e2c509116656) LiveRepairReady Active Active ds_transition to LiveRepair"hostname":","v":ip-10-150-1-74.us-west-2.compute.internal","0pid":,4291"}name"
2829 :,"{"crucibletime"":""msg":","2023-09-22T23:08:02.559701031Z"level"[0] Transition from LiveRepairReady to LiveRepair",,":v":300","namehostname"":":crucible"","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2830 ,"time":"2023-09-22T23:08:02.559736044Z","hostname":",{"ip-10-150-1-74.us-west-2.compute.internal","time"pid"::4291"}
2831 2023-09-22T23:08:02.559731271Z""{,"msg"hostname""::""msg":"Waiting for Close + ReOpen jobs"ip-10-150-1-74.us-west-2.compute.internal",",v":"0pid[2] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) Active Active WaitQuorum ds_transition to Active"",,":name4291":""crucible"}v"
2832 ,":level":300,"name":"{crucible","level":30,"time":"2023-09-22T23:08:02.559782311Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2833 {"msg":"RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.559787881Z""msg",,":""time":"hostname":"2023-09-22T23:08:02.559801664Z"[0] Transition from LiveRepairReady to LiveRepair,""hostname":,"ip-10-150-1-74.us-west-2.compute.internal"",v""ip-10-150-1-74.us-west-2.compute.internalpid:"0",,":pid4291":"}name"4291
2834 }
2835 :"{{crucible"",msg":"""level"msg"::RE:0 close id:1000 queued, notify DS30"","v":0,"name":"[2] Transition from WaitQuorum to Active"crucible",,""level":30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.559850269Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,}
2836 "time{":""msg":"2023-09-22T23:08:02.5598442Z"RE:0 Wait for result from close command 1000:1",",v"":,hostname"0","time":name":""crucible":,""level":ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:02.559856111Z"30,,""pid":hostname"4291:"}
2837 ,"ip-10-150-1-74.us-west-2.compute.internaltime":"","2023-09-22T23:08:02.559882266Z"pid,""hostname:{":"4291ip-10-150-1-74.us-west-2.compute.internal","}"pidmsg"":
2838 4291:}"
2839 {Waiting for Close + ReOpen jobs","v":"0msg",:""name":"crucible","level":306cc6b842-623b-4ef4-82a7-54fed8104c50 is now active with session: b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.559922195Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","pid":2023-09-22T23:08:02.559929135Z"4291,"}hostname":"
2840 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
2841 {"{msg":""msg":"RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]","v":0,"name":"crucible","[0] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) Active Active Active ds_transition to Faulted"level":,"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.559981726Z",",time":""hostname":"2023-09-22T23:08:02.559986603Z","ip-10-150-1-74.us-west-2.compute.internal"hostname",:""pid":4291}ip-10-150-1-74.us-west-2.compute.internal
2842 ","pid":4291{}
2843 "msg":"RE:0 close id:1000 queued, notify DS"{,"v":0,""name"msg"::""crucible","[0] Transition from Active to Faulted"level":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.560035338Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:02.560040396Z"pid":,4291"hostname"}:
2844 "ip-10-150-1-74.us-west-2.compute.internal","{pid":4291}"
2845 msg":"RE:0 Wait for result from close command 1000:1"{,"v":0","msg":name"":"crucible","level":30[0] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) Faulted Active Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:02.560086627Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"}time"
2846 :"2023-09-22T23:08:02.560095294Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
28472023-09-22T23:08:02.560ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
28482023-09-22T23:08:02.560ZINFOcrucible: [0] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) LiveRepairReady Active Active ds_transition to LiveRepair
28492023-09-22T23:08:02.560ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
28502023-09-22T23:08:02.560ZINFOcrucible: Waiting for Close + ReOpen jobs
28512023-09-22T23:08:02.560ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
28522023-09-22T23:08:02.560ZINFOcrucible: RE:0 close id:1000 queued, notify DS
28532023-09-22T23:08:02.560ZINFOcrucible: RE:0 Wait for result from close command 1000:1
28542023-09-22T23:08:03.425ZINFOcrucible: Checking if live repair is needed upstairs = 1
28552023-09-22T23:08:03.425ZINFOcrucible: No Live Repair required at this time upstairs = 1
28562023-09-22T23:08:03.426ZINFOcrucible: Checking if live repair is needed upstairs = 1
28572023-09-22T23:08:03.426ZINFOcrucible: No Live Repair required at this time upstairs = 1
28582023-09-22T23:08:03.427ZINFOcrucible: Checking if live repair is needed upstairs = 1
28592023-09-22T23:08:03.427ZINFOcrucible: No Live Repair required at this time upstairs = 1
28602023-09-22T23:08:03.430ZINFOcrucible: [0] ed581aed-4fb6-4c12-84d1-a857283bcda2 looper connected looper = 0 upstairs = 1
28612023-09-22T23:08:03.430ZINFOcrucible: [0] Proc runs for 127.0.0.1:60909 in state Offline upstairs = 1
28622023-09-22T23:08:03.430ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs upstairs = 1
28632023-09-22T23:08:03.431ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: ed581aed-4fb6-4c12-84d1-a857283bcda2, session_id: 99a7dfa5-d205-4d07-9135-9f9e70387e3a, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 1
28642023-09-22T23:08:03.431ZINFOcrucible: [0] upstairs guest_io_ready=TRUE, promote! session 99a7dfa5-d205-4d07-9135-9f9e70387e3a upstairs = 1
28652023-09-22T23:08:03.431ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: ed581aed-4fb6-4c12-84d1-a857283bcda2, session_id: 99a7dfa5-d205-4d07-9135-9f9e70387e3a, gen: 1 } downstairs = 1
28662023-09-22T23:08:03.431ZINFOcrucible: negotiate packet RegionInfoPlease downstairs = 1
28672023-09-22T23:08:03.431ZINFOcrucible: [0] downstairs client at 127.0.0.1:60909 has UUID 761a4173-d6d6-4e0b-98cb-d77e4463419a upstairs = 1
28682023-09-22T23:08:03.431ZINFOcrucible: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: 761a4173-d6d6-4e0b-98cb-d77e4463419a, encrypted: false, database_read_version: 1, database_write_version: 1 } upstairs = 1
28692023-09-22T23:08:03.431ZINFOcrucible: Returning client:0 UUID:761a4173-d6d6-4e0b-98cb-d77e4463419a matches upstairs = 1
28702023-09-22T23:08:03.431ZINFOcrucible: [0] send last flush ID to this DS: 0 upstairs = 1
28712023-09-22T23:08:03.431ZINFOcrucible: ed581aed-4fb6-4c12-84d1-a857283bcda2 Offline Active Active upstairs = 1
28722023-09-22T23:08:03.431ZINFOcrucible: negotiate packet LastFlush { last_flush_number: JobId(0) } downstairs = 1
2873 waiting for ds1 message in test_replay_occurs
28742023-09-22T23:08:03.432ZINFOcrucible: [0] Replied this last flush ID: 0 upstairs = 1
28752023-09-22T23:08:03.432ZINFOcrucible: [0] ed581aed-4fb6-4c12-84d1-a857283bcda2 (99a7dfa5-d205-4d07-9135-9f9e70387e3a) Offline Active Active ds_transition to Replay upstairs = 1
28762023-09-22T23:08:03.432ZINFOcrucible: [0] Transition from Offline to Replay upstairs = 1
28772023-09-22T23:08:03.432ZINFOcrucible: [0] ed581aed-4fb6-4c12-84d1-a857283bcda2 Transition from Replay to Active upstairs = 1
28782023-09-22T23:08:03.432ZINFOcrucible: [0] ed581aed-4fb6-4c12-84d1-a857283bcda2 (99a7dfa5-d205-4d07-9135-9f9e70387e3a) Replay Active Active ds_transition to Active upstairs = 1
28792023-09-22T23:08:03.432ZINFOcrucible: [0] Transition from Replay to Active upstairs = 1
28802023-09-22T23:08:03.432ZINFOcrucible: [0] Starts cmd_loop upstairs = 1
28812023-09-22T23:08:03.432ZINFOcrucible: [0] 127.0.0.1:60909 task reports connection:true upstairs = 1
28822023-09-22T23:08:03.432ZINFOcrucible: ed581aed-4fb6-4c12-84d1-a857283bcda2 Active Active Active upstairs = 1
28832023-09-22T23:08:03.432ZINFOcrucible: Set check for repair upstairs = 1
28842023-09-22T23:08:03.441ZINFOcrucible: Checking if live repair is needed upstairs = 1
28852023-09-22T23:08:03.441ZINFOcrucible: No Live Repair required at this time upstairs = 1
28862023-09-22T23:08:03.556ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
28872023-09-22T23:08:03.556ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
28882023-09-22T23:08:03.556ZINFOcrucible: [0] client skip 2 in process jobs because fault = downstairs
28892023-09-22T23:08:03.556ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
28902023-09-22T23:08:03.556ZINFOcrucible: [0] 47522d2c-b387-4d7d-a896-16a922aa39ba (704620be-2e8a-473a-8f41-8d5ed8675af2) LiveRepair Active Active ds_transition to Faulted
28912023-09-22T23:08:03.556ZINFOcrucible: [0] Transition from LiveRepair to Faulted
28922023-09-22T23:08:03.556ZINFOcrucible: Now ACK the close job
28932023-09-22T23:08:03.556ZINFOcrucible: Waiting for 3 jobs (currently 2)
28942023-09-22T23:08:03.557ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
28952023-09-22T23:08:03.557ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
28962023-09-22T23:08:03.558ZINFOcrucible: Now ACK the close job
28972023-09-22T23:08:03.558ZINFOcrucible: Waiting for 3 jobs (currently 2)
28982023-09-22T23:08:03.559ZINFOcrucible: Repair for extent 0 s:1 d:[ClientId(0)] = downstairs
28992023-09-22T23:08:03.559ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
29002023-09-22T23:08:03.560ZINFOcrucible: Waiting for 3 jobs (currently 2)
2901 {{"msg":""No repair needed for extent 0"msg",:""v":0Waiting for 3 jobs (currently 2)","name":","crucible"v",:"level"0:,30"name":"crucible","level":30,"time":"2023-09-22T23:08:03.560214306Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs",}"
2902 time":"2023-09-22T23:08:03.560219058Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internalRE:0 Wait for result from repair command 1001:2"","v":,0","pidname":""crucible":,"4291level":30}
2903 ,"time":"2023-09-22T23:08:03.560263061Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":"4291msg"}
2904 :"No repair needed for extent 0","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:03.560289567Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs"}
29052023-09-22T23:08:03.560ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
29062023-09-22T23:08:03.649ZWARNcrucible: [2] flow control start upstairs = 1
29072023-09-22T23:08:03.649ZWARNcrucible: [0] flow control start upstairs = 1
29082023-09-22T23:08:03.649ZWARNcrucible: [1] flow control start upstairs = 1
2909 waiting for ds1 message in test_replay_occurs
29102023-09-22T23:08:04.433ZWARNcrucible: [0] flow control end upstairs = 1
29112023-09-22T23:08:04.433ZINFOcrucible: Checking if live repair is needed upstairs = 1
29122023-09-22T23:08:04.433ZINFOcrucible: No Live Repair required at this time upstairs = 1
29132023-09-22T23:08:04.558ZINFOcrucible: Waiting for 4 jobs (currently 3)
29142023-09-22T23:08:04.558ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
29152023-09-22T23:08:04.558ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
29162023-09-22T23:08:04.560ZINFOcrucible: Now ACK the repair job
29172023-09-22T23:08:04.560ZINFOcrucible: Waiting for 4 jobs (currently 3)
29182023-09-22T23:08:04.560ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
29192023-09-22T23:08:04.561ZINFOcrucible: Waiting for 4 jobs (currently 3)
29202023-09-22T23:08:04.561ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
29212023-09-22T23:08:04.561ZINFOcrucible: Waiting for 4 jobs (currently 3)
29222023-09-22T23:08:04.561ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
29232023-09-22T23:08:04.651ZWARNcrucible: [0] flow control end upstairs = 1
29242023-09-22T23:08:04.651ZWARNcrucible: [1] flow control end upstairs = 1
29252023-09-22T23:08:04.651ZWARNcrucible: [2] flow control end upstairs = 1
2926 test dummy_downstairs_tests::protocol_test::test_replay_occurs ... ok
29272023-09-22T23:08:05.435ZINFOcrucible: Crucible stats registered with UUID: f2efd0c8-30aa-476f-937f-c6a917e6b31a
29282023-09-22T23:08:05.435ZINFOcrucible: Crucible f2efd0c8-30aa-476f-937f-c6a917e6b31a has session id: 9293bb25-dab9-452a-9395-23615764e473
29292023-09-22T23:08:05.435ZINFOcrucible: [0] f2efd0c8-30aa-476f-937f-c6a917e6b31a (96b0b6bd-173c-4e2c-90e7-f7302532f9ef) New New New ds_transition to WaitActive
29302023-09-22T23:08:05.435ZINFOcrucible: [0] Transition from New to WaitActive
29312023-09-22T23:08:05.435ZINFOcrucible: [0] f2efd0c8-30aa-476f-937f-c6a917e6b31a (96b0b6bd-173c-4e2c-90e7-f7302532f9ef) WaitActive New New ds_transition to WaitQuorum
29322023-09-22T23:08:05.435ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
29332023-09-22T23:08:05.435ZINFOcrucible: [0] f2efd0c8-30aa-476f-937f-c6a917e6b31a (96b0b6bd-173c-4e2c-90e7-f7302532f9ef) WaitQuorum New New ds_transition to Active
29342023-09-22T23:08:05.435ZINFOcrucible: [0] Transition from WaitQuorum to Active
29352023-09-22T23:08:05.435ZINFOcrucible: [1] f2efd0c8-30aa-476f-937f-c6a917e6b31a (96b0b6bd-173c-4e2c-90e7-f7302532f9ef) Active New New ds_transition to WaitActive
29362023-09-22T23:08:05.436ZINFOcrucible: [1] Transition from New to WaitActive
29372023-09-22T23:08:05.436ZINFOcrucible: [1] f2efd0c8-30aa-476f-937f-c6a917e6b31a (96b0b6bd-173c-4e2c-90e7-f7302532f9ef) Active WaitActive New ds_transition to WaitQuorum
29382023-09-22T23:08:05.436ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
29392023-09-22T23:08:05.436ZINFOcrucible: [1] f2efd0c8-30aa-476f-937f-c6a917e6b31a (96b0b6bd-173c-4e2c-90e7-f7302532f9ef) Active WaitQuorum New ds_transition to Active
29402023-09-22T23:08:05.436ZINFOcrucible: [1] Transition from WaitQuorum to Active
29412023-09-22T23:08:05.436ZINFOcrucible: [2] f2efd0c8-30aa-476f-937f-c6a917e6b31a (96b0b6bd-173c-4e2c-90e7-f7302532f9ef) Active Active New ds_transition to WaitActive
29422023-09-22T23:08:05.436ZINFOcrucible: [2] Transition from New to WaitActive
29432023-09-22T23:08:05.436ZINFOcrucible: [2] f2efd0c8-30aa-476f-937f-c6a917e6b31a (96b0b6bd-173c-4e2c-90e7-f7302532f9ef) Active Active WaitActive ds_transition to WaitQuorum
29442023-09-22T23:08:05.436ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
29452023-09-22T23:08:05.436ZINFOcrucible: [2] f2efd0c8-30aa-476f-937f-c6a917e6b31a (96b0b6bd-173c-4e2c-90e7-f7302532f9ef) Active Active WaitQuorum ds_transition to Active
29462023-09-22T23:08:05.436ZINFOcrucible: [2] Transition from WaitQuorum to Active
29472023-09-22T23:08:05.436ZINFOcrucible: f2efd0c8-30aa-476f-937f-c6a917e6b31a is now active with session: 96b0b6bd-173c-4e2c-90e7-f7302532f9ef
29482023-09-22T23:08:05.436ZINFOcrucible: [0] f2efd0c8-30aa-476f-937f-c6a917e6b31a (96b0b6bd-173c-4e2c-90e7-f7302532f9ef) Active Active Active ds_transition to Faulted
29492023-09-22T23:08:05.436ZINFOcrucible: [0] Transition from Active to Faulted
29502023-09-22T23:08:05.436ZINFOcrucible: [0] f2efd0c8-30aa-476f-937f-c6a917e6b31a (96b0b6bd-173c-4e2c-90e7-f7302532f9ef) Faulted Active Active ds_transition to LiveRepairReady
29512023-09-22T23:08:05.436ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
29522023-09-22T23:08:05.436ZINFOcrucible: [0] f2efd0c8-30aa-476f-937f-c6a917e6b31a (96b0b6bd-173c-4e2c-90e7-f7302532f9ef) LiveRepairReady Active Active ds_transition to LiveRepair
29532023-09-22T23:08:05.436ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
29542023-09-22T23:08:05.436ZINFOcrucible: Waiting for Close + ReOpen jobs
29552023-09-22T23:08:05.436ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
29562023-09-22T23:08:05.436ZINFOcrucible: RE:0 close id:1000 queued, notify DS
29572023-09-22T23:08:05.436ZINFOcrucible: RE:0 Wait for result from close command 1000:1
29582023-09-22T23:08:05.519ZWARNcrucible: [0] flow control start upstairs = 1
29592023-09-22T23:08:05.558ZINFOcrucible: Now move the NoOp job forward
29602023-09-22T23:08:05.558ZINFOcrucible: Now ACK the NoOp job
29612023-09-22T23:08:05.558ZINFOcrucible: Finally, move the ReOpen job forward
29622023-09-22T23:08:05.558ZINFOcrucible: Now ACK the Reopen job
29632023-09-22T23:08:05.558ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
29642023-09-22T23:08:05.558ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
29652023-09-22T23:08:05.558ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
29662023-09-22T23:08:05.558ZWARNcrucible: RE:0 Bailing with error
2967 ----------------------------------------------------------------
2968 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
2969 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
2970 1 Acked 1000 FClose 0 Err Done Done false
2971 2 Acked 1001 NoOp 0 Skip Done Done false
2972 3 Acked 1002 NoOp 0 Skip Done Done false
2973 4 Acked 1003 Reopen 0 Skip Done Done false
2974 STATES DS:0 DS:1 DS:2 TOTAL
2975 New 0 0 0 0
2976 Sent 0 0 0 0
2977 Done 0 4 4 8
2978 Skipped 3 0 0 3
2979 Error 1 0 0 1
2980 Last Flush: 0 0 0
2981 Downstairs last five completed:
2982 Upstairs last five completed: 4 3 2 1
29832023-09-22T23:08:05.559ZINFOcrucible: Crucible stats registered with UUID: a539c30f-2c8a-4e0b-a419-6c092883f4c8
29842023-09-22T23:08:05.559ZINFOcrucible: Crucible a539c30f-2c8a-4e0b-a419-6c092883f4c8 has session id: 5ae92c7b-6713-4df8-9f2b-2c31010701d8
29852023-09-22T23:08:05.559ZINFOcrucible: [0] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) New New New ds_transition to WaitActive
29862023-09-22T23:08:05.559ZINFOcrucible: [0] Transition from New to WaitActive
29872023-09-22T23:08:05.559ZINFOcrucible: [0] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) WaitActive New New ds_transition to WaitQuorum
29882023-09-22T23:08:05.559ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
29892023-09-22T23:08:05.559ZINFOcrucible: [0] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) WaitQuorum New New ds_transition to Active
29902023-09-22T23:08:05.559ZINFOcrucible: [0] Transition from WaitQuorum to Active
29912023-09-22T23:08:05.559ZINFOcrucible: [1] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) Active New New ds_transition to WaitActive
29922023-09-22T23:08:05.559ZINFOcrucible: [1] Transition from New to WaitActive
29932023-09-22T23:08:05.559ZINFOcrucible: [1] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) Active WaitActive New ds_transition to WaitQuorum
29942023-09-22T23:08:05.559ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
29952023-09-22T23:08:05.559ZINFOcrucible: [1] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) Active WaitQuorum New ds_transition to Active
29962023-09-22T23:08:05.559ZINFOcrucible: [1] Transition from WaitQuorum to Active
29972023-09-22T23:08:05.559ZINFOcrucible: [2] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) Active Active New ds_transition to WaitActive
29982023-09-22T23:08:05.559ZINFOcrucible: [2] Transition from New to WaitActive
29992023-09-22T23:08:05.559ZINFOcrucible: [2] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) Active Active WaitActive ds_transition to WaitQuorum
30002023-09-22T23:08:05.559ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
30012023-09-22T23:08:05.560ZINFOcrucible: [2] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) Active Active WaitQuorum ds_transition to Active
30022023-09-22T23:08:05.560ZINFOcrucible: [2] Transition from WaitQuorum to Active
30032023-09-22T23:08:05.560ZINFOcrucible: a539c30f-2c8a-4e0b-a419-6c092883f4c8 is now active with session: 2bb8288f-73e5-46ca-83e3-b652d8887f09
30042023-09-22T23:08:05.560ZINFOcrucible: [0] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) Active Active Active ds_transition to Faulted
30052023-09-22T23:08:05.560ZINFOcrucible: [0] Transition from Active to Faulted
30062023-09-22T23:08:05.560ZINFOcrucible: [0] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) Faulted Active Active ds_transition to LiveRepairReady
30072023-09-22T23:08:05.560ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
30082023-09-22T23:08:05.560ZINFOcrucible: [0] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) LiveRepairReady Active Active ds_transition to LiveRepair
30092023-09-22T23:08:05.560ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
30102023-09-22T23:08:05.560ZINFOcrucible: Waiting for Close + ReOpen jobs
30112023-09-22T23:08:05.560ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
30122023-09-22T23:08:05.560ZINFOcrucible: RE:0 close id:1000 queued, notify DS
30132023-09-22T23:08:05.560ZINFOcrucible: RE:0 Wait for result from close command 1000:1
30142023-09-22T23:08:05.561ZINFOcrucible: Now move the NoOp job forward
30152023-09-22T23:08:05.561ZINFOcrucible: Now ACK the NoOp job
30162023-09-22T23:08:05.561ZINFOcrucible: Finally, move the ReOpen job forward
30172023-09-22T23:08:05.561ZINFOcrucible: Now ACK the repair job
30182023-09-22T23:08:05.561ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
30192023-09-22T23:08:05.562ZINFOcrucible: Crucible stats registered with UUID: 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d
30202023-09-22T23:08:05.562ZINFOcrucible: Crucible 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d has session id: 9422dc73-f793-41ba-8b59-09b019134370
30212023-09-22T23:08:05.562ZINFOcrucible: [0] 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d (57ff38fd-1d30-4627-9201-6d66cd8c9a9f) New New New ds_transition to WaitActive
30222023-09-22T23:08:05.562ZINFOcrucible: [0] Transition from New to WaitActive
30232023-09-22T23:08:05.562ZINFOcrucible: [0] 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d (57ff38fd-1d30-4627-9201-6d66cd8c9a9f) WaitActive New New ds_transition to WaitQuorum
30242023-09-22T23:08:05.562ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
30252023-09-22T23:08:05.562ZINFOcrucible: [0] 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d (57ff38fd-1d30-4627-9201-6d66cd8c9a9f) WaitQuorum New New ds_transition to Active
30262023-09-22T23:08:05.562ZINFOcrucible: [0] Transition from WaitQuorum to Active
30272023-09-22T23:08:05.562ZINFOcrucible: [1] 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d (57ff38fd-1d30-4627-9201-6d66cd8c9a9f) Active New New ds_transition to WaitActive
30282023-09-22T23:08:05.562ZINFOcrucible: [1] Transition from New to WaitActive
30292023-09-22T23:08:05.562ZINFOcrucible: [1] 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d (57ff38fd-1d30-4627-9201-6d66cd8c9a9f) Active WaitActive New ds_transition to WaitQuorum
30302023-09-22T23:08:05.562ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
30312023-09-22T23:08:05.562ZINFOcrucible: [1] 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d (57ff38fd-1d30-4627-9201-6d66cd8c9a9f) Active WaitQuorum New ds_transition to Active
30322023-09-22T23:08:05.562ZINFOcrucible: [1] Transition from WaitQuorum to Active
30332023-09-22T23:08:05.562ZINFOcrucible: [2] 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d (57ff38fd-1d30-4627-9201-6d66cd8c9a9f) Active Active New ds_transition to WaitActive
30342023-09-22T23:08:05.562ZINFOcrucible: [2] Transition from New to WaitActive
30352023-09-22T23:08:05.562ZINFOcrucible: [2] 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d (57ff38fd-1d30-4627-9201-6d66cd8c9a9f) Active Active WaitActive ds_transition to WaitQuorum
30362023-09-22T23:08:05.562ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
30372023-09-22T23:08:05.562ZINFOcrucible: [2] 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d (57ff38fd-1d30-4627-9201-6d66cd8c9a9f) Active Active WaitQuorum ds_transition to Active
30382023-09-22T23:08:05.562ZINFOcrucible: [2] Transition from WaitQuorum to Active
30392023-09-22T23:08:05.562ZINFOcrucible: 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d is now active with session: 57ff38fd-1d30-4627-9201-6d66cd8c9a9f
30402023-09-22T23:08:05.562ZINFOcrucible: [1] 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d (57ff38fd-1d30-4627-9201-6d66cd8c9a9f) Active Active Active ds_transition to Faulted
30412023-09-22T23:08:05.562ZINFOcrucible: [1] Transition from Active to Faulted
30422023-09-22T23:08:05.562ZINFOcrucible: [1] 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d (57ff38fd-1d30-4627-9201-6d66cd8c9a9f) Active Faulted Active ds_transition to LiveRepairReady
30432023-09-22T23:08:05.562ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
30442023-09-22T23:08:05.562ZINFOcrucible: [1] 1d851a9a-c4b3-459c-8d3d-fb8473f6f33d (57ff38fd-1d30-4627-9201-6d66cd8c9a9f) Active LiveRepairReady Active ds_transition to LiveRepair
30452023-09-22T23:08:05.562ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
30462023-09-22T23:08:05.562ZINFOcrucible: Waiting for Close + ReOpen jobs
30472023-09-22T23:08:05.562ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
30482023-09-22T23:08:05.562ZINFOcrucible: RE:0 close id:1000 queued, notify DS
30492023-09-22T23:08:05.562ZINFOcrucible: RE:0 Wait for result from close command 1000:1
30502023-09-22T23:08:05.562ZINFOcrucible: Now move the NoOp job forward
30512023-09-22T23:08:05.562ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = {downstairs
3052 "msg":"{Now move the NoOp job forward",""v"msg"::"0,"[0] Reports error GenericError(name\""bad\":"crucible","level":30) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }","v":0,"name":"crucible","level":50,"time":"2023-09-22T23:08:05.562918993Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291},
3053 {"msg":"[0] DS Reports error Err(GenericError(\"bad\""time":"2023-09-22T23:08:05.562935467Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291)) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }","":"downstairs","}v"
3054 :0,"name":"crucible"{,"level":50"msg":"[0] client skip 4 in process jobs because fault","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:05.563042093Z",",time":""hostname2023-09-22T23:08:05.563048519Z"",":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internalpid"",:"4291pid",":":"4291downstairs"}
3055 ,"":"{"downstairs"msg":"}
3056 [0] changed 1 jobs to fault skipped","v":0,"name":"crucible"{,"level":30"msg":"[0] Reports error GenericError(\"bad\","time":"2023-09-22T23:08:05.563090351Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs"}
3057 {"msg":"[0] 6cc6b842-623b-4ef4-82a7-54fed8104c50 (b47c3c9d-3c29-4a37-8eb5-6188f9b2bee7) LiveRepair Active Active ds_transition to Faulted","v":0,"name) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }"":",crucible"","v":level":030,"name":"crucible","level":50,"time":"2023-09-22T23:08:05.563118671Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3058 {"msg":","[0] Transition from LiveRepair to Faultedtime"":,""v":0,"name":"crucible"2023-09-22T23:08:05.563122748Z",","level":30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":","timedownstairs"":"}
3059 2023-09-22T23:08:05.563142396Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3060 "msg"{:""msg":"Extent 0 close id:1002 Failed: Error: bad","[0] client skip 4 in process jobs because fault"v":0,,""name"v:""crucible":,"0level",:"50name":"crucible","level":30,"time":"2023-09-22T23:08:05.563174379Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3061 {,""msg":"time":"RE:0 Wait for result from reopen command 1003:4","v":2023-09-22T23:08:05.563177728Z"0,",name":""cruciblehostname"",":level":"30ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs","}time":"
3062 2023-09-22T23:08:05.563200623Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291"}
3063 msg":"[0] changed 0 jobs to fault skipped","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:05.563226039Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs"}
30642023-09-22T23:08:05.563ZINFOcrucible: [0] 4d2d5af5-94fb-4532-8477-38add7c548e4 (b46ad456-c0c5-43cb-8b84-fa6016b7a39b) LiveRepair Active Active ds_transition to Faulted
30652023-09-22T23:08:05.563ZINFOcrucible: [0] Transition from LiveRepair to Faulted
30662023-09-22T23:08:05.563ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
30672023-09-22T23:08:05.563ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
30682023-09-22T23:08:05.563ZWARNcrucible: RE:0 Bailing with error
30692023-09-22T23:08:05.563ZINFOcrucible: Crucible stats registered with UUID: 46ff01ca-5774-41c5-a0d0-e4852575769b
30702023-09-22T23:08:05.563ZINFOcrucible: Crucible 46ff01ca-5774-41c5-a0d0-e4852575769b has session id: 7db0feac-d135-4774-a085-fa438156d01a
30712023-09-22T23:08:05.563ZINFOcrucible: [0] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) New New New ds_transition to WaitActive
30722023-09-22T23:08:05.563ZINFOcrucible: [0] Transition from New to WaitActive
30732023-09-22T23:08:05.563ZINFOcrucible: [0] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) WaitActive New New ds_transition to WaitQuorum
30742023-09-22T23:08:05.563ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
30752023-09-22T23:08:05.563ZINFOcrucible: [0] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) WaitQuorum New New ds_transition to Active
30762023-09-22T23:08:05.563ZINFOcrucible: [0] Transition from WaitQuorum to Active
30772023-09-22T23:08:05.563ZINFOcrucible: [1] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) Active New New ds_transition to WaitActive
30782023-09-22T23:08:05.563ZINFOcrucible: [1] Transition from New to WaitActive
30792023-09-22T23:08:05.563ZINFOcrucible: [1] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) Active WaitActive New ds_transition to WaitQuorum
30802023-09-22T23:08:05.564ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
30812023-09-22T23:08:05.564ZINFOcrucible: [1] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) Active WaitQuorum New ds_transition to Active
30822023-09-22T23:08:05.564ZINFOcrucible: [1] Transition from WaitQuorum to Active
30832023-09-22T23:08:05.564ZINFOcrucible: [2] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) Active Active New ds_transition to WaitActive
30842023-09-22T23:08:05.564ZINFOcrucible: [2] Transition from New to WaitActive
30852023-09-22T23:08:05.564ZINFOcrucible: [2] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) Active Active WaitActive ds_transition to WaitQuorum
30862023-09-22T23:08:05.564ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
30872023-09-22T23:08:05.564ZINFOcrucible: [2] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) Active Active WaitQuorum ds_transition to Active
30882023-09-22T23:08:05.564ZINFOcrucible: [2] Transition from WaitQuorum to Active
30892023-09-22T23:08:05.564ZINFOcrucible: 46ff01ca-5774-41c5-a0d0-e4852575769b is now active with session: aeed4c9e-4d74-44ae-a346-e2cd02961908
30902023-09-22T23:08:05.564ZINFOcrucible: [0] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) Active Active Active ds_transition to Faulted
30912023-09-22T23:08:05.564ZINFOcrucible: [0] Transition from Active to Faulted
30922023-09-22T23:08:05.564ZINFOcrucible: [0] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) Faulted Active Active ds_transition to LiveRepairReady
30932023-09-22T23:08:05.564ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
30942023-09-22T23:08:05.564ZINFOcrucible: [0] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) LiveRepairReady Active Active ds_transition to LiveRepair
30952023-09-22T23:08:05.564ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
30962023-09-22T23:08:05.564ZINFOcrucible: Waiting for Close + ReOpen jobs
30972023-09-22T23:08:05.564ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
30982023-09-22T23:08:05.564ZINFOcrucible: RE:0 close id:1000 queued, notify DS
30992023-09-22T23:08:05.564ZINFOcrucible: RE:0 Wait for result from close command 1000:1
31002023-09-22T23:08:05.603ZWARNcrucible: [0] flow control start upstairs = 1
3101 test dummy_downstairs_tests::protocol_test::test_flow_control ... ok
31022023-09-22T23:08:05.676ZINFOcrucible: Crucible stats registered with UUID: dd353b52-59c8-4b1c-b796-3ac33420495a
31032023-09-22T23:08:05.676ZINFOcrucible: Crucible dd353b52-59c8-4b1c-b796-3ac33420495a has session id: b4f4e674-2022-4abb-84a7-f6e9d210c7c0
31042023-09-22T23:08:05.676ZINFOcrucible: [0] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) New New New ds_transition to WaitActive
31052023-09-22T23:08:05.676ZINFOcrucible: [0] Transition from New to WaitActive
31062023-09-22T23:08:05.676ZINFOcrucible: [0] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) WaitActive New New ds_transition to WaitQuorum
31072023-09-22T23:08:05.676ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
31082023-09-22T23:08:05.676ZINFOcrucible: [0] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) WaitQuorum New New ds_transition to Active
31092023-09-22T23:08:05.676ZINFOcrucible: [0] Transition from WaitQuorum to Active
31102023-09-22T23:08:05.676ZINFOcrucible: [1] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) Active New New ds_transition to WaitActive
31112023-09-22T23:08:05.676ZINFOcrucible: [1] Transition from New to WaitActive
31122023-09-22T23:08:05.676ZINFOcrucible: [1] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) Active WaitActive New ds_transition to WaitQuorum
31132023-09-22T23:08:05.676ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
31142023-09-22T23:08:05.676ZINFOcrucible: [1] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) Active WaitQuorum New ds_transition to Active
31152023-09-22T23:08:05.676ZINFOcrucible: [1] Transition from WaitQuorum to Active
31162023-09-22T23:08:05.676ZINFOcrucible: [2] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) Active Active New ds_transition to WaitActive
31172023-09-22T23:08:05.676ZINFOcrucible: [2] Transition from New to WaitActive
31182023-09-22T23:08:05.676ZINFOcrucible: [2] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) Active Active WaitActive ds_transition to WaitQuorum
31192023-09-22T23:08:05.676ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
31202023-09-22T23:08:05.676ZINFOcrucible: [2] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) Active Active WaitQuorum ds_transition to Active
31212023-09-22T23:08:05.676ZINFOcrucible: [2] Transition from WaitQuorum to Active
31222023-09-22T23:08:05.676ZINFOcrucible: dd353b52-59c8-4b1c-b796-3ac33420495a is now active with session: 6a5f3c94-1536-4e0f-b6e8-b72df1939203
31232023-09-22T23:08:05.676ZINFOcrucible: [0] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) Active Active Active ds_transition to Faulted
31242023-09-22T23:08:05.676ZINFOcrucible: [0] Transition from Active to Faulted
31252023-09-22T23:08:05.676ZINFOcrucible: [0] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) Faulted Active Active ds_transition to LiveRepairReady
31262023-09-22T23:08:05.676ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
31272023-09-22T23:08:05.676ZINFOcrucible: [0] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) LiveRepairReady Active Active ds_transition to LiveRepair
31282023-09-22T23:08:05.676ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
31292023-09-22T23:08:05.676ZINFOcrucible: Waiting for Close + ReOpen jobs
31302023-09-22T23:08:05.676ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
31312023-09-22T23:08:05.676ZINFOcrucible: RE:0 close id:1000 queued, notify DS
31322023-09-22T23:08:05.676ZINFOcrucible: RE:0 Wait for result from close command 1000:1
31332023-09-22T23:08:06.437ZINFOcrucible: Waiting for 3 jobs (currently 2)
31342023-09-22T23:08:06.437ZINFOcrucible: No repair needed for extent 0 = downstairs
31352023-09-22T23:08:06.437ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
31362023-09-22T23:08:06.561ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
31372023-09-22T23:08:06.561ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
31382023-09-22T23:08:06.561ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
31392023-09-22T23:08:06.561ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
31402023-09-22T23:08:06.561ZINFOcrucible: [1] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) LiveRepair Active Active ds_transition to Faulted
31412023-09-22T23:08:06.561ZINFOcrucible: [1] Transition from Active to Faulted
31422023-09-22T23:08:06.561ZINFOcrucible: Now ACK the close job
31432023-09-22T23:08:06.561ZINFOcrucible: Waiting for 3 jobs (currently 2)
31442023-09-22T23:08:06.561ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
31452023-09-22T23:08:06.561ZINFOcrucible: [0] client skip 2 in process jobs because fault = downstairs
31462023-09-22T23:08:06.561ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
31472023-09-22T23:08:06.561ZINFOcrucible: [0] a539c30f-2c8a-4e0b-a419-6c092883f4c8 (2bb8288f-73e5-46ca-83e3-b652d8887f09) LiveRepair Faulted Active ds_transition to Faulted
31482023-09-22T23:08:06.561ZINFOcrucible: [0] Transition from LiveRepair to Faulted
31492023-09-22T23:08:06.561ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
31502023-09-22T23:08:06.563ZINFOcrucible: Now ACK the close job
31512023-09-22T23:08:06.563ZINFOcrucible: Waiting for 3 jobs (currently 2)
31522023-09-22T23:08:06.563ZINFOcrucible: Repair for extent 0 s:2 d:[ClientId(1)] = downstairs
31532023-09-22T23:08:06.563ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
31542023-09-22T23:08:06.564ZINFOcrucible: Waiting for 3 jobs (currently 2)
31552023-09-22T23:08:06.564ZINFOcrucible: No repair needed for extent 0 = downstairs
31562023-09-22T23:08:06.564ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
31572023-09-22T23:08:06.677ZINFOcrucible: Now ACK the close job
31582023-09-22T23:08:06.677ZINFOcrucible: Waiting for 3 jobs (currently 2)
31592023-09-22T23:08:06.678ZINFOcrucible: No repair needed for extent 0 = downstairs
31602023-09-22T23:08:06.678ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
31612023-09-22T23:08:07.437ZINFOcrucible: Waiting for 4 jobs (currently 3)
31622023-09-22T23:08:07.437ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
31632023-09-22T23:08:07.562ZINFOcrucible: Waiting for 4 jobs (currently 3)
31642023-09-22T23:08:07.562ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
31652023-09-22T23:08:07.562ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
31662023-09-22T23:08:07.564ZINFOcrucible: Now ACK the repair job
31672023-09-22T23:08:07.564ZINFOcrucible: Waiting for 4 jobs (currently 3)
31682023-09-22T23:08:07.564ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
31692023-09-22T23:08:07.564ZINFOcrucible: Finally, move the ReOpen job forward
31702023-09-22T23:08:07.564ZINFOcrucible: Now ACK the reopen job
31712023-09-22T23:08:07.564ZWARNcrucible: RE:0 Bailing with error
31722023-09-22T23:08:07.565ZINFOcrucible: Crucible stats registered with UUID: 60f418b7-1c68-4219-b2c6-8690095ef46b
31732023-09-22T23:08:07.565ZINFOcrucible: Crucible 60f418b7-1c68-4219-b2c6-8690095ef46b has session id: ca7de67c-586a-48a2-86f2-35661d12ff50
31742023-09-22T23:08:07.565ZINFOcrucible: [0] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) New New New ds_transition to WaitActive
31752023-09-22T23:08:07.565ZINFOcrucible: [0] Transition from New to WaitActive
31762023-09-22T23:08:07.565ZINFOcrucible: [0] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) WaitActive New New ds_transition to WaitQuorum
31772023-09-22T23:08:07.565ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
31782023-09-22T23:08:07.565ZINFOcrucible: [0] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) WaitQuorum New New ds_transition to Active
31792023-09-22T23:08:07.565ZINFOcrucible: [0] Transition from WaitQuorum to Active
31802023-09-22T23:08:07.565ZINFOcrucible: [1] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) Active New New ds_transition to WaitActive
31812023-09-22T23:08:07.565ZINFOcrucible: [1] Transition from New to WaitActive
31822023-09-22T23:08:07.565ZINFOcrucible: [1] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) Active WaitActive New ds_transition to WaitQuorum
31832023-09-22T23:08:07.565ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
31842023-09-22T23:08:07.565ZINFOcrucible: [1] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) Active WaitQuorum New ds_transition to Active
31852023-09-22T23:08:07.565ZINFOcrucible: [1] Transition from WaitQuorum to Active
31862023-09-22T23:08:07.565ZINFOcrucible: [2] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) Active Active New ds_transition to WaitActive
31872023-09-22T23:08:07.565ZINFOcrucible: [2] Transition from New to WaitActive
31882023-09-22T23:08:07.565ZINFOcrucible: [2] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) Active Active WaitActive ds_transition to WaitQuorum
31892023-09-22T23:08:07.565ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
31902023-09-22T23:08:07.565ZINFOcrucible: [2] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) Active Active WaitQuorum ds_transition to Active
31912023-09-22T23:08:07.565ZINFOcrucible: [2] Transition from WaitQuorum to Active
31922023-09-22T23:08:07.565ZINFOcrucible: 60f418b7-1c68-4219-b2c6-8690095ef46b is now active with session: b7455263-b441-43aa-ab5d-bc91879214ec
31932023-09-22T23:08:07.565ZINFOcrucible: [0] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) Active Active Active ds_transition to Faulted
31942023-09-22T23:08:07.565ZINFOcrucible: [0] Transition from Active to Faulted
31952023-09-22T23:08:07.565ZINFOcrucible: [0] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) Faulted Active Active ds_transition to LiveRepairReady
31962023-09-22T23:08:07.565ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
31972023-09-22T23:08:07.565ZINFOcrucible: [0] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) LiveRepairReady Active Active ds_transition to LiveRepair
31982023-09-22T23:08:07.565ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
31992023-09-22T23:08:07.565ZINFOcrucible: Waiting for Close + ReOpen jobs
32002023-09-22T23:08:07.565ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
32012023-09-22T23:08:07.565ZINFOcrucible: RE:0 close id:1000 queued, notify DS
32022023-09-22T23:08:07.565ZINFOcrucible: RE:0 Wait for result from close command 1000:1
32032023-09-22T23:08:07.565ZINFOcrucible: Waiting for 4 jobs (currently 3)
32042023-09-22T23:08:07.565ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
32052023-09-22T23:08:07.679ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
32062023-09-22T23:08:07.679ZERROcrucible: [0] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
32072023-09-22T23:08:07.679ZINFOcrucible: [0] client skip 3 in process jobs because fault = downstairs
32082023-09-22T23:08:07.679ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
32092023-09-22T23:08:07.679ZINFOcrucible: [0] dd353b52-59c8-4b1c-b796-3ac33420495a (6a5f3c94-1536-4e0f-b6e8-b72df1939203) LiveRepair Active Active ds_transition to Faulted
32102023-09-22T23:08:07.679ZINFOcrucible: [0] Transition from LiveRepair to Faulted
32112023-09-22T23:08:07.679ZINFOcrucible: Waiting for 4 jobs (currently 3)
32122023-09-22T23:08:07.679ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
32132023-09-22T23:08:07.679ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
32142023-09-22T23:08:08.564ZINFOcrucible: Now move the NoOp job forward
32152023-09-22T23:08:08.564ZINFOcrucible: Now ACK the NoOp job
32162023-09-22T23:08:08.564ZINFOcrucible: Finally, move the ReOpen job forward
32172023-09-22T23:08:08.564ZINFOcrucible: Now ACK the Reopen job
32182023-09-22T23:08:08.564ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
3219 {"msg":"RE:0 Wait for result from reopen command 1003:4"----------------------------------------------------------------
3220 ,"v":0,"name":"crucible" Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
3221 ,"level":30GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
3222 1 Acked 1000 FClose 0,"time": Done" Err Done false2023-09-22T23:08:08.564325154Z
3223 ","hostname 2 Acked 1001 NoOp 0": Skip" Skip Done false
3224 ip-10-150-1-74.us-west-2.compute.internal"," 3 Acked 1002 NoOp 0pid" Skip: Skip4291 Done false
3225 }
3226 4 Acked 1003 Reopen 0 Skip Skip Done{ false
3227 " STATES DS:0 DS:1 DS:2 TOTAL
3228 msg": New " 0 0 Extent 0 close id:1003 Failed: Error: bad 0 " 0
3229 ," Sent v" 0 : 0 0 0 , 0
3230 " Done name 1 " 0 :" 4 crucible 5
3231 " Skipped , 3 " 3 level" 0 : 6
3232 50 Error 0 1 0 1
3233 Last Flush: 0 0 0
3234 Downstairs last five completed:,"
3235 time":"2023-09-22T23:08:08.564383122Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3236 {Upstairs last five completed: "msg":" 4 3RE:0 Bailing with error 2" 1,
3237 "v":0,"name":"crucible","level":40,"time":"2023-09-22T23:08:08.564416629Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
32382023-09-22T23:08:08.564ZINFOcrucible: Crucible stats registered with UUID: 3d402dde-4b42-442d-92a4-920632ce6a71
32392023-09-22T23:08:08.564ZINFOcrucible: Crucible 3d402dde-4b42-442d-92a4-920632ce6a71 has session id: df6e2135-aea0-4363-80e7-c85916e908da
32402023-09-22T23:08:08.564ZINFOcrucible: [0] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) New New New ds_transition to WaitActive
32412023-09-22T23:08:08.565ZINFOcrucible: [0] Transition from New to WaitActive
32422023-09-22T23:08:08.565ZINFOcrucible: [0] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) WaitActive New New ds_transition to WaitQuorum
32432023-09-22T23:08:08.565ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
32442023-09-22T23:08:08.565ZINFOcrucible: [0] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) WaitQuorum New New ds_transition to Active
32452023-09-22T23:08:08.565ZINFOcrucible: [0] Transition from WaitQuorum to Active
32462023-09-22T23:08:08.565ZINFOcrucible: [1] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) Active New New ds_transition to WaitActive
32472023-09-22T23:08:08.565ZINFOcrucible: [1] Transition from New to WaitActive
32482023-09-22T23:08:08.565ZINFOcrucible: [1] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) Active WaitActive New ds_transition to WaitQuorum
32492023-09-22T23:08:08.565ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
32502023-09-22T23:08:08.565ZINFOcrucible: [1] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) Active WaitQuorum New ds_transition to Active
32512023-09-22T23:08:08.565ZINFOcrucible: [1] Transition from WaitQuorum to Active
32522023-09-22T23:08:08.565ZINFOcrucible: [2] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) Active Active New ds_transition to WaitActive
32532023-09-22T23:08:08.565ZINFOcrucible: [2] Transition from New to WaitActive
32542023-09-22T23:08:08.565ZINFOcrucible: [2] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) Active Active WaitActive ds_transition to WaitQuorum
32552023-09-22T23:08:08.565ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
32562023-09-22T23:08:08.565ZINFOcrucible: [2] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) Active Active WaitQuorum ds_transition to Active
32572023-09-22T23:08:08.565ZINFOcrucible: [2] Transition from WaitQuorum to Active
3258 {"msg":"3d402dde-4b42-442d-92a4-920632ce6a71 is now active with session: 8170d26c-f851-4a07-85f6-cfb52781a824","v":0,"name":"crucible","level":30{,"time":""2023-09-22T23:08:08.565355946Z",msg""hostname":":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291Now move the NoOp job forward"}
3259 ,"v"{:"0msg":","name":"crucible","[0] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) Active Active Active ds_transition to Faulted"level,"":v":030,"name":"crucible","level":30,"time":"2023-09-22T23:08:08.565384438Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3260 {"msg":"[0] Transition from Active to Faulted","v":0,"name":"crucible","level":30,"time":",2023-09-22T23:08:08.565383032Z""time":","2023-09-22T23:08:08.565402135Z"hostname",":hostname":""ip-10-150-1-74.us-west-2.compute.internal","pid":4291ip-10-150-1-74.us-west-2.compute.internal"}
3261 ,"pid"{:4291"msg":"}
3262 [0] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) Faulted Active Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":{30"msg":"Now ACK the NoOp job","v":0,"name":","timecrucible"":","level2023-09-22T23:08:08.565437451Z"",:"30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3263 {"msg":"[0] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:08.565450665Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:08.565461726Z",,""hostname":"pid":4291ip-10-150-1-74.us-west-2.compute.internal","}pid":
3264 4291}
3265 {{"msg"":"msg":"Finally, move the ReOpen job forward","[0] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) LiveRepairReady Active Active ds_transition to LiveRepair"v":,"0v",:"0,"name":name":""crucible","crucible"level":,"30level":30,"time":"2023-09-22T23:08:08.565499169Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3266 ,"{time":""msg":"2023-09-22T23:08:08.565500374Z"[0] Transition from LiveRepairReady to LiveRepair",,""v":0hostname",:""name":"crucible","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3267 ,"time":"{2023-09-22T23:08:08.565522731Z","hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291Now ACK the repair job"}
3268 ,"v"{:0",msg":""name"Waiting for Close + ReOpen jobs":,""v":crucible"0,,""namelevel""::"30crucible","level":30,"time":"2023-09-22T23:08:08.565551352Z",",hostname":""time":ip-10-150-1-74.us-west-2.compute.internal"","pid":4291}
3269 2023-09-22T23:08:08.565550391Z"{,""msghostname"":":"RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]","v":ip-10-150-1-74.us-west-2.compute.internal"0,,""name":"pid"crucible":,"4291level":30}
3270 {,"time":""msg":"2023-09-22T23:08:08.565580673Z","hostname":"RE:0 Wait for result from reopen command 1003:4"ip-10-150-1-74.us-west-2.compute.internal",",pid":"4291v"}:
3271 0,"{name":""msg":"crucible","RE:0 close id:1000 queued, notify DS"level",":v":300,"name":"crucible","level":30,"time":"2023-09-22T23:08:08.565607201Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",",pid":"4291time"}:"
3272 2023-09-22T23:08:08.565605255Z"{,""hostname"msg":":"RE:0 Wait for result from close command 1000:1","v":0,ip-10-150-1-74.us-west-2.compute.internal"",name":""crucible"pid",":level":429130}
3273 ,"time":"2023-09-22T23:08:08.565637361Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
32742023-09-22T23:08:08.566ZINFOcrucible: Crucible stats registered with UUID: 125fefd1-90b0-4b49-b834-ad2ad49681ef
3275 {"msg":"Crucible 125fefd1-90b0-4b49-b834-ad2ad49681ef has session id: 19e8d39b-da45-43b4-8430-bfa6fd4ee3df","v":0,"name":"crucible","level":30{,"time":"2023-09-22T23:08:08.566100764Z","hostname":""ip-10-150-1-74.us-west-2.compute.internal","pid":msg4291"}:
3276 "{Now move the NoOp job forward""msg":","v":0,"name"[0] 125fefd1-90b0-4b49-b834-ad2ad49681ef (f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a) New New New ds_transition to WaitActive:"","crucible"v":0,,""name":"level"crucible":,"level":3030,"time":"2023-09-22T23:08:08.566131884Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3277 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:08.566132377Z",","hostnametime":"":"2023-09-22T23:08:08.566150896Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",ip-10-150-1-74.us-west-2.compute.internal"","pidpid""::42914291}
3278 }
32792023-09-22T23:08:08.566ZINFOcrucible: [0] 125fefd1-90b0-4b49-b834-ad2ad49681ef (f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a) WaitActive New New ds_transition to WaitQuorum
32802023-09-22T23:08:08.566ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
32812023-09-22T23:08:08.566ZINFOcrucible: [0] 125fefd1-90b0-4b49-b834-ad2ad49681ef (f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a) WaitQuorum New New ds_transition to Active
32822023-09-22T23:08:08.566ZINFOcrucible: [0] Transition from WaitQuorum to Active
3283 {"msg":"[1] 125fefd1-90b0-4b49-b834-ad2ad49681ef (f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30{,"time":"2023-09-22T23:08:08.566266243Z","hostname":""msgip-10-150-1-74.us-west-2.compute.internal"",":pid":"4291}
3284 [1] DS Reports error Err(GenericError({\""badmsg":"\"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:08.56628936Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3285 )) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }"{"msg":","v":0,"name":[1] 125fefd1-90b0-4b49-b834-ad2ad49681ef (f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a) Active WaitActive New ds_transition to WaitQuorum"","vcrucible"":0,","name":level"":crucible","50level":30,"time":"2023-09-22T23:08:08.566315204Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3286 {"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:08.566315459Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:08.566333484Z",,""pid"hostname":":4291ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"}":
3287 "{downstairs""msg"}:"
3288 [1] 125fefd1-90b0-4b49-b834-ad2ad49681ef (f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30{"msg":","time":"[1] Reports error GenericError(\"2023-09-22T23:08:08.566362524Z"bad\","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3289 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }",","v"time":":02023-09-22T23:08:08.566382492Z",,""hostname":name"":"crucible"ip-10-150-1-74.us-west-2.compute.internal",,""pid":level"4291:}
3290 50{"msg":"[2] 125fefd1-90b0-4b49-b834-ad2ad49681ef (f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:08.56641019Z",",hostname"":"time":"ip-10-150-1-74.us-west-2.compute.internal","pid":2023-09-22T23:08:08.566402919Z4291"}
3291 ,"{hostname":""msg":"[2] Transition from New to WaitActive"ip-10-150-1-74.us-west-2.compute.internal,""v":0,",name"":"pid"crucible":,"4291level":30,"":"downstairs"}
3292 ,"time":"2023-09-22T23:08:08.566436913Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3293 "msg"{:""msg":"[1] client skip 4 in process jobs because fault","v":0[2] 125fefd1-90b0-4b49-b834-ad2ad49681ef (f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a) Active Active WaitActive ds_transition to WaitQuorum",,""vname""::0","namecrucible":""crucible",,""level":level30":30,"time":"2023-09-22T23:08:08.566467813Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3294 ,"time"{:""msg":"2023-09-22T23:08:08.566469192Z","[2] Transition from WaitActive to WaitQuorum"hostname,"v""::"0,"name":"crucible","ip-10-150-1-74.us-west-2.compute.internal"level":,30"pid":4291,"":"downstairs"}
3295 ,"time":"2023-09-22T23:08:08.566497019Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal",""pid":msg"4291:"}
3296 [1] changed 0 jobs to fault skipped",{"v":"0msg":","name":"crucible","level":[2] 125fefd1-90b0-4b49-b834-ad2ad49681ef (f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a) Active Active WaitQuorum ds_transition to Active"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:08.566526305Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4291time"}:
3297 "{"2023-09-22T23:08:08.566523851Zmsg":"",[2] Transition from WaitQuorum to Active"","vhostname""::0","name":"crucible","levelip-10-150-1-74.us-west-2.compute.internal":"30,"pid":4291,"":"downstairs"}
3298 ,"time":"2023-09-22T23:08:08.566554311Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3299 "msg"{:""msg":"125fefd1-90b0-4b49-b834-ad2ad49681ef is now active with session: f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a","v":0,"name":"[1] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) LiveRepair Active Active ds_transition to Faulted"crucible",",level":"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:08.566581249Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3300 {"msg":"[2] 125fefd1-90b0-4b49-b834-ad2ad49681ef (f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a) Active Active Active ds_transition to Faulted","v":0,"name":"crucible",,""level":30time":"2023-09-22T23:08:08.566586315Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",","time"pid":":42912023-09-22T23:08:08.56660357Z","}hostname":"
3301 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3302 {{"msg"":"msg":"[2] Transition from Active to Faulted","v":0,[1] Transition from Active to Faulted""name":","cruciblev"":,"0level",":30name":"crucible","level":30{,"time":"2023-09-22T23:08:08.566635618Z","hostname":""ip-10-150-1-74.us-west-2.compute.internal","pid":msg4291"}:
3303 ",{"time":Waiting for 3 jobs (currently 2)"""msg":",2023-09-22T23:08:08.56663877Z"",v"":hostname":0[2] 125fefd1-90b0-4b49-b834-ad2ad49681ef (f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a) Active Active Faulted ds_transition to LiveRepairReady",,"""v":ip-10-150-1-74.us-west-2.compute.internalname"":,"0pid,":"4291name":}""
3304 crucible"crucible",",{"level":level30"":msg":"30RE:0 Wait for result from reopen command 1003:4","v":0,"name":"crucible","level":,30"time":"2023-09-22T23:08:08.566678614Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3305 {"msg":","time":"[2] Transition from Faulted to LiveRepairReady","2023-09-22T23:08:08.566685416Zv":"0,",hostname""name":":"crucible","ip-10-150-1-74.us-west-2.compute.internal,level""":time","30pid"::"4291}
3306 2023-09-22T23:08:08.566681336Z"{,,"""hostnamemsgtime":""":2023-09-22T23:08:08.566708977Z":,"""hostname":"Extent 0 close id:1003 Failed: Error: bad","v":ip-10-150-1-74.us-west-2.compute.internal"0ip-10-150-1-74.us-west-2.compute.internal",",name",""pidpid":"":crucible"4291,":level4291}"
3307 :50}
3308 {"{msg":","time":""2023-09-22T23:08:08.566744483Z"[2] 125fefd1-90b0-4b49-b834-ad2ad49681ef (f48d1731-e4d3-4e1d-bdf3-dfdfd3dd7a2a) Active Active LiveRepairReady ds_transition to LiveRepair,"msg,""v:""":hostname":"0No repair needed for extent 0",ip-10-150-1-74.us-west-2.compute.internal",","name"v":""cruciblepid"::"4291,"}level":030
3309 ,"name"{:""cruciblemsg",",time":"":"2023-09-22T23:08:08.566772872Z"","level[0] client skip 4 in process jobs because fault"","hostname":"v":0:ip-10-150-1-74.us-west-2.compute.internal",,"30pid":"4291name":"}
3310 crucible","{level":30"msg":"[2] Transition from LiveRepairReady to LiveRepair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:08.566799186Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,,"""time":,":"time""downstairs":}"
3311 2023-09-22T23:08:08.566804333Z"2023-09-22T23:08:08.56679358Z","{hostname","":hostname"msg":":""ip-10-150-1-74.us-west-2.compute.internal","[0] changed 0 jobs to fault skipped"pid"ip-10-150-1-74.us-west-2.compute.internal:,"4291",v":}0"
3312 ,"pidname{":""crucible":",4291msg":"",level"Waiting for Close + ReOpen jobs":":30"","downstairs"v":0,"}name"
3313 :","crucibletime":"","level":2023-09-22T23:08:08.56685579Z{30","hostname":"ip-10-150-1-74.us-west-2.compute.internal",""pid":msg4291":,""":","downstairstime":"RE:0 Wait for result from repair command 1001:2""2023-09-22T23:08:08.566870132Z,}"
3314 "v",":{hostname":""0msg":ip-10-150-1-74.us-west-2.compute.internal"",,""pid":name4291"[0] 46ff01ca-5774-41c5-a0d0-e4852575769b (aeed4c9e-4d74-44ae-a346-e2cd02961908) LiveRepair Faulted Active ds_transition to Faulted}":,""v":
3315 crucible"0{,,""msg":""name":"levelRE:0 repair extent with ids 1000,1001,1002,1003 deps:[]crucible"","":level","30v"::030,"name":"crucible","level":30,"time":"2023-09-22T23:08:08.566915318Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","pid":2023-09-22T23:08:08.566917372Z4291","}hostname":"
3316 ip-10-150-1-74.us-west-2.compute.internal",,""pid"time:"4291:{}""
3317 msg":"{2023-09-22T23:08:08.566913829Z""[0] Transition from LiveRepair to Faultedmsg,""":"hostname",":v":0RE:0 close id:1000 queued, notify DS"",",name"ip-10-150-1-74.us-west-2.compute.internal"v":",0:""pid"crucible":,,"4291name":""crucible"level,"":}level":3030
3318 ,,""time":"time":"2023-09-22T23:08:08.566968757Z"2023-09-22T23:08:08.56696921Z,""hostname,"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid":,4291"}pid":
3319 4291}
3320 {"{msg":""msg":"RE:0 Wait for result from close command 1000:1"RE:0 Bailing with error",,""v":v":00,,""namename":"":crucible"","crucible"level":40,"level":30,"time":","time":"2023-09-22T23:08:08.567008978Z","2023-09-22T23:08:08.567009867Z"hostname,"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid":,"4291pid":}4291
3321 }
33222023-09-22T23:08:08.567ZINFOcrucible: Crucible stats registered with UUID: 2a9017ca-3786-4928-aa25-af146cb13064
33232023-09-22T23:08:08.567ZINFOcrucible: Crucible 2a9017ca-3786-4928-aa25-af146cb13064 has session id: af0da4c6-782e-4684-a546-1550f807b027
33242023-09-22T23:08:08.567ZINFOcrucible: [0] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) New New New ds_transition to WaitActive
33252023-09-22T23:08:08.567ZINFOcrucible: [0] Transition from New to WaitActive
33262023-09-22T23:08:08.567ZINFOcrucible: [0] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) WaitActive New New ds_transition to WaitQuorum
33272023-09-22T23:08:08.567ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
33282023-09-22T23:08:08.567ZINFOcrucible: [0] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) WaitQuorum New New ds_transition to Active
33292023-09-22T23:08:08.567ZINFOcrucible: [0] Transition from WaitQuorum to Active
33302023-09-22T23:08:08.567ZINFOcrucible: [1] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) Active New New ds_transition to WaitActive
33312023-09-22T23:08:08.567ZINFOcrucible: [1] Transition from New to WaitActive
33322023-09-22T23:08:08.567ZINFOcrucible: [1] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) Active WaitActive New ds_transition to WaitQuorum
33332023-09-22T23:08:08.567ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
33342023-09-22T23:08:08.567ZINFOcrucible: [1] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) Active WaitQuorum New ds_transition to Active
33352023-09-22T23:08:08.567ZINFOcrucible: [1] Transition from WaitQuorum to Active
33362023-09-22T23:08:08.567ZINFOcrucible: [2] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) Active Active New ds_transition to WaitActive
33372023-09-22T23:08:08.567ZINFOcrucible: [2] Transition from New to WaitActive
33382023-09-22T23:08:08.567ZINFOcrucible: [2] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) Active Active WaitActive ds_transition to WaitQuorum
33392023-09-22T23:08:08.567ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
33402023-09-22T23:08:08.567ZINFOcrucible: [2] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) Active Active WaitQuorum ds_transition to Active
33412023-09-22T23:08:08.567ZINFOcrucible: [2] Transition from WaitQuorum to Active
33422023-09-22T23:08:08.567ZINFOcrucible: 2a9017ca-3786-4928-aa25-af146cb13064 is now active with session: 41790776-3f0f-4ed5-8993-f2ed274b7319
33432023-09-22T23:08:08.567ZINFOcrucible: [0] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) Active Active Active ds_transition to Faulted
33442023-09-22T23:08:08.567ZINFOcrucible: [0] Transition from Active to Faulted
33452023-09-22T23:08:08.567ZINFOcrucible: [0] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) Faulted Active Active ds_transition to LiveRepairReady
33462023-09-22T23:08:08.567ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
33472023-09-22T23:08:08.567ZINFOcrucible: [0] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) LiveRepairReady Active Active ds_transition to LiveRepair
33482023-09-22T23:08:08.567ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
33492023-09-22T23:08:08.567ZINFOcrucible: Waiting for Close + ReOpen jobs
33502023-09-22T23:08:08.567ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
33512023-09-22T23:08:08.567ZINFOcrucible: RE:0 close id:1000 queued, notify DS
33522023-09-22T23:08:08.567ZINFOcrucible: RE:0 Wait for result from close command 1000:1
33532023-09-22T23:08:08.680ZINFOcrucible: Now move the NoOp job forward
33542023-09-22T23:08:08.680ZINFOcrucible: Now ACK the NoOp job
33552023-09-22T23:08:08.680ZINFOcrucible: Finally, move the ReOpen job forward
33562023-09-22T23:08:08.680ZINFOcrucible: Now ACK the Reopen job
33572023-09-22T23:08:08.680ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
33582023-09-22T23:08:08.680ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
33592023-09-22T23:08:08.680ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
33602023-09-22T23:08:08.680ZWARNcrucible: RE:0 Bailing with error
33612023-09-22T23:08:08.681ZINFOcrucible: Crucible stats registered with UUID: 340c04f3-49b8-4ced-b263-4b0e2af7ead7
33622023-09-22T23:08:08.681ZINFOcrucible: Crucible 340c04f3-49b8-4ced-b263-4b0e2af7ead7 has session id: d0ad791d-e99f-4e9b-a416-2c649bca391b
33632023-09-22T23:08:08.681ZINFOcrucible: [0] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) New New New ds_transition to WaitActive
33642023-09-22T23:08:08.681ZINFOcrucible: [0] Transition from New to WaitActive
33652023-09-22T23:08:08.681ZINFOcrucible: [0] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) WaitActive New New ds_transition to WaitQuorum
33662023-09-22T23:08:08.681ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
33672023-09-22T23:08:08.681ZINFOcrucible: [0] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) WaitQuorum New New ds_transition to Active
33682023-09-22T23:08:08.681ZINFOcrucible: [0] Transition from WaitQuorum to Active
33692023-09-22T23:08:08.681ZINFOcrucible: [1] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) Active New New ds_transition to WaitActive
33702023-09-22T23:08:08.681ZINFOcrucible: [1] Transition from New to WaitActive
33712023-09-22T23:08:08.681ZINFOcrucible: [1] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) Active WaitActive New ds_transition to WaitQuorum
33722023-09-22T23:08:08.681ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
33732023-09-22T23:08:08.681ZINFOcrucible: [1] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) Active WaitQuorum New ds_transition to Active
33742023-09-22T23:08:08.681ZINFOcrucible: [1] Transition from WaitQuorum to Active
33752023-09-22T23:08:08.681ZINFOcrucible: [2] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) Active Active New ds_transition to WaitActive
33762023-09-22T23:08:08.681ZINFOcrucible: [2] Transition from New to WaitActive
33772023-09-22T23:08:08.681ZINFOcrucible: [2] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) Active Active WaitActive ds_transition to WaitQuorum
33782023-09-22T23:08:08.681ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
33792023-09-22T23:08:08.681ZINFOcrucible: [2] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) Active Active WaitQuorum ds_transition to Active
33802023-09-22T23:08:08.681ZINFOcrucible: [2] Transition from WaitQuorum to Active
33812023-09-22T23:08:08.681ZINFOcrucible: 340c04f3-49b8-4ced-b263-4b0e2af7ead7 is now active with session: 85274e4a-b9a3-47aa-95cb-f70c1885651d
33822023-09-22T23:08:08.681ZINFOcrucible: [0] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) Active Active Active ds_transition to Faulted
33832023-09-22T23:08:08.681ZINFOcrucible: [0] Transition from Active to Faulted
33842023-09-22T23:08:08.681ZINFOcrucible: [0] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) Faulted Active Active ds_transition to LiveRepairReady
33852023-09-22T23:08:08.681ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
33862023-09-22T23:08:08.681ZINFOcrucible: [0] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) LiveRepairReady Active Active ds_transition to LiveRepair
33872023-09-22T23:08:08.681ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
33882023-09-22T23:08:08.681ZINFOcrucible: Waiting for Close + ReOpen jobs
33892023-09-22T23:08:08.681ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
33902023-09-22T23:08:08.681ZINFOcrucible: RE:0 close id:1000 queued, notify DS
33912023-09-22T23:08:08.681ZINFOcrucible: RE:0 Wait for result from close command 1000:1
33922023-09-22T23:08:09.566ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
33932023-09-22T23:08:09.566ZERROcrucible: [2] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(1), repair_downstairs: [ClientId(0)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
33942023-09-22T23:08:09.566ZINFOcrucible: [2] client skip 2 in process jobs because fault = downstairs
33952023-09-22T23:08:09.566ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
33962023-09-22T23:08:09.566ZINFOcrucible: [2] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) LiveRepair Active Active ds_transition to Faulted
33972023-09-22T23:08:09.566ZINFOcrucible: [2] Transition from Active to Faulted
33982023-09-22T23:08:09.566ZINFOcrucible: Now ACK the close job
33992023-09-22T23:08:09.566ZINFOcrucible: Waiting for 3 jobs (currently 2)
34002023-09-22T23:08:09.566ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
34012023-09-22T23:08:09.566ZINFOcrucible: [0] client skip 2 in process jobs because fault = downstairs
34022023-09-22T23:08:09.566ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
34032023-09-22T23:08:09.566ZINFOcrucible: [0] 3d402dde-4b42-442d-92a4-920632ce6a71 (8170d26c-f851-4a07-85f6-cfb52781a824) LiveRepair Active Faulted ds_transition to Faulted
34042023-09-22T23:08:09.566ZINFOcrucible: [0] Transition from LiveRepair to Faulted
34052023-09-22T23:08:09.566ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
34062023-09-22T23:08:09.567ZINFOcrucible: Waiting for 4 jobs (currently 3)
34072023-09-22T23:08:09.567ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
34082023-09-22T23:08:09.567ZINFOcrucible: Now ACK the close job
34092023-09-22T23:08:09.567ZINFOcrucible: Waiting for 3 jobs (currently 2)
34102023-09-22T23:08:09.567ZINFOcrucible: Repair for extent 0 s:0 d:[ClientId(2)] = downstairs
34112023-09-22T23:08:09.567ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
34122023-09-22T23:08:09.568ZINFOcrucible: Waiting for 3 jobs (currently 2)
34132023-09-22T23:08:09.568ZINFOcrucible: No repair needed for extent 0 = downstairs
34142023-09-22T23:08:09.568ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
34152023-09-22T23:08:09.682ZINFOcrucible: Now ACK the close job
34162023-09-22T23:08:09.682ZINFOcrucible: Waiting for 3 jobs (currently 2)
34172023-09-22T23:08:09.682ZINFOcrucible: No repair needed for extent 0 = downstairs
34182023-09-22T23:08:09.682ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
3419 ----------------------------------------------------------------
3420 Crucible gen:0 GIO:true work queues: Upstairs:2 downstairs:4
3421 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
3422 1 Acked 1000 FClose 0 Done Done Done false
3423 2 Acked 1001 NoOp 0 Done Done Done false
3424 3 NotAcked 1002 NoOp 0 New New New false
3425 4 NotAcked 1003 Reopen 0 New New New false
3426 STATES DS:0 DS:1 DS:2 TOTAL
3427 New 2 2 2 6
3428 Sent 0 0 0 0
3429 Done 2 2 2 6
3430 Skipped 0 0 0 0
3431 Error 0 0 0 0
3432 Last Flush: 0 0 0
3433 Downstairs last five completed:
3434 Upstairs last five completed: 2 1
34352023-09-22T23:08:10.439ZINFOcrucible: Now move the NoOp job forward
34362023-09-22T23:08:10.439ZINFOcrucible: Finally, move the ReOpen job forward
34372023-09-22T23:08:10.439ZINFOcrucible: Now ACK the reopen job
34382023-09-22T23:08:10.439ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
34392023-09-22T23:08:10.439ZINFOcrucible: jobs are: 4
34402023-09-22T23:08:10.439ZINFOcrucible: Crucible stats registered with UUID: 471b8a98-0f08-4754-9135-7ad94157ce5e
34412023-09-22T23:08:10.439ZINFOcrucible: Crucible 471b8a98-0f08-4754-9135-7ad94157ce5e has session id: 6704f77c-c61f-4544-ad9e-8cb9ea762f74
34422023-09-22T23:08:10.439ZINFOcrucible: [0] 471b8a98-0f08-4754-9135-7ad94157ce5e (c0eacee2-3501-40d4-8616-f8e3a6a14e2b) New New New ds_transition to WaitActive
34432023-09-22T23:08:10.439ZINFOcrucible: [0] Transition from New to WaitActive
34442023-09-22T23:08:10.439ZINFOcrucible: [0] 471b8a98-0f08-4754-9135-7ad94157ce5e (c0eacee2-3501-40d4-8616-f8e3a6a14e2b) WaitActive New New ds_transition to WaitQuorum
34452023-09-22T23:08:10.439ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
34462023-09-22T23:08:10.439ZINFOcrucible: [0] 471b8a98-0f08-4754-9135-7ad94157ce5e (c0eacee2-3501-40d4-8616-f8e3a6a14e2b) WaitQuorum New New ds_transition to Active
34472023-09-22T23:08:10.439ZINFOcrucible: [0] Transition from WaitQuorum to Active
34482023-09-22T23:08:10.439ZINFOcrucible: [1] 471b8a98-0f08-4754-9135-7ad94157ce5e (c0eacee2-3501-40d4-8616-f8e3a6a14e2b) Active New New ds_transition to WaitActive
34492023-09-22T23:08:10.440ZINFOcrucible: [1] Transition from New to WaitActive
34502023-09-22T23:08:10.440ZINFOcrucible: [1] 471b8a98-0f08-4754-9135-7ad94157ce5e (c0eacee2-3501-40d4-8616-f8e3a6a14e2b) Active WaitActive New ds_transition to WaitQuorum
34512023-09-22T23:08:10.440ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
34522023-09-22T23:08:10.440ZINFOcrucible: [1] 471b8a98-0f08-4754-9135-7ad94157ce5e (c0eacee2-3501-40d4-8616-f8e3a6a14e2b) Active WaitQuorum New ds_transition to Active
34532023-09-22T23:08:10.440ZINFOcrucible: [1] Transition from WaitQuorum to Active
34542023-09-22T23:08:10.440ZINFOcrucible: [2] 471b8a98-0f08-4754-9135-7ad94157ce5e (c0eacee2-3501-40d4-8616-f8e3a6a14e2b) Active Active New ds_transition to WaitActive
34552023-09-22T23:08:10.440ZINFOcrucible: [2] Transition from New to WaitActive
34562023-09-22T23:08:10.440ZINFOcrucible: [2] 471b8a98-0f08-4754-9135-7ad94157ce5e (c0eacee2-3501-40d4-8616-f8e3a6a14e2b) Active Active WaitActive ds_transition to WaitQuorum
34572023-09-22T23:08:10.440ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
34582023-09-22T23:08:10.440ZINFOcrucible: [2] 471b8a98-0f08-4754-9135-7ad94157ce5e (c0eacee2-3501-40d4-8616-f8e3a6a14e2b) Active Active WaitQuorum ds_transition to Active
34592023-09-22T23:08:10.440ZINFOcrucible: [2] Transition from WaitQuorum to Active
34602023-09-22T23:08:10.440ZINFOcrucible: 471b8a98-0f08-4754-9135-7ad94157ce5e is now active with session: c0eacee2-3501-40d4-8616-f8e3a6a14e2b
34612023-09-22T23:08:10.440ZINFOcrucible: [1] 471b8a98-0f08-4754-9135-7ad94157ce5e (c0eacee2-3501-40d4-8616-f8e3a6a14e2b) Active Active Active ds_transition to Faulted
34622023-09-22T23:08:10.440ZINFOcrucible: [1] Transition from Active to Faulted
34632023-09-22T23:08:10.440ZINFOcrucible: [1] 471b8a98-0f08-4754-9135-7ad94157ce5e (c0eacee2-3501-40d4-8616-f8e3a6a14e2b) Active Faulted Active ds_transition to LiveRepairReady
34642023-09-22T23:08:10.440ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
34652023-09-22T23:08:10.440ZINFOcrucible: [1] 471b8a98-0f08-4754-9135-7ad94157ce5e (c0eacee2-3501-40d4-8616-f8e3a6a14e2b) Active LiveRepairReady Active ds_transition to LiveRepair
34662023-09-22T23:08:10.440ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
34672023-09-22T23:08:10.440ZINFOcrucible: Waiting for Close + ReOpen jobs
34682023-09-22T23:08:10.440ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
34692023-09-22T23:08:10.440ZINFOcrucible: RE:0 close id:1000 queued, notify DS
34702023-09-22T23:08:10.440ZINFOcrucible: RE:0 Wait for result from close command 1000:1
34712023-09-22T23:08:10.567ZINFOcrucible: Waiting for 4 jobs (currently 3)
34722023-09-22T23:08:10.567ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
34732023-09-22T23:08:10.567ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
34742023-09-22T23:08:10.568ZINFOcrucible: Now move the NoOp job forward
34752023-09-22T23:08:10.568ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
34762023-09-22T23:08:10.568ZERROcrucible: [1] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
34772023-09-22T23:08:10.568ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
34782023-09-22T23:08:10.568ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
34792023-09-22T23:08:10.568ZINFOcrucible: [1] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) LiveRepair Active Active ds_transition to Faulted
34802023-09-22T23:08:10.568ZINFOcrucible: [1] Transition from Active to Faulted
34812023-09-22T23:08:10.568ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
34822023-09-22T23:08:10.568ZINFOcrucible: [0] client skip 4 in process jobs because fault = downstairs
34832023-09-22T23:08:10.568ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
34842023-09-22T23:08:10.568ZINFOcrucible: [0] 60f418b7-1c68-4219-b2c6-8690095ef46b (b7455263-b441-43aa-ab5d-bc91879214ec) LiveRepair Faulted Active ds_transition to Faulted
34852023-09-22T23:08:10.568ZINFOcrucible: [0] Transition from LiveRepair to Faulted
34862023-09-22T23:08:10.568ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
34872023-09-22T23:08:10.569ZINFOcrucible: Now ACK the repair job
34882023-09-22T23:08:10.569ZINFOcrucible: Waiting for 4 jobs (currently 3)
34892023-09-22T23:08:10.569ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
34902023-09-22T23:08:10.570ZINFOcrucible: Waiting for 4 jobs (currently 3)
34912023-09-22T23:08:10.570ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
34922023-09-22T23:08:10.684ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
34932023-09-22T23:08:10.684ZERROcrucible: [1] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
34942023-09-22T23:08:10.684ZINFOcrucible: [1] client skip 3 in process jobs because fault = downstairs
34952023-09-22T23:08:10.684ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
34962023-09-22T23:08:10.684ZINFOcrucible: [1] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) LiveRepair Active Active ds_transition to Faulted
34972023-09-22T23:08:10.684ZINFOcrucible: [1] Transition from Active to Faulted
34982023-09-22T23:08:10.684ZINFOcrucible: Waiting for 4 jobs (currently 3)
34992023-09-22T23:08:10.684ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
35002023-09-22T23:08:10.684ZINFOcrucible: [0] client skip 3 in process jobs because fault = downstairs
35012023-09-22T23:08:10.684ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
35022023-09-22T23:08:10.684ZINFOcrucible: [0] 340c04f3-49b8-4ced-b263-4b0e2af7ead7 (85274e4a-b9a3-47aa-95cb-f70c1885651d) LiveRepair Faulted Active ds_transition to Faulted
35032023-09-22T23:08:10.684ZINFOcrucible: [0] Transition from LiveRepair to Faulted
35042023-09-22T23:08:10.684ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
35052023-09-22T23:08:11.441ZINFOcrucible: Waiting for 3 jobs (currently 2)
35062023-09-22T23:08:11.441ZINFOcrucible: No repair needed for extent 0 = downstairs
35072023-09-22T23:08:11.441ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
35082023-09-22T23:08:11.569ZINFOcrucible: Now move the NoOp job forward
35092023-09-22T23:08:11.569ZINFOcrucible: Now ACK the NoOp job
35102023-09-22T23:08:11.569ZINFOcrucible: Finally, move the ReOpen job forward
35112023-09-22T23:08:11.569ZINFOcrucible: Now ACK the Reopen job
35122023-09-22T23:08:11.569ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
35132023-09-22T23:08:11.569ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
35142023-09-22T23:08:11.569ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
35152023-09-22T23:08:11.569ZWARNcrucible: RE:0 Bailing with error
3516 ----------------------------------------------------------------
3517 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
3518 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
3519 1 Acked 1000 FClose 0 Done Done Err false
3520 2 Acked 1001 NoOp 0 Skip Done Skip false
3521 3 Acked 1002 NoOp 0 Skip Done Skip false
3522 4 Acked 1003 Reopen 0 Skip Done Skip false
3523 STATES DS:0 DS:1 DS:2 TOTAL
3524 New 0 0 0 0
3525 Sent 0 0 0 0
3526 Done 1 4 0 5
3527 Skipped 3 0 3 6
3528 Error 0 0 1 1
3529 Last Flush: 0 0 0
3530 Downstairs last five completed:
3531 Upstairs last five completed: 4 3 2 1
35322023-09-22T23:08:11.570ZINFOcrucible: Crucible stats registered with UUID: bb7a424d-9b30-4a13-96a7-a41c0f6628b1
35332023-09-22T23:08:11.570ZINFOcrucible: Crucible bb7a424d-9b30-4a13-96a7-a41c0f6628b1 has session id: 99f11c25-87f7-4d78-9e1c-2f89f88bf53d
35342023-09-22T23:08:11.570ZINFOcrucible: [0] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) New New New ds_transition to WaitActive
35352023-09-22T23:08:11.570ZINFOcrucible: [0] Transition from New to WaitActive
35362023-09-22T23:08:11.570ZINFOcrucible: [0] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) WaitActive New New ds_transition to WaitQuorum
35372023-09-22T23:08:11.570ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
35382023-09-22T23:08:11.570ZINFOcrucible: [0] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) WaitQuorum New New ds_transition to Active
35392023-09-22T23:08:11.570ZINFOcrucible: [0] Transition from WaitQuorum to Active
35402023-09-22T23:08:11.570ZINFOcrucible: [1] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) Active New New ds_transition to WaitActive
35412023-09-22T23:08:11.570ZINFOcrucible: [1] Transition from New to WaitActive
35422023-09-22T23:08:11.570ZINFOcrucible: [1] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) Active WaitActive New ds_transition to WaitQuorum
35432023-09-22T23:08:11.570ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
35442023-09-22T23:08:11.570ZINFOcrucible: [1] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) Active WaitQuorum New ds_transition to Active
35452023-09-22T23:08:11.570ZINFOcrucible: [1] Transition from WaitQuorum to Active
35462023-09-22T23:08:11.570ZINFOcrucible: [2] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) Active Active New ds_transition to WaitActive
35472023-09-22T23:08:11.570ZINFOcrucible: [2] Transition from New to WaitActive
35482023-09-22T23:08:11.570ZINFOcrucible: [2] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) Active Active WaitActive ds_transition to WaitQuorum
35492023-09-22T23:08:11.570ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
35502023-09-22T23:08:11.570ZINFOcrucible: [2] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) Active Active WaitQuorum ds_transition to Active
35512023-09-22T23:08:11.570ZINFOcrucible: [2] Transition from WaitQuorum to Active
35522023-09-22T23:08:11.570ZINFOcrucible: bb7a424d-9b30-4a13-96a7-a41c0f6628b1 is now active with session: 5a93f758-ea9a-4a78-ad67-6f82f309f96e
35532023-09-22T23:08:11.570ZINFOcrucible: [1] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) Active Active Active ds_transition to Faulted
35542023-09-22T23:08:11.570ZINFOcrucible: [1] Transition from Active to Faulted
35552023-09-22T23:08:11.570ZINFOcrucible: [1] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) Active Faulted Active ds_transition to LiveRepairReady
35562023-09-22T23:08:11.570ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
35572023-09-22T23:08:11.570ZINFOcrucible: [1] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) Active LiveRepairReady Active ds_transition to LiveRepair
3558 {""msg":"msg":"[1] Transition from LiveRepairReady to LiveRepair","v":0,"nameNow move the NoOp job forward"":"crucible",",level":30"v":0,"name":"crucible","level",":time":"302023-09-22T23:08:11.570658866Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
35592023-09-22T23:08:11.570ZINFOcrucible: Waiting for Close + ReOpen jobs
3560 ,"{time":""msg":"2023-09-22T23:08:11.57066757Z"RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]","v":,0","name"hostname:""crucible":,""level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3561 ,"time":"2023-09-22T23:08:11.570703077Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291"}msg"
3562 :"{Now ACK the NoOp job""msg":","v":RE:0 close id:1000 queued, notify DS"0,",v"":0name",":name"":"cruciblecrucible"",",level":"30level":30,"time":"2023-09-22T23:08:11.5707386Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3563 ,{""time"msg:"":"RE:0 Wait for result from close command 1000:12023-09-22T23:08:11.570739934Z"","v":,0","name":"hostnamecrucible"",":level"":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3564 ,"time":"2023-09-22T23:08:11.570767051Z{","hostname":""msg":ip-10-150-1-74.us-west-2.compute.internal"","pid":4291}
3565 Finally, move the ReOpen job forward","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:11.570789712Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
35662023-09-22T23:08:11.570ZINFOcrucible: Now ACK the repair job
35672023-09-22T23:08:11.570ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
3568 test live_repair::repair_test::test_repair_extent_do_repair_all ... ok
35692023-09-22T23:08:11.571ZINFOcrucible: Now move the NoOp job forward
3570 {{"msg":"Crucible stats registered with UUID: a8ba4201-0b23-4992-9a71-32e39bda2e5f"","msg"v:"":0,"name":"crucible"[2] DS Reports error Err(GenericError(,"\"level":bad30\",")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }"time":"2023-09-22T23:08:11.571665256Z",","v"hostname":":0ip-10-150-1-74.us-west-2.compute.internal",,""pid":name"4291:"}crucible"
3571 ,"level":50{"msg":"Crucible a8ba4201-0b23-4992-9a71-32e39bda2e5f has session id: 3f4228c8-9c93-483f-af02-1dcfce7a1e85","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:11.571706091Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,}"
3572 time":"{2023-09-22T23:08:11.571698706Z""msg":","hostname":"[0] a8ba4201-0b23-4992-9a71-32e39bda2e5f (68aef9ae-886d-4c85-a384-fc1ac4cdd4e7) New New New ds_transition to WaitActive"ip-10-150-1-74.us-west-2.compute.internal",",v"":0pid",":name":4291"crucible","level":,30"":"downstairs"}
3573 ,"time":"2023-09-22T23:08:11.571741391Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","{pid":4291}
3574 "msg"{:""msg":"[2] Reports error GenericError(\"[0] Transition from New to WaitActive"bad\","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:11.571765371Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3575 ) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }"{",msg":""v":0,"name":"[0] a8ba4201-0b23-4992-9a71-32e39bda2e5f (68aef9ae-886d-4c85-a384-fc1ac4cdd4e7) WaitActive New New ds_transition to WaitQuorumcrucible"",",v":0","levelname":"":crucible"50,"level":30,"time":"2023-09-22T23:08:11.571793075Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3576 {"msg":"[0] Transition from WaitActive to WaitQuorum",","v"time"::0","name":"crucible",2023-09-22T23:08:11.571792618Z""level":30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,,""time":":""downstairs2023-09-22T23:08:11.571815443Z"","hostname}":"
3577 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3578 {{"msg":""msg":"[0] a8ba4201-0b23-4992-9a71-32e39bda2e5f (68aef9ae-886d-4c85-a384-fc1ac4cdd4e7) WaitQuorum New New ds_transition to Active","v":0,"[2] client skip 4 in process jobs because fault"name":","cruciblev"":,"0level":,30"name":"crucible","level":30,"time":"2023-09-22T23:08:11.571848912Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3579 {"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:11.57185311Z","hostname":","time":"2023-09-22T23:08:11.57186721Z"ip-10-150-1-74.us-west-2.compute.internal,""hostname":","pid":ip-10-150-1-74.us-west-2.compute.internal4291","pid":4291,"}"
3580 :"{downstairs""}msg"
3581 :"{[1] a8ba4201-0b23-4992-9a71-32e39bda2e5f (68aef9ae-886d-4c85-a384-fc1ac4cdd4e7) Active New New ds_transition to WaitActive","v":0,"name":"crucible"","msg"level"::"30[2] changed 0 jobs to fault skipped","v":0,"name":"crucible",","level":time":"302023-09-22T23:08:11.571899656Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3582 {"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:08:11.571908238Ztime":""2023-09-22T23:08:11.571919426Z,"","hostname"hostname":":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291ip-10-150-1-74.us-west-2.compute.internal"}
3583 ,"pid"{:4291"msg":","":"downstairs"}
3584 [1] a8ba4201-0b23-4992-9a71-32e39bda2e5f (68aef9ae-886d-4c85-a384-fc1ac4cdd4e7) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30{"msg":","time":"2023-09-22T23:08:11.571950002Z","hostname":"[2] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) LiveRepair Active Active ds_transition to Faulted"ip-10-150-1-74.us-west-2.compute.internal",",pid":"4291v"}:
3585 0,{"name"":msg":""crucible"[1] Transition from WaitActive to WaitQuorum",,""v":level"0:,"name":"30crucible","level":30,"time":"2023-09-22T23:08:11.571978238Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3586 {"msg":","time":"[1] a8ba4201-0b23-4992-9a71-32e39bda2e5f (68aef9ae-886d-4c85-a384-fc1ac4cdd4e7) Active WaitQuorum New ds_transition to Active","2023-09-22T23:08:11.571977588Z"v":0,",name":""crucible",hostname"":level"":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3587 {"msg":"[2] Transition from Active to Faulted","v":0,"name":"crucible",,""level":time":"302023-09-22T23:08:11.572003813Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3588 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:11.572070113Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"}time"
3589 :"2023-09-22T23:08:11.572087211Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3590 "msg"{:""msg":"RE:0 Wait for result from reopen command 1003:4","v":0[2] a8ba4201-0b23-4992-9a71-32e39bda2e5f (68aef9ae-886d-4c85-a384-fc1ac4cdd4e7) Active Active New ds_transition to WaitActive",,""v":name"0:","namecrucible"":","crucible"level,""level"::3030,"time":"2023-09-22T23:08:11.572126268Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3591 {"msg":"[2] Transition from New to WaitActive","v":0,,""name":"time"crucible":,""level":302023-09-22T23:08:11.572127107Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4291time":"}2023-09-22T23:08:11.572148778Z
3592 ","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
3593 "{msg":""msg":"Extent 0 close id:1003 Failed: Error: bad","v":0[2] a8ba4201-0b23-4992-9a71-32e39bda2e5f (68aef9ae-886d-4c85-a384-fc1ac4cdd4e7) Active Active WaitActive ds_transition to WaitQuorum",,""v":name"0:,""name":"crucible"crucible",",level":"30level":50,"time":"2023-09-22T23:08:11.572183392Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3594 {"msg":"[2] Transition from WaitActive to WaitQuorum","v,":"0,"timename"":":crucible"","level":302023-09-22T23:08:11.572184872Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4291time":"}2023-09-22T23:08:11.572205986Z
3595 ","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
3596 "msg"{:""msg":"[0] client skip 4 in process jobs because fault","v":0[2] a8ba4201-0b23-4992-9a71-32e39bda2e5f (68aef9ae-886d-4c85-a384-fc1ac4cdd4e7) Active Active WaitQuorum ds_transition to Active",,""v":name"0:,""name":"crucible"crucible",","level"level"::3030,"time":"2023-09-22T23:08:11.572240993Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3597 {"msg":"[2] Transition from WaitQuorum to Active",",v":"0,"time"name":":"crucible","level":302023-09-22T23:08:11.572241737Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time":","":"2023-09-22T23:08:11.57226238Z","downstairs"hostname":"}
3598 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3599 {{"msg":""msg":"a8ba4201-0b23-4992-9a71-32e39bda2e5f is now active with session: 68aef9ae-886d-4c85-a384-fc1ac4cdd4e7",[0] changed 0 jobs to fault skipped""v":,"0v",":name":"0crucible",,""level":name30":"crucible","level":30,"time":"2023-09-22T23:08:11.572293756Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3600 {"msg":"[1] a8ba4201-0b23-4992-9a71-32e39bda2e5f (68aef9ae-886d-4c85-a384-fc1ac4cdd4e7) Active Active Active ds_transition to Faulted","v":0,,""name":time"":crucible"","level":302023-09-22T23:08:11.572297341Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4291time":","2023-09-22T23:08:11.572317785Z"",":hostname":""downstairs"ip-10-150-1-74.us-west-2.compute.internal","}pid":
3601 4291}
3602 {{"msg":"[1] Transition from Active to Faulted","v"":0msg",":name"":"crucible","level":30[0] 2a9017ca-3786-4928-aa25-af146cb13064 (41790776-3f0f-4ed5-8993-f2ed274b7319) LiveRepair Active Faulted ds_transition to Faulted","v":0,","time":"name":"2023-09-22T23:08:11.572348058Z"crucible",",hostname":""level":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":4291}
3603 {"msg":"[1] a8ba4201-0b23-4992-9a71-32e39bda2e5f (68aef9ae-886d-4c85-a384-fc1ac4cdd4e7) Active Faulted Active ds_transition to LiveRepairReady","v":0,"name":"crucible","level":30,"time":","time":"2023-09-22T23:08:11.57236227Z"2023-09-22T23:08:11.572373044Z",","hostname":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291ip-10-150-1-74.us-west-2.compute.internal"}
3604 ,"pid"{:4291"msg":"}
3605 [1] Transition from Faulted to LiveRepairReady","v":0,"name":"crucible","level":30{"msg":"[0] Transition from LiveRepair to Faulted",",time":""v":2023-09-22T23:08:11.572398474Z"0,","hostname":name"":"crucible"ip-10-150-1-74.us-west-2.compute.internal",",pid":"4291level"}:
3606 30{"msg":"[1] a8ba4201-0b23-4992-9a71-32e39bda2e5f (68aef9ae-886d-4c85-a384-fc1ac4cdd4e7) Active LiveRepairReady Active ds_transition to LiveRepair","v":0,"name":"crucible","level":30,"time,":""time2023-09-22T23:08:11.572431937Z"":","hostname":"2023-09-22T23:08:11.572423366Z"ip-10-150-1-74.us-west-2.compute.internal",,""pid":4291hostname":}"
3607 ip-10-150-1-74.us-west-2.compute.internal",{"pid"":msg":4291"}[1] Transition from LiveRepairReady to LiveRepair
3608 ","v":0,"name":"crucible","level":30{"msg":"RE:0 Bailing with error","v":,"0time",:""name":"crucible","2023-09-22T23:08:11.572464377Z"level",:"40hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3609 ,"time":"2023-09-22T23:08:11.572478638Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3610 test live_repair::repair_test::test_repair_io_above_el_skipped ... ok
36112023-09-22T23:08:11.573ZINFOcrucible: Crucible stats registered with UUID: e5daa207-6492-4466-92d0-cc614d6c56ce
36122023-09-22T23:08:11.573ZINFOcrucible: Crucible e5daa207-6492-4466-92d0-cc614d6c56ce has session id: f6c82c27-cea7-4f69-8d6f-e4bb7ab6a3bb
36132023-09-22T23:08:11.573ZINFOcrucible: [0] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) New New New ds_transition to WaitActive
36142023-09-22T23:08:11.573ZINFOcrucible: [0] Transition from New to WaitActive
36152023-09-22T23:08:11.573ZINFOcrucible: [0] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) WaitActive New New ds_transition to WaitQuorum
36162023-09-22T23:08:11.573ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
36172023-09-22T23:08:11.573ZINFOcrucible: [0] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) WaitQuorum New New ds_transition to Active
36182023-09-22T23:08:11.573ZINFOcrucible: [0] Transition from WaitQuorum to Active
36192023-09-22T23:08:11.573ZINFOcrucible: [1] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) Active New New ds_transition to WaitActive
36202023-09-22T23:08:11.573ZINFOcrucible: [1] Transition from New to WaitActive
36212023-09-22T23:08:11.573ZINFOcrucible: [1] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) Active WaitActive New ds_transition to WaitQuorum
36222023-09-22T23:08:11.573ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
3623 {"msg":"[1] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) Active WaitQuorum New ds_transition to Active","v":0,"name":"crucible","level":30,{"time":"2023-09-22T23:08:11.573318003Z"","msg"hostname:"":"ip-10-150-1-74.us-west-2.compute.internal","pidCrucible stats registered with UUID: 3bb5f814-a110-4a59-9e32-8b8ba8617c89"":4291,"}v"
3624 {"msg":"[1] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:11.573393707Z","hostname":":ip-10-150-1-74.us-west-2.compute.internal","0pid":4291,"name}":
3625 "crucible","level{":30"msg":"[2] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) Active Active New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:08:11.573422322Ztime"":","hostname"2023-09-22T23:08:11.573433033Z:"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal4291","pid":}4291
3626 }
3627 {{"msg":""msg":"[2] Transition from New to WaitActive","v":0,"name":"Crucible 3bb5f814-a110-4a59-9e32-8b8ba8617c89 has session id: 1d016687-67b4-4780-a901-1e206fe29c5acrucible"",,""vlevel""::030,"name":"crucible","level":30,"time":"2023-09-22T23:08:11.573466598Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
3628 {"msg":"[2] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:11.573469942Z","hostname":",ip-10-150-1-74.us-west-2.compute.internal""time,"":"pid":42912023-09-22T23:08:11.573523504Z"}
3629 ,"hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":"4291msg":"}
3630 {"[0] 3bb5f814-a110-4a59-9e32-8b8ba8617c89 (f4d73180-9a3c-4adf-9c0e-cd91d883ab93) New New New ds_transition to WaitActivemsg"":","v":0,"[2] Transition from WaitActive to WaitQuorum"name":,""v"crucible:"0,,""levelname""::"30crucible","level":30,","time"time:"":"2023-09-22T23:08:11.57355599Z"2023-09-22T23:08:11.573557822Z",","hostname"hostname:"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal,"","pid"pid:":42914291}}
3631 
3632 {{"msg"":"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","[2] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) Active Active WaitQuorum ds_transition to Active"level":,"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:11.573589434Z",","hostname"time:"":"2023-09-22T23:08:11.573593536Z"ip-10-150-1-74.us-west-2.compute.internal",,""pidhostname""::"4291}
3633 ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}
3634 "msg":"{"msg":"[2] Transition from WaitQuorum to Active","v"[0] 3bb5f814-a110-4a59-9e32-8b8ba8617c89 (f4d73180-9a3c-4adf-9c0e-cd91d883ab93) WaitActive New New ds_transition to WaitQuorum:"0,,""vname""::0","crucible"name",:""level"crucible:"30,"level":30,"time,"":"time":"2023-09-22T23:08:11.573625676Z"2023-09-22T23:08:11.573626954Z,"","hostname":hostname"":"ip-10-150-1-74.us-west-2.compute.internal",ip-10-150-1-74.us-west-2.compute.internal""pid,"":pid4291":4291}
3635 }
3636 {"{msg":""msg":"[0] Transition from WaitActive to WaitQuorum","ve5daa207-6492-4466-92d0-cc614d6c56ce is now active with session: 8a6b2194-d5ad-4cea-9c66-5ded3e31a721"":0,","v":name"0:","cruciblename"":,""cruciblelevel"":,"30level":30,,""timetime""::""2023-09-22T23:08:11.573660552Z"2023-09-22T23:08:11.573659644Z",","hostname"hostname:"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",,""pidpid""::42914291}}
3637 
3638 {"{msg":""msg":"[0] 3bb5f814-a110-4a59-9e32-8b8ba8617c89 (f4d73180-9a3c-4adf-9c0e-cd91d883ab93) WaitQuorum New New ds_transition to Active","v"[1] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) Active Active Active ds_transition to Faulted:"0,,""v"name:"0:","cruciblename"":,""cruciblelevel"":,"30level":30,,""timetime""::""2023-09-22T23:08:11.573697916Z2023-09-22T23:08:11.573697048Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291}}
3639 
3640 {"{msg":""msg":"[1] Transition from Active to Faulted","v[0] Transition from WaitQuorum to Active"":0,","v":name0":,""namecrucible"":","cruciblelevel"":,"30level":30,,""timetime""::""2023-09-22T23:08:11.573731115Z2023-09-22T23:08:11.573730444Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291}}
3641 
3642 {{"msg"":"msg":"[1] 3bb5f814-a110-4a59-9e32-8b8ba8617c89 (f4d73180-9a3c-4adf-9c0e-cd91d883ab93) Active New New ds_transition to WaitActive","[1] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) Active Faulted Active ds_transition to LiveRepairReadyv"":,"0v",:"0name,"":"name":crucible"",crucible"",level"":level"30:30,,""timetime""::""2023-09-22T23:08:11.573775536Z2023-09-22T23:08:11.573775326Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291}}
3643 
3644 {"{msg":""msg":"[1] Transition from Faulted to LiveRepairReady",[1] Transition from New to WaitActive""v",:"0v",:"0,name"":name"":"crucible"crucible,"","level":level"30:30,,""timetime""::""2023-09-22T23:08:11.573810728Z2023-09-22T23:08:11.573810405Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291}}
3645 
3646 {"{msg":""msg":"[1] 3bb5f814-a110-4a59-9e32-8b8ba8617c89 (f4d73180-9a3c-4adf-9c0e-cd91d883ab93) Active WaitActive New ds_transition to WaitQuorum","v":[1] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) Active LiveRepairReady Active ds_transition to LiveRepair0",","namev""::"0,crucible""name,"":"level":crucible"30,"level":30,",time"":time"":"2023-09-22T23:08:11.573847805Z"2023-09-22T23:08:11.573849218Z",","hostname":hostname"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal,"","pid"pid:":42914291}}
3647 
3648 {"{msg":""msg":"[1] Transition from WaitActive to WaitQuorum","v":[1] Transition from LiveRepairReady to LiveRepair0",","namev""::"0,"crucible"name",:""level"crucible:"30,"level":30,"time,"":"time":"2023-09-22T23:08:11.573880365Z"2023-09-22T23:08:11.573881743Z,"","hostname":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid",:"4291pid":}4291
3649 }
3650 {"{msg":""msg":"Waiting for Close + ReOpen jobs","v":0,"name"[1] 3bb5f814-a110-4a59-9e32-8b8ba8617c89 (f4d73180-9a3c-4adf-9c0e-cd91d883ab93) Active WaitQuorum New ds_transition to Active:"","crucible"v",:"0level,"":30name":"crucible","level":30,"time":","time"2023-09-22T23:08:11.573913Z:"","hostname":2023-09-22T23:08:11.573915629Z"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291ip-10-150-1-74.us-west-2.compute.internal",}"
3651 pid":4291}{
3652 "msg":"{"msg":"RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]","v":0[1] Transition from WaitQuorum to Active,"",name"":v"":0crucible,"","name":level"":30crucible","level":30,"time":,""time":"2023-09-22T23:08:11.573946115Z","2023-09-22T23:08:11.5739478Z"hostname,"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid,"":pid4291":4291}
3653 }
3654 {"{msg":""msg":"RE:0 close id:1000 queued, notify DS","v":0,"name":"crucible","[2] 3bb5f814-a110-4a59-9e32-8b8ba8617c89 (f4d73180-9a3c-4adf-9c0e-cd91d883ab93) Active Active New ds_transition to WaitActivelevel"":,30"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:11.573977993Z",","hostname"time:"":"2023-09-22T23:08:11.573982233Z"ip-10-150-1-74.us-west-2.compute.internal",,""pidhostname""::"4291}
3655 ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}"
3656 msg":"{"RE:0 Wait for result from close command 1000:1msg"":","v":0,"[2] Transition from New to WaitActive"name":,""v"crucible:"0,,""levelname""::30"crucible","level":30,"time":","time2023-09-22T23:08:11.574011315Z"":","hostname":"2023-09-22T23:08:11.574013602Z","hostname":ip-10-150-1-74.us-west-2.compute.internal"","pid":4291ip-10-150-1-74.us-west-2.compute.internal"},
3657 "pid":4291}
36582023-09-22T23:08:11.574ZINFOcrucible: [2] 3bb5f814-a110-4a59-9e32-8b8ba8617c89 (f4d73180-9a3c-4adf-9c0e-cd91d883ab93) Active Active WaitActive ds_transition to WaitQuorum
36592023-09-22T23:08:11.574ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
36602023-09-22T23:08:11.574ZINFOcrucible: [2] 3bb5f814-a110-4a59-9e32-8b8ba8617c89 (f4d73180-9a3c-4adf-9c0e-cd91d883ab93) Active Active WaitQuorum ds_transition to Active
36612023-09-22T23:08:11.574ZINFOcrucible: [2] Transition from WaitQuorum to Active
36622023-09-22T23:08:11.574ZINFOcrucible: 3bb5f814-a110-4a59-9e32-8b8ba8617c89 is now active with session: f4d73180-9a3c-4adf-9c0e-cd91d883ab93
36632023-09-22T23:08:11.574ZINFOcrucible: [1] 3bb5f814-a110-4a59-9e32-8b8ba8617c89 (f4d73180-9a3c-4adf-9c0e-cd91d883ab93) Active Active Active ds_transition to Faulted
36642023-09-22T23:08:11.574ZINFOcrucible: [1] Transition from Active to Faulted
36652023-09-22T23:08:11.574ZINFOcrucible: [1] 3bb5f814-a110-4a59-9e32-8b8ba8617c89 (f4d73180-9a3c-4adf-9c0e-cd91d883ab93) Active Faulted Active ds_transition to LiveRepairReady
36662023-09-22T23:08:11.574ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
36672023-09-22T23:08:11.574ZINFOcrucible: [1] 3bb5f814-a110-4a59-9e32-8b8ba8617c89 (f4d73180-9a3c-4adf-9c0e-cd91d883ab93) Active LiveRepairReady Active ds_transition to LiveRepair
36682023-09-22T23:08:11.574ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
36692023-09-22T23:08:11.574ZWARNcrucible: Write to Extent 0:0:9 under repair
36702023-09-22T23:08:11.574ZWARNcrucible: Write to Extent 0:0:9 under repair
3671 test live_repair::repair_test::test_repair_io_at_el_sent ... ok
36722023-09-22T23:08:11.575ZINFOcrucible: Crucible stats registered with UUID: 88bef107-1f4b-44d9-ac8b-a7773f1a6565
36732023-09-22T23:08:11.575ZINFOcrucible: Crucible 88bef107-1f4b-44d9-ac8b-a7773f1a6565 has session id: 3de173a3-0b31-42b5-b10d-3ba18bf4e7c1
36742023-09-22T23:08:11.575ZINFOcrucible: [0] 88bef107-1f4b-44d9-ac8b-a7773f1a6565 (9756f0fa-d081-4984-bbe5-479ed9dc2abd) New New New ds_transition to WaitActive
36752023-09-22T23:08:11.575ZINFOcrucible: [0] Transition from New to WaitActive
36762023-09-22T23:08:11.575ZINFOcrucible: [0] 88bef107-1f4b-44d9-ac8b-a7773f1a6565 (9756f0fa-d081-4984-bbe5-479ed9dc2abd) WaitActive New New ds_transition to WaitQuorum
36772023-09-22T23:08:11.575ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
36782023-09-22T23:08:11.575ZINFOcrucible: [0] 88bef107-1f4b-44d9-ac8b-a7773f1a6565 (9756f0fa-d081-4984-bbe5-479ed9dc2abd) WaitQuorum New New ds_transition to Active
36792023-09-22T23:08:11.575ZINFOcrucible: [0] Transition from WaitQuorum to Active
36802023-09-22T23:08:11.575ZINFOcrucible: [1] 88bef107-1f4b-44d9-ac8b-a7773f1a6565 (9756f0fa-d081-4984-bbe5-479ed9dc2abd) Active New New ds_transition to WaitActive
36812023-09-22T23:08:11.575ZINFOcrucible: [1] Transition from New to WaitActive
36822023-09-22T23:08:11.575ZINFOcrucible: [1] 88bef107-1f4b-44d9-ac8b-a7773f1a6565 (9756f0fa-d081-4984-bbe5-479ed9dc2abd) Active WaitActive New ds_transition to WaitQuorum
36832023-09-22T23:08:11.575ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
36842023-09-22T23:08:11.575ZINFOcrucible: [1] 88bef107-1f4b-44d9-ac8b-a7773f1a6565 (9756f0fa-d081-4984-bbe5-479ed9dc2abd) Active WaitQuorum New ds_transition to Active
36852023-09-22T23:08:11.575ZINFOcrucible: [1] Transition from WaitQuorum to Active
36862023-09-22T23:08:11.575ZINFOcrucible: [2] 88bef107-1f4b-44d9-ac8b-a7773f1a6565 (9756f0fa-d081-4984-bbe5-479ed9dc2abd) Active Active New ds_transition to WaitActive
36872023-09-22T23:08:11.575ZINFOcrucible: [2] Transition from New to WaitActive
36882023-09-22T23:08:11.575ZINFOcrucible: [2] 88bef107-1f4b-44d9-ac8b-a7773f1a6565 (9756f0fa-d081-4984-bbe5-479ed9dc2abd) Active Active WaitActive ds_transition to WaitQuorum
36892023-09-22T23:08:11.575ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
36902023-09-22T23:08:11.575ZINFOcrucible: [2] 88bef107-1f4b-44d9-ac8b-a7773f1a6565 (9756f0fa-d081-4984-bbe5-479ed9dc2abd) Active Active WaitQuorum ds_transition to Active
36912023-09-22T23:08:11.575ZINFOcrucible: [2] Transition from WaitQuorum to Active
36922023-09-22T23:08:11.575ZINFOcrucible: 88bef107-1f4b-44d9-ac8b-a7773f1a6565 is now active with session: 9756f0fa-d081-4984-bbe5-479ed9dc2abd
36932023-09-22T23:08:11.575ZINFOcrucible: [1] 88bef107-1f4b-44d9-ac8b-a7773f1a6565 (9756f0fa-d081-4984-bbe5-479ed9dc2abd) Active Active Active ds_transition to Faulted
36942023-09-22T23:08:11.575ZINFOcrucible: [1] Transition from Active to Faulted
36952023-09-22T23:08:11.575ZINFOcrucible: [1] 88bef107-1f4b-44d9-ac8b-a7773f1a6565 (9756f0fa-d081-4984-bbe5-479ed9dc2abd) Active Faulted Active ds_transition to LiveRepairReady
36962023-09-22T23:08:11.575ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
36972023-09-22T23:08:11.575ZINFOcrucible: [1] 88bef107-1f4b-44d9-ac8b-a7773f1a6565 (9756f0fa-d081-4984-bbe5-479ed9dc2abd) Active LiveRepairReady Active ds_transition to LiveRepair
36982023-09-22T23:08:11.575ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
3699 test live_repair::repair_test::test_repair_io_below_el_sent ... ok
37002023-09-22T23:08:11.576ZINFOcrucible: Crucible stats registered with UUID: 40c33510-4dde-4fb8-bcf3-28cc28cde07e
37012023-09-22T23:08:11.576ZINFOcrucible: Crucible 40c33510-4dde-4fb8-bcf3-28cc28cde07e has session id: c5fd77f9-e431-4650-b803-1cbe789e5ab9
37022023-09-22T23:08:11.576ZINFOcrucible: [0] 40c33510-4dde-4fb8-bcf3-28cc28cde07e (bae9366e-30e1-42e2-8fd8-3f0c49bf0433) New New New ds_transition to WaitActive
37032023-09-22T23:08:11.576ZINFOcrucible: [0] Transition from New to WaitActive
37042023-09-22T23:08:11.576ZINFOcrucible: [0] 40c33510-4dde-4fb8-bcf3-28cc28cde07e (bae9366e-30e1-42e2-8fd8-3f0c49bf0433) WaitActive New New ds_transition to WaitQuorum
37052023-09-22T23:08:11.576ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
37062023-09-22T23:08:11.576ZINFOcrucible: [0] 40c33510-4dde-4fb8-bcf3-28cc28cde07e (bae9366e-30e1-42e2-8fd8-3f0c49bf0433) WaitQuorum New New ds_transition to Active
37072023-09-22T23:08:11.576ZINFOcrucible: [0] Transition from WaitQuorum to Active
37082023-09-22T23:08:11.576ZINFOcrucible: [1] 40c33510-4dde-4fb8-bcf3-28cc28cde07e (bae9366e-30e1-42e2-8fd8-3f0c49bf0433) Active New New ds_transition to WaitActive
37092023-09-22T23:08:11.576ZINFOcrucible: [1] Transition from New to WaitActive
37102023-09-22T23:08:11.576ZINFOcrucible: [1] 40c33510-4dde-4fb8-bcf3-28cc28cde07e (bae9366e-30e1-42e2-8fd8-3f0c49bf0433) Active WaitActive New ds_transition to WaitQuorum
37112023-09-22T23:08:11.576ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
37122023-09-22T23:08:11.576ZINFOcrucible: [1] 40c33510-4dde-4fb8-bcf3-28cc28cde07e (bae9366e-30e1-42e2-8fd8-3f0c49bf0433) Active WaitQuorum New ds_transition to Active
37132023-09-22T23:08:11.576ZINFOcrucible: [1] Transition from WaitQuorum to Active
37142023-09-22T23:08:11.576ZINFOcrucible: [2] 40c33510-4dde-4fb8-bcf3-28cc28cde07e (bae9366e-30e1-42e2-8fd8-3f0c49bf0433) Active Active New ds_transition to WaitActive
37152023-09-22T23:08:11.576ZINFOcrucible: [2] Transition from New to WaitActive
37162023-09-22T23:08:11.576ZINFOcrucible: [2] 40c33510-4dde-4fb8-bcf3-28cc28cde07e (bae9366e-30e1-42e2-8fd8-3f0c49bf0433) Active Active WaitActive ds_transition to WaitQuorum
37172023-09-22T23:08:11.576ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
37182023-09-22T23:08:11.576ZINFOcrucible: [2] 40c33510-4dde-4fb8-bcf3-28cc28cde07e (bae9366e-30e1-42e2-8fd8-3f0c49bf0433) Active Active WaitQuorum ds_transition to Active
37192023-09-22T23:08:11.576ZINFOcrucible: [2] Transition from WaitQuorum to Active
37202023-09-22T23:08:11.576ZINFOcrucible: 40c33510-4dde-4fb8-bcf3-28cc28cde07e is now active with session: bae9366e-30e1-42e2-8fd8-3f0c49bf0433
37212023-09-22T23:08:11.576ZINFOcrucible: [1] 40c33510-4dde-4fb8-bcf3-28cc28cde07e (bae9366e-30e1-42e2-8fd8-3f0c49bf0433) Active Active Active ds_transition to Faulted
37222023-09-22T23:08:11.576ZINFOcrucible: [1] Transition from Active to Faulted
37232023-09-22T23:08:11.576ZINFOcrucible: [1] 40c33510-4dde-4fb8-bcf3-28cc28cde07e (bae9366e-30e1-42e2-8fd8-3f0c49bf0433) Active Faulted Active ds_transition to LiveRepairReady
37242023-09-22T23:08:11.576ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
37252023-09-22T23:08:11.576ZINFOcrucible: [1] 40c33510-4dde-4fb8-bcf3-28cc28cde07e (bae9366e-30e1-42e2-8fd8-3f0c49bf0433) Active LiveRepairReady Active ds_transition to LiveRepair
37262023-09-22T23:08:11.576ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
3727 test live_repair::repair_test::test_repair_io_no_el_skipped ... ok
37282023-09-22T23:08:11.577ZINFOcrucible: Crucible stats registered with UUID: b7d2533c-27bb-4b70-ba98-81ede3647353
37292023-09-22T23:08:11.577ZINFOcrucible: Crucible b7d2533c-27bb-4b70-ba98-81ede3647353 has session id: 2a94e344-a7a3-4742-bc06-4a867d1b3f91
37302023-09-22T23:08:11.577ZINFOcrucible: [0] b7d2533c-27bb-4b70-ba98-81ede3647353 (f97dd42b-6e52-41df-971f-7ff9a9ed49fa) New New New ds_transition to WaitActive
37312023-09-22T23:08:11.577ZINFOcrucible: [0] Transition from New to WaitActive
37322023-09-22T23:08:11.577ZINFOcrucible: [0] b7d2533c-27bb-4b70-ba98-81ede3647353 (f97dd42b-6e52-41df-971f-7ff9a9ed49fa) WaitActive New New ds_transition to WaitQuorum
37332023-09-22T23:08:11.577ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
37342023-09-22T23:08:11.577ZINFOcrucible: [0] b7d2533c-27bb-4b70-ba98-81ede3647353 (f97dd42b-6e52-41df-971f-7ff9a9ed49fa) WaitQuorum New New ds_transition to Active
37352023-09-22T23:08:11.577ZINFOcrucible: [0] Transition from WaitQuorum to Active
37362023-09-22T23:08:11.577ZINFOcrucible: [1] b7d2533c-27bb-4b70-ba98-81ede3647353 (f97dd42b-6e52-41df-971f-7ff9a9ed49fa) Active New New ds_transition to WaitActive
37372023-09-22T23:08:11.577ZINFOcrucible: [1] Transition from New to WaitActive
37382023-09-22T23:08:11.577ZINFOcrucible: [1] b7d2533c-27bb-4b70-ba98-81ede3647353 (f97dd42b-6e52-41df-971f-7ff9a9ed49fa) Active WaitActive New ds_transition to WaitQuorum
37392023-09-22T23:08:11.577ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
37402023-09-22T23:08:11.577ZINFOcrucible: [1] b7d2533c-27bb-4b70-ba98-81ede3647353 (f97dd42b-6e52-41df-971f-7ff9a9ed49fa) Active WaitQuorum New ds_transition to Active
37412023-09-22T23:08:11.577ZINFOcrucible: [1] Transition from WaitQuorum to Active
37422023-09-22T23:08:11.577ZINFOcrucible: [2] b7d2533c-27bb-4b70-ba98-81ede3647353 (f97dd42b-6e52-41df-971f-7ff9a9ed49fa) Active Active New ds_transition to WaitActive
37432023-09-22T23:08:11.577ZINFOcrucible: [2] Transition from New to WaitActive
37442023-09-22T23:08:11.577ZINFOcrucible: [2] b7d2533c-27bb-4b70-ba98-81ede3647353 (f97dd42b-6e52-41df-971f-7ff9a9ed49fa) Active Active WaitActive ds_transition to WaitQuorum
37452023-09-22T23:08:11.577ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
37462023-09-22T23:08:11.577ZINFOcrucible: [2] b7d2533c-27bb-4b70-ba98-81ede3647353 (f97dd42b-6e52-41df-971f-7ff9a9ed49fa) Active Active WaitQuorum ds_transition to Active
37472023-09-22T23:08:11.577ZINFOcrucible: [2] Transition from WaitQuorum to Active
37482023-09-22T23:08:11.577ZINFOcrucible: b7d2533c-27bb-4b70-ba98-81ede3647353 is now active with session: f97dd42b-6e52-41df-971f-7ff9a9ed49fa
37492023-09-22T23:08:11.577ZINFOcrucible: [1] b7d2533c-27bb-4b70-ba98-81ede3647353 (f97dd42b-6e52-41df-971f-7ff9a9ed49fa) Active Active Active ds_transition to Faulted
37502023-09-22T23:08:11.577ZINFOcrucible: [1] Transition from Active to Faulted
37512023-09-22T23:08:11.577ZINFOcrucible: [1] b7d2533c-27bb-4b70-ba98-81ede3647353 (f97dd42b-6e52-41df-971f-7ff9a9ed49fa) Active Faulted Active ds_transition to LiveRepairReady
37522023-09-22T23:08:11.577ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
37532023-09-22T23:08:11.578ZINFOcrucible: [1] b7d2533c-27bb-4b70-ba98-81ede3647353 (f97dd42b-6e52-41df-971f-7ff9a9ed49fa) Active LiveRepairReady Active ds_transition to LiveRepair
37542023-09-22T23:08:11.578ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
37552023-09-22T23:08:11.578ZWARNcrucible: Write to Extent 1:0:9 under repair
37562023-09-22T23:08:11.578ZWARNcrucible: Write to Extent 1:1:9 under repair
37572023-09-22T23:08:11.578ZWARNcrucible: Write to Extent 1:2:9 under repair
37582023-09-22T23:08:11.578ZWARNcrucible: Write 2:0:9 past extent under repair 1
37592023-09-22T23:08:11.578ZWARNcrucible: Write 2:1:9 past extent under repair 1
37602023-09-22T23:08:11.578ZWARNcrucible: Write 2:2:9 past extent under repair 1
37612023-09-22T23:08:11.578ZWARNcrucible: IO Write 1004 on eur 1 Added deps 2
37622023-09-22T23:08:11.578ZWARNcrucible: Create read repair deps for extent 2
37632023-09-22T23:08:11.578ZWARNcrucible: IO Read 1005 extent 1 added deps 2
37642023-09-22T23:08:11.578ZWARNcrucible: Write to Extent 1:0:9 under repair
37652023-09-22T23:08:11.578ZWARNcrucible: Write to Extent 1:1:9 under repair
37662023-09-22T23:08:11.578ZWARNcrucible: Write to Extent 1:2:9 under repair
37672023-09-22T23:08:11.578ZWARNcrucible: Write 2:0:9 past extent under repair 1
37682023-09-22T23:08:11.578ZWARNcrucible: Write 2:1:9 past extent under repair 1
37692023-09-22T23:08:11.578ZWARNcrucible: Write 2:2:9 past extent under repair 1
37702023-09-22T23:08:11.578ZWARNcrucible: IO Write 1006 on eur 1 Added deps 2
3771 test live_repair::repair_test::test_repair_io_span_el_sent ... ok
37722023-09-22T23:08:11.579ZINFOcrucible: Crucible stats registered with UUID: ddbb46f6-ed67-479c-8e81-15b7886450cd
37732023-09-22T23:08:11.579ZINFOcrucible: Crucible ddbb46f6-ed67-479c-8e81-15b7886450cd has session id: f8cd1053-bdd4-4f75-a78e-4f93dda8f9c3
37742023-09-22T23:08:11.579ZINFOcrucible: [0] ddbb46f6-ed67-479c-8e81-15b7886450cd (1025ea98-7a45-49c5-9cc6-a08e2c6de343) New New New ds_transition to WaitActive
37752023-09-22T23:08:11.579ZINFOcrucible: [0] Transition from New to WaitActive
37762023-09-22T23:08:11.579ZINFOcrucible: [0] ddbb46f6-ed67-479c-8e81-15b7886450cd (1025ea98-7a45-49c5-9cc6-a08e2c6de343) WaitActive New New ds_transition to WaitQuorum
37772023-09-22T23:08:11.579ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
37782023-09-22T23:08:11.579ZINFOcrucible: [0] ddbb46f6-ed67-479c-8e81-15b7886450cd (1025ea98-7a45-49c5-9cc6-a08e2c6de343) WaitQuorum New New ds_transition to Active
37792023-09-22T23:08:11.579ZINFOcrucible: [0] Transition from WaitQuorum to Active
37802023-09-22T23:08:11.579ZINFOcrucible: [1] ddbb46f6-ed67-479c-8e81-15b7886450cd (1025ea98-7a45-49c5-9cc6-a08e2c6de343) Active New New ds_transition to WaitActive
37812023-09-22T23:08:11.579ZINFOcrucible: [1] Transition from New to WaitActive
37822023-09-22T23:08:11.579ZINFOcrucible: [1] ddbb46f6-ed67-479c-8e81-15b7886450cd (1025ea98-7a45-49c5-9cc6-a08e2c6de343) Active WaitActive New ds_transition to WaitQuorum
37832023-09-22T23:08:11.579ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
37842023-09-22T23:08:11.579ZINFOcrucible: [1] ddbb46f6-ed67-479c-8e81-15b7886450cd (1025ea98-7a45-49c5-9cc6-a08e2c6de343) Active WaitQuorum New ds_transition to Active
37852023-09-22T23:08:11.579ZINFOcrucible: [1] Transition from WaitQuorum to Active
37862023-09-22T23:08:11.579ZINFOcrucible: [2] ddbb46f6-ed67-479c-8e81-15b7886450cd (1025ea98-7a45-49c5-9cc6-a08e2c6de343) Active Active New ds_transition to WaitActive
37872023-09-22T23:08:11.579ZINFOcrucible: [2] Transition from New to WaitActive
37882023-09-22T23:08:11.579ZINFOcrucible: [2] ddbb46f6-ed67-479c-8e81-15b7886450cd (1025ea98-7a45-49c5-9cc6-a08e2c6de343) Active Active WaitActive ds_transition to WaitQuorum
37892023-09-22T23:08:11.579ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
37902023-09-22T23:08:11.579ZINFOcrucible: [2] ddbb46f6-ed67-479c-8e81-15b7886450cd (1025ea98-7a45-49c5-9cc6-a08e2c6de343) Active Active WaitQuorum ds_transition to Active
37912023-09-22T23:08:11.579ZINFOcrucible: [2] Transition from WaitQuorum to Active
37922023-09-22T23:08:11.579ZINFOcrucible: ddbb46f6-ed67-479c-8e81-15b7886450cd is now active with session: 1025ea98-7a45-49c5-9cc6-a08e2c6de343
37932023-09-22T23:08:11.579ZINFOcrucible: [1] ddbb46f6-ed67-479c-8e81-15b7886450cd (1025ea98-7a45-49c5-9cc6-a08e2c6de343) Active Active Active ds_transition to Faulted
37942023-09-22T23:08:11.579ZINFOcrucible: [1] Transition from Active to Faulted
37952023-09-22T23:08:11.579ZINFOcrucible: [1] ddbb46f6-ed67-479c-8e81-15b7886450cd (1025ea98-7a45-49c5-9cc6-a08e2c6de343) Active Faulted Active ds_transition to LiveRepairReady
37962023-09-22T23:08:11.579ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
37972023-09-22T23:08:11.579ZINFOcrucible: [1] ddbb46f6-ed67-479c-8e81-15b7886450cd (1025ea98-7a45-49c5-9cc6-a08e2c6de343) Active LiveRepairReady Active ds_transition to LiveRepair
37982023-09-22T23:08:11.579ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
37992023-09-22T23:08:11.579ZWARNcrucible: Create read repair deps for extent 2
38002023-09-22T23:08:11.579ZWARNcrucible: IO Read 1004 extent 1 added deps 2
3801 test live_repair::repair_test::test_repair_read_span_el_sent ... ok
38022023-09-22T23:08:11.580ZINFOcrucible: Crucible stats registered with UUID: ddbe0475-cb31-4366-8d95-8ff6d6cadb7d
38032023-09-22T23:08:11.580ZINFOcrucible: Crucible ddbe0475-cb31-4366-8d95-8ff6d6cadb7d has session id: 51ea6dad-bef7-469c-9bae-157ef82d84ae
38042023-09-22T23:08:11.580ZINFOcrucible: [0] ddbe0475-cb31-4366-8d95-8ff6d6cadb7d (e680a265-0383-4b2d-b5c1-7fda88a3d8b1) New New New ds_transition to WaitActive
38052023-09-22T23:08:11.580ZINFOcrucible: [0] Transition from New to WaitActive
38062023-09-22T23:08:11.580ZINFOcrucible: [0] ddbe0475-cb31-4366-8d95-8ff6d6cadb7d (e680a265-0383-4b2d-b5c1-7fda88a3d8b1) WaitActive New New ds_transition to WaitQuorum
38072023-09-22T23:08:11.580ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
38082023-09-22T23:08:11.580ZINFOcrucible: [0] ddbe0475-cb31-4366-8d95-8ff6d6cadb7d (e680a265-0383-4b2d-b5c1-7fda88a3d8b1) WaitQuorum New New ds_transition to Active
38092023-09-22T23:08:11.580ZINFOcrucible: [0] Transition from WaitQuorum to Active
38102023-09-22T23:08:11.580ZINFOcrucible: [1] ddbe0475-cb31-4366-8d95-8ff6d6cadb7d (e680a265-0383-4b2d-b5c1-7fda88a3d8b1) Active New New ds_transition to WaitActive
38112023-09-22T23:08:11.580ZINFOcrucible: [1] Transition from New to WaitActive
38122023-09-22T23:08:11.580ZINFOcrucible: [1] ddbe0475-cb31-4366-8d95-8ff6d6cadb7d (e680a265-0383-4b2d-b5c1-7fda88a3d8b1) Active WaitActive New ds_transition to WaitQuorum
38132023-09-22T23:08:11.580ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
38142023-09-22T23:08:11.580ZINFOcrucible: [1] ddbe0475-cb31-4366-8d95-8ff6d6cadb7d (e680a265-0383-4b2d-b5c1-7fda88a3d8b1) Active WaitQuorum New ds_transition to Active
38152023-09-22T23:08:11.580ZINFOcrucible: [1] Transition from WaitQuorum to Active
38162023-09-22T23:08:11.580ZINFOcrucible: [2] ddbe0475-cb31-4366-8d95-8ff6d6cadb7d (e680a265-0383-4b2d-b5c1-7fda88a3d8b1) Active Active New ds_transition to WaitActive
38172023-09-22T23:08:11.580ZINFOcrucible: [2] Transition from New to WaitActive
38182023-09-22T23:08:11.580ZINFOcrucible: [2] ddbe0475-cb31-4366-8d95-8ff6d6cadb7d (e680a265-0383-4b2d-b5c1-7fda88a3d8b1) Active Active WaitActive ds_transition to WaitQuorum
38192023-09-22T23:08:11.580ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
38202023-09-22T23:08:11.580ZINFOcrucible: [2] ddbe0475-cb31-4366-8d95-8ff6d6cadb7d (e680a265-0383-4b2d-b5c1-7fda88a3d8b1) Active Active WaitQuorum ds_transition to Active
38212023-09-22T23:08:11.580ZINFOcrucible: [2] Transition from WaitQuorum to Active
38222023-09-22T23:08:11.580ZINFOcrucible: ddbe0475-cb31-4366-8d95-8ff6d6cadb7d is now active with session: e680a265-0383-4b2d-b5c1-7fda88a3d8b1
38232023-09-22T23:08:11.580ZINFOcrucible: [1] ddbe0475-cb31-4366-8d95-8ff6d6cadb7d (e680a265-0383-4b2d-b5c1-7fda88a3d8b1) Active Active Active ds_transition to Faulted
38242023-09-22T23:08:11.580ZINFOcrucible: [1] Transition from Active to Faulted
38252023-09-22T23:08:11.580ZINFOcrucible: [1] ddbe0475-cb31-4366-8d95-8ff6d6cadb7d (e680a265-0383-4b2d-b5c1-7fda88a3d8b1) Active Faulted Active ds_transition to LiveRepairReady
38262023-09-22T23:08:11.580ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
38272023-09-22T23:08:11.580ZINFOcrucible: [1] ddbe0475-cb31-4366-8d95-8ff6d6cadb7d (e680a265-0383-4b2d-b5c1-7fda88a3d8b1) Active LiveRepairReady Active ds_transition to LiveRepair
38282023-09-22T23:08:11.580ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
38292023-09-22T23:08:11.580ZWARNcrucible: Write to Extent 1:0:9 under repair
38302023-09-22T23:08:11.580ZWARNcrucible: Write to Extent 1:1:9 under repair
38312023-09-22T23:08:11.580ZWARNcrucible: Write to Extent 1:2:9 under repair
38322023-09-22T23:08:11.580ZWARNcrucible: Write 2:0:9 past extent under repair 1
38332023-09-22T23:08:11.580ZWARNcrucible: Write 2:1:9 past extent under repair 1
38342023-09-22T23:08:11.580ZWARNcrucible: Write 2:2:9 past extent under repair 1
38352023-09-22T23:08:11.580ZWARNcrucible: IO Write 1004 on eur 1 Added deps 2
3836 test live_repair::repair_test::test_repair_write_span_el_sent ... ok
38372023-09-22T23:08:11.581ZINFOcrucible: Crucible stats registered with UUID: 4a1430a7-119a-493e-9505-fe3d80a07fe9
38382023-09-22T23:08:11.581ZINFOcrucible: Crucible 4a1430a7-119a-493e-9505-fe3d80a07fe9 has session id: 1df896f2-b218-460b-9ff9-6c436b028406
38392023-09-22T23:08:11.581ZINFOcrucible: [0] 4a1430a7-119a-493e-9505-fe3d80a07fe9 (f37022e1-0807-4535-91d2-ef91755ded99) New New New ds_transition to WaitActive
38402023-09-22T23:08:11.581ZINFOcrucible: [0] Transition from New to WaitActive
38412023-09-22T23:08:11.581ZINFOcrucible: [0] 4a1430a7-119a-493e-9505-fe3d80a07fe9 (f37022e1-0807-4535-91d2-ef91755ded99) WaitActive New New ds_transition to WaitQuorum
38422023-09-22T23:08:11.581ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
38432023-09-22T23:08:11.581ZINFOcrucible: [0] 4a1430a7-119a-493e-9505-fe3d80a07fe9 (f37022e1-0807-4535-91d2-ef91755ded99) WaitQuorum New New ds_transition to Active
38442023-09-22T23:08:11.581ZINFOcrucible: [0] Transition from WaitQuorum to Active
38452023-09-22T23:08:11.581ZINFOcrucible: [1] 4a1430a7-119a-493e-9505-fe3d80a07fe9 (f37022e1-0807-4535-91d2-ef91755ded99) Active New New ds_transition to WaitActive
38462023-09-22T23:08:11.581ZINFOcrucible: [1] Transition from New to WaitActive
38472023-09-22T23:08:11.581ZINFOcrucible: [1] 4a1430a7-119a-493e-9505-fe3d80a07fe9 (f37022e1-0807-4535-91d2-ef91755ded99) Active WaitActive New ds_transition to WaitQuorum
38482023-09-22T23:08:11.581ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
38492023-09-22T23:08:11.581ZINFOcrucible: [1] 4a1430a7-119a-493e-9505-fe3d80a07fe9 (f37022e1-0807-4535-91d2-ef91755ded99) Active WaitQuorum New ds_transition to Active
38502023-09-22T23:08:11.581ZINFOcrucible: [1] Transition from WaitQuorum to Active
38512023-09-22T23:08:11.581ZINFOcrucible: [2] 4a1430a7-119a-493e-9505-fe3d80a07fe9 (f37022e1-0807-4535-91d2-ef91755ded99) Active Active New ds_transition to WaitActive
38522023-09-22T23:08:11.581ZINFOcrucible: [2] Transition from New to WaitActive
38532023-09-22T23:08:11.581ZINFOcrucible: [2] 4a1430a7-119a-493e-9505-fe3d80a07fe9 (f37022e1-0807-4535-91d2-ef91755ded99) Active Active WaitActive ds_transition to WaitQuorum
38542023-09-22T23:08:11.581ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
38552023-09-22T23:08:11.581ZINFOcrucible: [2] 4a1430a7-119a-493e-9505-fe3d80a07fe9 (f37022e1-0807-4535-91d2-ef91755ded99) Active Active WaitQuorum ds_transition to Active
38562023-09-22T23:08:11.581ZINFOcrucible: [2] Transition from WaitQuorum to Active
38572023-09-22T23:08:11.581ZINFOcrucible: 4a1430a7-119a-493e-9505-fe3d80a07fe9 is now active with session: f37022e1-0807-4535-91d2-ef91755ded99
38582023-09-22T23:08:11.582ZINFOcrucible: [1] 4a1430a7-119a-493e-9505-fe3d80a07fe9 (f37022e1-0807-4535-91d2-ef91755ded99) Active Active Active ds_transition to Faulted
38592023-09-22T23:08:11.582ZINFOcrucible: [1] Transition from Active to Faulted
38602023-09-22T23:08:11.582ZINFOcrucible: [1] 4a1430a7-119a-493e-9505-fe3d80a07fe9 (f37022e1-0807-4535-91d2-ef91755ded99) Active Faulted Active ds_transition to LiveRepairReady
38612023-09-22T23:08:11.582ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
38622023-09-22T23:08:11.582ZINFOcrucible: [1] 4a1430a7-119a-493e-9505-fe3d80a07fe9 (f37022e1-0807-4535-91d2-ef91755ded99) Active LiveRepairReady Active ds_transition to LiveRepair
38632023-09-22T23:08:11.582ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
38642023-09-22T23:08:11.582ZWARNcrucible: Write to Extent 0:0:9 under repair
38652023-09-22T23:08:11.582ZWARNcrucible: Write to Extent 0:1:9 under repair
38662023-09-22T23:08:11.582ZWARNcrucible: Write to Extent 0:2:9 under repair
38672023-09-22T23:08:11.582ZWARNcrucible: Write 1:0:9 past extent under repair 0
38682023-09-22T23:08:11.582ZWARNcrucible: Write 1:1:9 past extent under repair 0
38692023-09-22T23:08:11.582ZWARNcrucible: Write 1:2:9 past extent under repair 0
38702023-09-22T23:08:11.582ZWARNcrucible: Write 2:0:9 past extent under repair 0
38712023-09-22T23:08:11.582ZWARNcrucible: Write 2:1:9 past extent under repair 0
38722023-09-22T23:08:11.582ZWARNcrucible: Write 2:2:9 past extent under repair 0
38732023-09-22T23:08:11.582ZWARNcrucible: IO Write 1008 on eur 0 Added deps 1
38742023-09-22T23:08:11.582ZWARNcrucible: IO Write 1008 on eur 0 Added deps 2
3875 test live_repair::repair_test::test_repair_write_span_two_el_sent ... ok
38762023-09-22T23:08:11.583ZINFOcrucible: Crucible stats registered with UUID: f977a52e-aaca-4b8d-a154-33554e95a9ae
38772023-09-22T23:08:11.583ZINFOcrucible: Crucible f977a52e-aaca-4b8d-a154-33554e95a9ae has session id: 8e529dfd-872a-4973-a616-82c0bf4954f7
38782023-09-22T23:08:11.583ZINFOcrucible: [0] f977a52e-aaca-4b8d-a154-33554e95a9ae (f4abcc1d-5f2e-4c67-ac26-73a45f2966c8) New New New ds_transition to WaitActive
38792023-09-22T23:08:11.583ZINFOcrucible: [0] Transition from New to WaitActive
38802023-09-22T23:08:11.583ZINFOcrucible: [0] f977a52e-aaca-4b8d-a154-33554e95a9ae (f4abcc1d-5f2e-4c67-ac26-73a45f2966c8) WaitActive New New ds_transition to WaitQuorum
38812023-09-22T23:08:11.583ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
38822023-09-22T23:08:11.583ZINFOcrucible: [0] f977a52e-aaca-4b8d-a154-33554e95a9ae (f4abcc1d-5f2e-4c67-ac26-73a45f2966c8) WaitQuorum New New ds_transition to Active
38832023-09-22T23:08:11.583ZINFOcrucible: [0] Transition from WaitQuorum to Active
38842023-09-22T23:08:11.583ZINFOcrucible: [1] f977a52e-aaca-4b8d-a154-33554e95a9ae (f4abcc1d-5f2e-4c67-ac26-73a45f2966c8) Active New New ds_transition to WaitActive
38852023-09-22T23:08:11.583ZINFOcrucible: [1] Transition from New to WaitActive
38862023-09-22T23:08:11.583ZINFOcrucible: [1] f977a52e-aaca-4b8d-a154-33554e95a9ae (f4abcc1d-5f2e-4c67-ac26-73a45f2966c8) Active WaitActive New ds_transition to WaitQuorum
38872023-09-22T23:08:11.583ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
38882023-09-22T23:08:11.583ZINFOcrucible: [1] f977a52e-aaca-4b8d-a154-33554e95a9ae (f4abcc1d-5f2e-4c67-ac26-73a45f2966c8) Active WaitQuorum New ds_transition to Active
38892023-09-22T23:08:11.583ZINFOcrucible: [1] Transition from WaitQuorum to Active
38902023-09-22T23:08:11.583ZINFOcrucible: [2] f977a52e-aaca-4b8d-a154-33554e95a9ae (f4abcc1d-5f2e-4c67-ac26-73a45f2966c8) Active Active New ds_transition to WaitActive
38912023-09-22T23:08:11.583ZINFOcrucible: [2] Transition from New to WaitActive
38922023-09-22T23:08:11.583ZINFOcrucible: [2] f977a52e-aaca-4b8d-a154-33554e95a9ae (f4abcc1d-5f2e-4c67-ac26-73a45f2966c8) Active Active WaitActive ds_transition to WaitQuorum
38932023-09-22T23:08:11.583ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
38942023-09-22T23:08:11.583ZINFOcrucible: [2] f977a52e-aaca-4b8d-a154-33554e95a9ae (f4abcc1d-5f2e-4c67-ac26-73a45f2966c8) Active Active WaitQuorum ds_transition to Active
38952023-09-22T23:08:11.583ZINFOcrucible: [2] Transition from WaitQuorum to Active
38962023-09-22T23:08:11.583ZINFOcrucible: f977a52e-aaca-4b8d-a154-33554e95a9ae is now active with session: f4abcc1d-5f2e-4c67-ac26-73a45f2966c8
38972023-09-22T23:08:11.583ZINFOcrucible: [1] f977a52e-aaca-4b8d-a154-33554e95a9ae (f4abcc1d-5f2e-4c67-ac26-73a45f2966c8) Active Active Active ds_transition to Faulted
38982023-09-22T23:08:11.583ZINFOcrucible: [1] Transition from Active to Faulted
38992023-09-22T23:08:11.583ZINFOcrucible: [1] f977a52e-aaca-4b8d-a154-33554e95a9ae (f4abcc1d-5f2e-4c67-ac26-73a45f2966c8) Active Faulted Active ds_transition to LiveRepairReady
39002023-09-22T23:08:11.583ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
39012023-09-22T23:08:11.583ZINFOcrucible: [1] f977a52e-aaca-4b8d-a154-33554e95a9ae (f4abcc1d-5f2e-4c67-ac26-73a45f2966c8) Active LiveRepairReady Active ds_transition to LiveRepair
39022023-09-22T23:08:11.583ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
3903 test live_repair::repair_test::test_reserve_extent_repair_ids ... ok
3904 Testing repair with s:0 r:[ClientId(1)]
3905 Sep 22 23:08:11.584 DEBG Get repair info for 0 source, : downstairs
3906 Sep 22 23:08:11.584 DEBG Get repair info for 1 bad, : downstairs
3907 Sep 22 23:08:11.584 INFO Repair for extent 0 s:0 d:[ClientId(1)], : downstairs
3908 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3909 Testing repair with s:0 r:[ClientId(2)]
3910 Sep 22 23:08:11.584 DEBG Get repair info for 0 source, : downstairs
3911 Sep 22 23:08:11.584 DEBG Get repair info for 2 bad, : downstairs
3912 Sep 22 23:08:11.584 INFO Repair for extent 0 s:0 d:[ClientId(2)], : downstairs
3913 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3914 Testing repair with s:1 r:[ClientId(0)]
3915 Sep 22 23:08:11.584 DEBG Get repair info for 1 source, : downstairs
3916 Sep 22 23:08:11.584 DEBG Get repair info for 0 bad, : downstairs
3917 Sep 22 23:08:11.584 INFO Repair for extent 0 s:1 d:[ClientId(0)], : downstairs
3918 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3919 Testing repair with s:1 r:[ClientId(2)]
3920 Sep 22 23:08:11.584 DEBG Get repair info for 1 source, : downstairs
3921 Sep 22 23:08:11.584 DEBG Get repair info for 2 bad, : downstairs
3922 Sep 22 23:08:11.584 INFO Repair for extent 0 s:1 d:[ClientId(2)], : downstairs
3923 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3924 Testing repair with s:2 r:[ClientId(0)]
3925 Sep 22 23:08:11.584 DEBG Get repair info for 2 source, : downstairs
3926 Sep 22 23:08:11.584 DEBG Get repair info for 0 bad, : downstairs
3927 Sep 22 23:08:11.584 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
3928 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3929 Testing repair with s:2 r:[ClientId(1)]
3930 Sep 22 23:08:11.584 DEBG Get repair info for 2 source, : downstairs
3931 Sep 22 23:08:11.584 DEBG Get repair info for 1 bad, : downstairs
3932 Sep 22 23:08:11.584 INFO Repair for extent 0 s:2 d:[ClientId(1)], : downstairs
3933 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3934 test live_repair::repair_test::test_solver_dirty_needs_repair_one ... ok
3935 Testing repair with s:0 r:[ClientId(1), ClientId(2)]
3936 Sep 22 23:08:11.585 DEBG Get repair info for 0 source, : downstairs
3937 Sep 22 23:08:11.585 DEBG Get repair info for 1 bad, : downstairs
3938 Sep 22 23:08:11.585 DEBG Get repair info for 2 bad, : downstairs
3939 Sep 22 23:08:11.585 INFO Repair for extent 0 s:0 d:[ClientId(1), ClientId(2)], : downstairs
3940 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3941 Testing repair with s:1 r:[ClientId(0), ClientId(2)]
3942 Sep 22 23:08:11.585 DEBG Get repair info for 1 source, : downstairs
3943 Sep 22 23:08:11.585 DEBG Get repair info for 0 bad, : downstairs
3944 Sep 22 23:08:11.585 DEBG Get repair info for 2 bad, : downstairs
3945 Sep 22 23:08:11.585 INFO Repair for extent 0 s:1 d:[ClientId(0), ClientId(2)], : downstairs
3946 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3947 Testing repair with s:2 r:[ClientId(0), ClientId(1)]
3948 Sep 22 23:08:11.585 DEBG Get repair info for 2 source, : downstairs
3949 Sep 22 23:08:11.585 DEBG Get repair info for 0 bad, : downstairs
3950 Sep 22 23:08:11.585 DEBG Get repair info for 1 bad, : downstairs
3951 Sep 22 23:08:11.585 INFO Repair for extent 0 s:2 d:[ClientId(0), ClientId(1)], : downstairs
3952 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3953 test live_repair::repair_test::test_solver_dirty_needs_repair_two ... ok
3954 Testing repair with s:0 r:[ClientId(1)]
3955 Sep 22 23:08:11.585 DEBG Get repair info for 0 source, : downstairs
3956 Sep 22 23:08:11.585 DEBG Get repair info for 1 bad, : downstairs
3957 Sep 22 23:08:11.585 INFO Repair for extent 0 s:0 d:[ClientId(1)], : downstairs
3958 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3959 Testing repair with s:0 r:[ClientId(2)]
3960 Sep 22 23:08:11.585 DEBG Get repair info for 0 source, : downstairs
3961 Sep 22 23:08:11.585 DEBG Get repair info for 2 bad, : downstairs
3962 Sep 22 23:08:11.585 INFO Repair for extent 0 s:0 d:[ClientId(2)], : downstairs
3963 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3964 Testing repair with s:1 r:[ClientId(0)]
3965 Sep 22 23:08:11.585 DEBG Get repair info for 1 source, : downstairs
3966 Sep 22 23:08:11.585 DEBG Get repair info for 0 bad, : downstairs
3967 Sep 22 23:08:11.585 INFO Repair for extent 0 s:1 d:[ClientId(0)], : downstairs
3968 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3969 Testing repair with s:1 r:[ClientId(2)]
3970 Sep 22 23:08:11.585 DEBG Get repair info for 1 source, : downstairs
3971 Sep 22 23:08:11.585 DEBG Get repair info for 2 bad, : downstairs
3972 Sep 22 23:08:11.585 INFO Repair for extent 0 s:1 d:[ClientId(2)], : downstairs
3973 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3974 Testing repair with s:2 r:[ClientId(0)]
3975 Sep 22 23:08:11.585 DEBG Get repair info for 2 source, : downstairs
3976 Sep 22 23:08:11.585 DEBG Get repair info for 0 bad, : downstairs
3977 Sep 22 23:08:11.585 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
3978 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3979 Testing repair with s:2 r:[ClientId(1)]
3980 Sep 22 23:08:11.585 DEBG Get repair info for 2 source, : downstairs
3981 Sep 22 23:08:11.585 DEBG Get repair info for 1 bad, : downstairs
3982 Sep 22 23:08:11.585 INFO Repair for extent 0 s:2 d:[ClientId(1)], : downstairs
3983 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3984 test live_repair::repair_test::test_solver_flush_higher_needs_repair_one ... ok
3985 Testing repair with s:0 r:[ClientId(1), ClientId(2)]
3986 Sep 22 23:08:11.586 DEBG Get repair info for 0 source, : downstairs
3987 Sep 22 23:08:11.586 DEBG Get repair info for 1 bad, : downstairs
3988 Sep 22 23:08:11.586 DEBG Get repair info for 2 bad, : downstairs
3989 Sep 22 23:08:11.586 INFO Repair for extent 0 s:0 d:[ClientId(1), ClientId(2)], : downstairs
3990 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3991 Testing repair with s:1 r:[ClientId(0), ClientId(2)]
3992 Sep 22 23:08:11.586 DEBG Get repair info for 1 source, : downstairs
3993 Sep 22 23:08:11.586 DEBG Get repair info for 0 bad, : downstairs
3994 Sep 22 23:08:11.586 DEBG Get repair info for 2 bad, : downstairs
3995 Sep 22 23:08:11.586 INFO Repair for extent 0 s:1 d:[ClientId(0), ClientId(2)], : downstairs
3996 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
3997 Testing repair with s:2 r:[ClientId(0), ClientId(1)]
3998 Sep 22 23:08:11.586 DEBG Get repair info for 2 source, : downstairs
3999 Sep 22 23:08:11.586 DEBG Get repair info for 0 bad, : downstairs
4000 Sep 22 23:08:11.586 DEBG Get repair info for 1 bad, : downstairs
4001 Sep 22 23:08:11.586 INFO Repair for extent 0 s:2 d:[ClientId(0), ClientId(1)], : downstairs
4002 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4003 test live_repair::repair_test::test_solver_flush_higher_needs_repair_two ... ok
4004 Testing repair with s:0 r:[ClientId(1)]
4005 Sep 22 23:08:11.587 DEBG Get repair info for 0 source, : downstairs
4006 Sep 22 23:08:11.587 DEBG Get repair info for 1 bad, : downstairs
4007 Sep 22 23:08:11.587 INFO Repair for extent 0 s:0 d:[ClientId(1)], : downstairs
4008 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4009 Testing repair with s:0 r:[ClientId(2)]
4010 Sep 22 23:08:11.587 DEBG Get repair info for 0 source, : downstairs
4011 Sep 22 23:08:11.587 DEBG Get repair info for 2 bad, : downstairs
4012 Sep 22 23:08:11.587 INFO Repair for extent 0 s:0 d:[ClientId(2)], : downstairs
4013 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4014 Testing repair with s:1 r:[ClientId(0)]
4015 Sep 22 23:08:11.587 DEBG Get repair info for 1 source, : downstairs
4016 Sep 22 23:08:11.587 DEBG Get repair info for 0 bad, : downstairs
4017 Sep 22 23:08:11.587 INFO Repair for extent 0 s:1 d:[ClientId(0)], : downstairs
4018 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4019 Testing repair with s:1 r:[ClientId(2)]
4020 Sep 22 23:08:11.587 DEBG Get repair info for 1 source, : downstairs
4021 Sep 22 23:08:11.587 DEBG Get repair info for 2 bad, : downstairs
4022 Sep 22 23:08:11.587 INFO Repair for extent 0 s:1 d:[ClientId(2)], : downstairs
4023 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4024 Testing repair with s:2 r:[ClientId(0)]
4025 Sep 22 23:08:11.587 DEBG Get repair info for 2 source, : downstairs
4026 Sep 22 23:08:11.587 DEBG Get repair info for 0 bad, : downstairs
4027 Sep 22 23:08:11.587 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
4028 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4029 Testing repair with s:2 r:[ClientId(1)]
4030 Sep 22 23:08:11.587 DEBG Get repair info for 2 source, : downstairs
4031 Sep 22 23:08:11.587 DEBG Get repair info for 1 bad, : downstairs
4032 Sep 22 23:08:11.587 INFO Repair for extent 0 s:2 d:[ClientId(1)], : downstairs
4033 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4034 test live_repair::repair_test::test_solver_flush_lower_needs_repair_one ... ok
4035 Testing repair with s:0 r:[ClientId(1), ClientId(2)]
4036 Sep 22 23:08:11.587 DEBG Get repair info for 0 source, : downstairs
4037 Sep 22 23:08:11.587 DEBG Get repair info for 1 bad, : downstairs
4038 Sep 22 23:08:11.587 DEBG Get repair info for 2 bad, : downstairs
4039 Sep 22 23:08:11.587 INFO Repair for extent 0 s:0 d:[ClientId(1), ClientId(2)], : downstairs
4040 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4041 Testing repair with s:1 r:[ClientId(0), ClientId(2)]
4042 Sep 22 23:08:11.587 DEBG Get repair info for 1 source, : downstairs
4043 Sep 22 23:08:11.587 DEBG Get repair info for 0 bad, : downstairs
4044 Sep 22 23:08:11.587 DEBG Get repair info for 2 bad, : downstairs
4045 Sep 22 23:08:11.587 INFO Repair for extent 0 s:1 d:[ClientId(0), ClientId(2)], : downstairs
4046 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4047 Testing repair with s:2 r:[ClientId(0), ClientId(1)]
4048 Sep 22 23:08:11.587 DEBG Get repair info for 2 source, : downstairs
4049 Sep 22 23:08:11.587 DEBG Get repair info for 0 bad, : downstairs
4050 Sep 22 23:08:11.587 DEBG Get repair info for 1 bad, : downstairs
4051 Sep 22 23:08:11.587 INFO Repair for extent 0 s:2 d:[ClientId(0), ClientId(1)], : downstairs
4052 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4053 test live_repair::repair_test::test_solver_flush_lower_needs_repair_two ... ok
4054 Testing repair with s:0 r:[ClientId(1)]
4055 Sep 22 23:08:11.588 DEBG Get repair info for 0 source, : downstairs
4056 Sep 22 23:08:11.588 DEBG Get repair info for 1 bad, : downstairs
4057 Sep 22 23:08:11.588 INFO Repair for extent 0 s:0 d:[ClientId(1)], : downstairs
4058 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4059 Testing repair with s:0 r:[ClientId(2)]
4060 Sep 22 23:08:11.588 DEBG Get repair info for 0 source, : downstairs
4061 Sep 22 23:08:11.588 DEBG Get repair info for 2 bad, : downstairs
4062 Sep 22 23:08:11.588 INFO Repair for extent 0 s:0 d:[ClientId(2)], : downstairs
4063 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4064 Testing repair with s:1 r:[ClientId(0)]
4065 Sep 22 23:08:11.588 DEBG Get repair info for 1 source, : downstairs
4066 Sep 22 23:08:11.588 DEBG Get repair info for 0 bad, : downstairs
4067 Sep 22 23:08:11.588 INFO Repair for extent 0 s:1 d:[ClientId(0)], : downstairs
4068 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4069 Testing repair with s:1 r:[ClientId(2)]
4070 Sep 22 23:08:11.588 DEBG Get repair info for 1 source, : downstairs
4071 Sep 22 23:08:11.588 DEBG Get repair info for 2 bad, : downstairs
4072 Sep 22 23:08:11.588 INFO Repair for extent 0 s:1 d:[ClientId(2)], : downstairs
4073 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4074 Testing repair with s:2 r:[ClientId(0)]
4075 Sep 22 23:08:11.588 DEBG Get repair info for 2 source, : downstairs
4076 Sep 22 23:08:11.588 DEBG Get repair info for 0 bad, : downstairs
4077 Sep 22 23:08:11.588 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
4078 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4079 Testing repair with s:2 r:[ClientId(1)]
4080 Sep 22 23:08:11.588 DEBG Get repair info for 2 source, : downstairs
4081 Sep 22 23:08:11.588 DEBG Get repair info for 1 bad, : downstairs
4082 Sep 22 23:08:11.588 INFO Repair for extent 0 s:2 d:[ClientId(1)], : downstairs
4083 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4084 test live_repair::repair_test::test_solver_gen_higher_needs_repair_one ... ok
4085 Testing repair with s:0 r:[ClientId(1), ClientId(2)]
4086 Sep 22 23:08:11.589 DEBG Get repair info for 0 source, : downstairs
4087 Sep 22 23:08:11.589 DEBG Get repair info for 1 bad, : downstairs
4088 Sep 22 23:08:11.589 DEBG Get repair info for 2 bad, : downstairs
4089 Sep 22 23:08:11.589 INFO Repair for extent 0 s:0 d:[ClientId(1), ClientId(2)], : downstairs
4090 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4091 Testing repair with s:1 r:[ClientId(0), ClientId(2)]
4092 Sep 22 23:08:11.589 DEBG Get repair info for 1 source, : downstairs
4093 Sep 22 23:08:11.589 DEBG Get repair info for 0 bad, : downstairs
4094 Sep 22 23:08:11.589 DEBG Get repair info for 2 bad, : downstairs
4095 Sep 22 23:08:11.589 INFO Repair for extent 0 s:1 d:[ClientId(0), ClientId(2)], : downstairs
4096 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4097 Testing repair with s:2 r:[ClientId(0), ClientId(1)]
4098 Sep 22 23:08:11.589 DEBG Get repair info for 2 source, : downstairs
4099 Sep 22 23:08:11.589 DEBG Get repair info for 0 bad, : downstairs
4100 Sep 22 23:08:11.589 DEBG Get repair info for 1 bad, : downstairs
4101 Sep 22 23:08:11.589 INFO Repair for extent 0 s:2 d:[ClientId(0), ClientId(1)], : downstairs
4102 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4103 test live_repair::repair_test::test_solver_gen_higher_needs_repair_two ... ok
4104 Testing repair with s:0 r:[ClientId(1)]
4105 Sep 22 23:08:11.589 DEBG Get repair info for 0 source, : downstairs
4106 Sep 22 23:08:11.589 DEBG Get repair info for 1 bad, : downstairs
4107 Sep 22 23:08:11.589 INFO Repair for extent 0 s:0 d:[ClientId(1)], : downstairs
4108 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4109 Testing repair with s:0 r:[ClientId(2)]
4110 Sep 22 23:08:11.589 DEBG Get repair info for 0 source, : downstairs
4111 Sep 22 23:08:11.589 DEBG Get repair info for 2 bad, : downstairs
4112 Sep 22 23:08:11.589 INFO Repair for extent 0 s:0 d:[ClientId(2)], : downstairs
4113 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4114 Testing repair with s:1 r:[ClientId(0)]
4115 Sep 22 23:08:11.589 DEBG Get repair info for 1 source, : downstairs
4116 Sep 22 23:08:11.589 DEBG Get repair info for 0 bad, : downstairs
4117 Sep 22 23:08:11.589 INFO Repair for extent 0 s:1 d:[ClientId(0)], : downstairs
4118 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4119 Testing repair with s:1 r:[ClientId(2)]
4120 Sep 22 23:08:11.589 DEBG Get repair info for 1 source, : downstairs
4121 Sep 22 23:08:11.589 DEBG Get repair info for 2 bad, : downstairs
4122 Sep 22 23:08:11.589 INFO Repair for extent 0 s:1 d:[ClientId(2)], : downstairs
4123 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4124 Testing repair with s:2 r:[ClientId(0)]
4125 Sep 22 23:08:11.589 DEBG Get repair info for 2 source, : downstairs
4126 Sep 22 23:08:11.589 DEBG Get repair info for 0 bad, : downstairs
4127 Sep 22 23:08:11.589 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
4128 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4129 Testing repair with s:2 r:[ClientId(1)]
4130 Sep 22 23:08:11.589 DEBG Get repair info for 2 source, : downstairs
4131 Sep 22 23:08:11.589 DEBG Get repair info for 1 bad, : downstairs
4132 Sep 22 23:08:11.589 INFO Repair for extent 0 s:2 d:[ClientId(1)], : downstairs
4133 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4134 test live_repair::repair_test::test_solver_gen_lower_needs_repair_one ... ok
4135 Testing repair with s:0 r:[ClientId(1), ClientId(2)]
4136 Sep 22 23:08:11.590 DEBG Get repair info for 0 source, : downstairs
4137 Sep 22 23:08:11.590 DEBG Get repair info for 1 bad, : downstairs
4138 Sep 22 23:08:11.590 DEBG Get repair info for 2 bad, : downstairs
4139 Sep 22 23:08:11.590 INFO Repair for extent 0 s:0 d:[ClientId(1), ClientId(2)], : downstairs
4140 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(0), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(1), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4141 Testing repair with s:1 r:[ClientId(0), ClientId(2)]
4142 Sep 22 23:08:11.590 DEBG Get repair info for 1 source, : downstairs
4143 Sep 22 23:08:11.590 DEBG Get repair info for 0 bad, : downstairs
4144 Sep 22 23:08:11.590 DEBG Get repair info for 2 bad, : downstairs
4145 Sep 22 23:08:11.590 INFO Repair for extent 0 s:1 d:[ClientId(0), ClientId(2)], : downstairs
4146 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(1), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(2)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4147 Testing repair with s:2 r:[ClientId(0), ClientId(1)]
4148 Sep 22 23:08:11.590 DEBG Get repair info for 2 source, : downstairs
4149 Sep 22 23:08:11.590 DEBG Get repair info for 0 bad, : downstairs
4150 Sep 22 23:08:11.590 DEBG Get repair info for 1 bad, : downstairs
4151 Sep 22 23:08:11.590 INFO Repair for extent 0 s:2 d:[ClientId(0), ClientId(1)], : downstairs
4152 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveRepair { dependencies: [], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:1234, repair_downstairs: [ClientId(0), ClientId(1)] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4153 test live_repair::repair_test::test_solver_gen_lower_needs_repair_two ... ok
4154 Sep 22 23:08:11.590 DEBG Get repair info for 0 source, : downstairs
4155 Sep 22 23:08:11.590 DEBG Get repair info for 1 bad, : downstairs
4156 Sep 22 23:08:11.590 DEBG Get repair info for 2 bad, : downstairs
4157 Sep 22 23:08:11.590 INFO No repair needed for extent 0, : downstairs
4158 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveNoOp { dependencies: [] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4159 Passed for source 0
4160 Sep 22 23:08:11.590 DEBG Get repair info for 1 source, : downstairs
4161 Sep 22 23:08:11.590 DEBG Get repair info for 0 bad, : downstairs
4162 Sep 22 23:08:11.591 DEBG Get repair info for 2 bad, : downstairs
4163 Sep 22 23:08:11.591 INFO No repair needed for extent 0, : downstairs
4164 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveNoOp { dependencies: [] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4165 Passed for source 1
4166 Sep 22 23:08:11.591 DEBG Get repair info for 2 source, : downstairs
4167 Sep 22 23:08:11.591 DEBG Get repair info for 0 bad, : downstairs
4168 Sep 22 23:08:11.591 DEBG Get repair info for 1 bad, : downstairs
4169 Sep 22 23:08:11.591 INFO No repair needed for extent 0, : downstairs
4170 repair op: DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentLiveNoOp { dependencies: [] }, state: ClientData([New, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty }
4171 Passed for source 2
4172 test live_repair::repair_test::test_solver_no_work ... ok
41732023-09-22T23:08:11.591ZINFOcrucible: Crucible stats registered with UUID: 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5
41742023-09-22T23:08:11.591ZINFOcrucible: Crucible 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 has session id: 2e11232a-0bde-48dd-b700-3773d8ee797a
41752023-09-22T23:08:11.591ZINFOcrucible: [0] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) New New New ds_transition to WaitActive
41762023-09-22T23:08:11.591ZINFOcrucible: [0] Transition from New to WaitActive
41772023-09-22T23:08:11.591ZINFOcrucible: [0] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) WaitActive New New ds_transition to WaitQuorum
41782023-09-22T23:08:11.591ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
41792023-09-22T23:08:11.591ZINFOcrucible: [0] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) WaitQuorum New New ds_transition to Active
41802023-09-22T23:08:11.591ZINFOcrucible: [0] Transition from WaitQuorum to Active
41812023-09-22T23:08:11.591ZINFOcrucible: [1] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) Active New New ds_transition to WaitActive
41822023-09-22T23:08:11.591ZINFOcrucible: [1] Transition from New to WaitActive
41832023-09-22T23:08:11.591ZINFOcrucible: [1] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) Active WaitActive New ds_transition to WaitQuorum
41842023-09-22T23:08:11.591ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
41852023-09-22T23:08:11.591ZINFOcrucible: [1] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) Active WaitQuorum New ds_transition to Active
41862023-09-22T23:08:11.591ZINFOcrucible: [1] Transition from WaitQuorum to Active
41872023-09-22T23:08:11.592ZINFOcrucible: [2] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) Active Active New ds_transition to WaitActive
41882023-09-22T23:08:11.592ZINFOcrucible: [2] Transition from New to WaitActive
41892023-09-22T23:08:11.592ZINFOcrucible: [2] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) Active Active WaitActive ds_transition to WaitQuorum
41902023-09-22T23:08:11.592ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
41912023-09-22T23:08:11.592ZINFOcrucible: [2] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) Active Active WaitQuorum ds_transition to Active
41922023-09-22T23:08:11.592ZINFOcrucible: [2] Transition from WaitQuorum to Active
41932023-09-22T23:08:11.592ZINFOcrucible: 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 is now active with session: 7e42f015-9c9b-4236-a152-064c8ad97d22
41942023-09-22T23:08:11.592ZINFOcrucible: [1] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) Active Active Active ds_transition to Faulted
41952023-09-22T23:08:11.592ZINFOcrucible: [1] Transition from Active to Faulted
41962023-09-22T23:08:11.592ZINFOcrucible: [1] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) Active Faulted Active ds_transition to LiveRepairReady
41972023-09-22T23:08:11.592ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
41982023-09-22T23:08:11.592ZINFOcrucible: [1] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) Active LiveRepairReady Active ds_transition to LiveRepair
41992023-09-22T23:08:11.592ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
42002023-09-22T23:08:11.592ZINFOcrucible: [1] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) Active LiveRepair Active ds_transition to Faulted
42012023-09-22T23:08:11.592ZINFOcrucible: [1] Transition from LiveRepair to Faulted
42022023-09-22T23:08:11.592ZINFOcrucible: [1] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) Active Faulted Active ds_transition to LiveRepairReady
42032023-09-22T23:08:11.592ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
42042023-09-22T23:08:11.592ZINFOcrucible: [1] 343c66cc-8bda-4c3b-8e86-1cbe92d25fe5 (7e42f015-9c9b-4236-a152-064c8ad97d22) Active LiveRepairReady Active ds_transition to LiveRepair
42052023-09-22T23:08:11.592ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
42062023-09-22T23:08:11.592ZWARNcrucible: Write to Extent 0:2:9 under repair
42072023-09-22T23:08:11.592ZWARNcrucible: Write 1:0:9 past extent under repair 0
42082023-09-22T23:08:11.592ZWARNcrucible: IO Write 1005 on eur 0 Added deps 1
42092023-09-22T23:08:11.592ZWARNcrucible: Create read repair deps for extent 2
42102023-09-22T23:08:11.592ZWARNcrucible: IO Read 1010 extent 0 added deps 2
4211 test live_repair::repair_test::test_spicy_live_repair ... ok
4212 test mend::test::reconcile_dirty_length_bad - should panic ... ok
42132023-09-22T23:08:11.593ZINFOcrucible: Extents 2 dirty
42142023-09-22T23:08:11.593ZINFOcrucible: First source client ID for extent 2 mrl = dirty
42152023-09-22T23:08:11.593ZINFOcrucible: extent:2 gens: 7 7 7 mrl = dirty
42162023-09-22T23:08:11.593ZINFOcrucible: extent:2 flush: 2 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
42172023-09-22T23:08:11.593ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = dirty
42182023-09-22T23:08:11.593ZINFOcrucible: find dest for source 2 for extent at index 2 mrl = dirty
42192023-09-22T23:08:11.593ZINFOcrucible: source 2, add dest 0 flush mrl = dirty
42202023-09-22T23:08:11.593ZINFOcrucible: source 2, add dest 1 flush mrl = dirty
4221 test mend::test::reconcile_dirty_mismatch_c0 ... ok
42222023-09-22T23:08:11.594ZINFOcrucible: Extents 2 dirty
42232023-09-22T23:08:11.594ZINFOcrucible: First source client ID for extent 2 mrl = dirty
42242023-09-22T23:08:11.594ZINFOcrucible: extent:2 gens: 7 7 7 mrl = dirty
42252023-09-22T23:08:11.594ZINFOcrucible: extent:2 flush: 2 2 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
42262023-09-22T23:08:11.594ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
42272023-09-22T23:08:11.594ZINFOcrucible: extent:2 dirty: false true false mrl = dirty
42282023-09-22T23:08:11.594ZINFOcrucible: find dest for source 1 for extent at index 2 mrl = dirty
42292023-09-22T23:08:11.594ZINFOcrucible: source 1, add dest 0 source flush mrl = dirty
42302023-09-22T23:08:11.594ZINFOcrucible: source 1, add dest 2 source flush mrl = dirty
4231 test mend::test::reconcile_dirty_mismatch_c1 ... ok
42322023-09-22T23:08:11.595ZINFOcrucible: Extents 1 dirty
42332023-09-22T23:08:11.595ZINFOcrucible: First source client ID for extent 1 mrl = dirty
42342023-09-22T23:08:11.595ZINFOcrucible: extent:1 gens: 8 8 7 mrl = dirty
42352023-09-22T23:08:11.595ZINFOcrucible: extent:1 flush: 1 1 1 scs: [ClientId(0), ClientId(1)] mrl = dirty
42362023-09-22T23:08:11.595ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = dirty
42372023-09-22T23:08:11.595ZINFOcrucible: extent:1 dirty: false false true mrl = dirty
42382023-09-22T23:08:11.595ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = dirty
42392023-09-22T23:08:11.595ZINFOcrucible: find dest for source 0 for extent at index 1 mrl = dirty
42402023-09-22T23:08:11.595ZINFOcrucible: source 0, add dest 2 gen mrl = dirty
4241 test mend::test::reconcile_dirty_mismatch_c2 ... ok
42422023-09-22T23:08:11.595ZINFOcrucible: Extents 0 dirty
42432023-09-22T23:08:11.595ZINFOcrucible: First source client ID for extent 0 mrl = dirty
42442023-09-22T23:08:11.595ZINFOcrucible: extent:0 gens: 9 9 9 mrl = dirty
42452023-09-22T23:08:11.595ZINFOcrucible: extent:0 flush: 2 2 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
42462023-09-22T23:08:11.596ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
42472023-09-22T23:08:11.596ZINFOcrucible: extent:0 dirty: true true true mrl = dirty
42482023-09-22T23:08:11.596ZINFOcrucible: find dest for source 0 for extent at index 0 mrl = dirty
42492023-09-22T23:08:11.596ZINFOcrucible: source 0, add dest 1 source flush mrl = dirty
42502023-09-22T23:08:11.596ZINFOcrucible: source 0, add dest 2 source flush mrl = dirty
42512023-09-22T23:08:11.596ZINFOcrucible: Extents 3 dirty
42522023-09-22T23:08:11.596ZINFOcrucible: First source client ID for extent 3 mrl = dirty
42532023-09-22T23:08:11.596ZINFOcrucible: extent:3 gens: 7 7 7 mrl = dirty
42542023-09-22T23:08:11.596ZINFOcrucible: extent:3 flush: 1 1 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
42552023-09-22T23:08:11.596ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
42562023-09-22T23:08:11.596ZINFOcrucible: extent:3 dirty: true true true mrl = dirty
42572023-09-22T23:08:11.596ZINFOcrucible: find dest for source 0 for extent at index 3 mrl = dirty
42582023-09-22T23:08:11.596ZINFOcrucible: source 0, add dest 1 source flush mrl = dirty
42592023-09-22T23:08:11.596ZINFOcrucible: source 0, add dest 2 source flush mrl = dirty
4260 ef.dest [
4261 ClientId(
4262 1,
4263 ),
4264 ClientId(
4265 2,
4266 ),
4267 ]
4268 test mend::test::reconcile_dirty_true ... ok
42692023-09-22T23:08:11.596ZINFOcrucible: Extent 1 has flush number mismatch
42702023-09-22T23:08:11.596ZINFOcrucible: First source client ID for extent 1 mrl = flush_mismatch
42712023-09-22T23:08:11.596ZINFOcrucible: extent:1 gens: 1 1 1 mrl = flush_mismatch
42722023-09-22T23:08:11.596ZINFOcrucible: extent:1 flush: 2 1 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
42732023-09-22T23:08:11.597ZINFOcrucible: max_flush now has: [ClientId(0)] mrl = flush_mismatch
42742023-09-22T23:08:11.597ZINFOcrucible: find dest for source 0 for extent at index 1 mrl = flush_mismatch
42752023-09-22T23:08:11.597ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
42762023-09-22T23:08:11.597ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
42772023-09-22T23:08:11.597ZINFOcrucible: Extent 2 has flush number mismatch
42782023-09-22T23:08:11.597ZINFOcrucible: First source client ID for extent 2 mrl = flush_mismatch
42792023-09-22T23:08:11.597ZINFOcrucible: extent:2 gens: 1 1 1 mrl = flush_mismatch
42802023-09-22T23:08:11.597ZINFOcrucible: extent:2 flush: 3 1 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
42812023-09-22T23:08:11.597ZINFOcrucible: max_flush now has: [ClientId(0)] mrl = flush_mismatch
42822023-09-22T23:08:11.597ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = flush_mismatch
42832023-09-22T23:08:11.597ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
42842023-09-22T23:08:11.597ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
42852023-09-22T23:08:11.597ZINFOcrucible: Extent 3 has flush number mismatch
42862023-09-22T23:08:11.597ZINFOcrucible: First source client ID for extent 3 mrl = flush_mismatch
42872023-09-22T23:08:11.597ZINFOcrucible: extent:3 gens: 1 1 1 mrl = flush_mismatch
42882023-09-22T23:08:11.597ZINFOcrucible: extent:3 flush: 1 2 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
42892023-09-22T23:08:11.597ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
42902023-09-22T23:08:11.597ZINFOcrucible: find dest for source 1 for extent at index 3 mrl = flush_mismatch
42912023-09-22T23:08:11.597ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
42922023-09-22T23:08:11.597ZINFOcrucible: source 1, add dest 2 flush mrl = flush_mismatch
42932023-09-22T23:08:11.597ZINFOcrucible: Extent 4 has flush number mismatch
42942023-09-22T23:08:11.597ZINFOcrucible: First source client ID for extent 4 mrl = flush_mismatch
42952023-09-22T23:08:11.597ZINFOcrucible: extent:4 gens: 1 1 1 mrl = flush_mismatch
42962023-09-22T23:08:11.597ZINFOcrucible: extent:4 flush: 2 2 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
42972023-09-22T23:08:11.597ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = flush_mismatch
42982023-09-22T23:08:11.597ZINFOcrucible: extent:4 dirty: false false false mrl = flush_mismatch
42992023-09-22T23:08:11.597ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = flush_mismatch
43002023-09-22T23:08:11.597ZINFOcrucible: find dest for source 0 for extent at index 4 mrl = flush_mismatch
43012023-09-22T23:08:11.597ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
43022023-09-22T23:08:11.597ZINFOcrucible: Extent 5 has flush number mismatch
43032023-09-22T23:08:11.597ZINFOcrucible: First source client ID for extent 5 mrl = flush_mismatch
43042023-09-22T23:08:11.597ZINFOcrucible: extent:5 gens: 1 1 1 mrl = flush_mismatch
43052023-09-22T23:08:11.597ZINFOcrucible: extent:5 flush: 3 2 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
43062023-09-22T23:08:11.597ZINFOcrucible: max_flush now has: [ClientId(0)] mrl = flush_mismatch
43072023-09-22T23:08:11.597ZINFOcrucible: find dest for source 0 for extent at index 5 mrl = flush_mismatch
43082023-09-22T23:08:11.597ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
43092023-09-22T23:08:11.597ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
43102023-09-22T23:08:11.597ZINFOcrucible: Extent 6 has flush number mismatch
43112023-09-22T23:08:11.597ZINFOcrucible: First source client ID for extent 6 mrl = flush_mismatch
43122023-09-22T23:08:11.597ZINFOcrucible: extent:6 gens: 1 1 1 mrl = flush_mismatch
43132023-09-22T23:08:11.597ZINFOcrucible: extent:6 flush: 1 3 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
43142023-09-22T23:08:11.597ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
43152023-09-22T23:08:11.597ZINFOcrucible: find dest for source 1 for extent at index 6 mrl = flush_mismatch
43162023-09-22T23:08:11.597ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
43172023-09-22T23:08:11.597ZINFOcrucible: source 1, add dest 2 flush mrl = flush_mismatch
43182023-09-22T23:08:11.597ZINFOcrucible: Extent 7 has flush number mismatch
43192023-09-22T23:08:11.597ZINFOcrucible: First source client ID for extent 7 mrl = flush_mismatch
43202023-09-22T23:08:11.597ZINFOcrucible: extent:7 gens: 1 1 1 mrl = flush_mismatch
43212023-09-22T23:08:11.597ZINFOcrucible: extent:7 flush: 2 3 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
43222023-09-22T23:08:11.597ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
43232023-09-22T23:08:11.597ZINFOcrucible: find dest for source 1 for extent at index 7 mrl = flush_mismatch
43242023-09-22T23:08:11.598ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
43252023-09-22T23:08:11.598ZINFOcrucible: source 1, add dest 2 flush mrl = flush_mismatch
43262023-09-22T23:08:11.598ZINFOcrucible: Extent 8 has flush number mismatch
43272023-09-22T23:08:11.598ZINFOcrucible: First source client ID for extent 8 mrl = flush_mismatch
43282023-09-22T23:08:11.598ZINFOcrucible: extent:8 gens: 1 1 1 mrl = flush_mismatch
43292023-09-22T23:08:11.598ZINFOcrucible: extent:8 flush: 3 3 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
43302023-09-22T23:08:11.598ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = flush_mismatch
43312023-09-22T23:08:11.598ZINFOcrucible: extent:8 dirty: false false false mrl = flush_mismatch
43322023-09-22T23:08:11.598ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = flush_mismatch
43332023-09-22T23:08:11.598ZINFOcrucible: find dest for source 0 for extent at index 8 mrl = flush_mismatch
43342023-09-22T23:08:11.598ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
4335 test mend::test::reconcile_flush_a ... ok
43362023-09-22T23:08:11.598ZINFOcrucible: Extent 0 has flush number mismatch
43372023-09-22T23:08:11.598ZINFOcrucible: First source client ID for extent 0 mrl = flush_mismatch
43382023-09-22T23:08:11.598ZINFOcrucible: extent:0 gens: 1 1 1 mrl = flush_mismatch
43392023-09-22T23:08:11.598ZINFOcrucible: extent:0 flush: 1 1 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
43402023-09-22T23:08:11.598ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
43412023-09-22T23:08:11.598ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = flush_mismatch
43422023-09-22T23:08:11.599ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
43432023-09-22T23:08:11.599ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
43442023-09-22T23:08:11.599ZINFOcrucible: Extent 1 has flush number mismatch
43452023-09-22T23:08:11.599ZINFOcrucible: First source client ID for extent 1 mrl = flush_mismatch
43462023-09-22T23:08:11.599ZINFOcrucible: extent:1 gens: 1 1 1 mrl = flush_mismatch
43472023-09-22T23:08:11.599ZINFOcrucible: extent:1 flush: 2 1 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
43482023-09-22T23:08:11.599ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = flush_mismatch
43492023-09-22T23:08:11.599ZINFOcrucible: extent:1 dirty: false false false mrl = flush_mismatch
43502023-09-22T23:08:11.599ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = flush_mismatch
43512023-09-22T23:08:11.599ZINFOcrucible: find dest for source 0 for extent at index 1 mrl = flush_mismatch
43522023-09-22T23:08:11.599ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
43532023-09-22T23:08:11.599ZINFOcrucible: Extent 2 has flush number mismatch
43542023-09-22T23:08:11.599ZINFOcrucible: First source client ID for extent 2 mrl = flush_mismatch
43552023-09-22T23:08:11.599ZINFOcrucible: extent:2 gens: 1 1 1 mrl = flush_mismatch
43562023-09-22T23:08:11.599ZINFOcrucible: extent:2 flush: 3 1 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
43572023-09-22T23:08:11.599ZINFOcrucible: max_flush now has: [ClientId(0)] mrl = flush_mismatch
43582023-09-22T23:08:11.599ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = flush_mismatch
43592023-09-22T23:08:11.599ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
43602023-09-22T23:08:11.599ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
43612023-09-22T23:08:11.599ZINFOcrucible: Extent 3 has flush number mismatch
43622023-09-22T23:08:11.599ZINFOcrucible: First source client ID for extent 3 mrl = flush_mismatch
43632023-09-22T23:08:11.599ZINFOcrucible: extent:3 gens: 1 1 1 mrl = flush_mismatch
43642023-09-22T23:08:11.599ZINFOcrucible: extent:3 flush: 1 2 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
43652023-09-22T23:08:11.599ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = flush_mismatch
43662023-09-22T23:08:11.599ZINFOcrucible: extent:3 dirty: false false false mrl = flush_mismatch
43672023-09-22T23:08:11.599ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = flush_mismatch
43682023-09-22T23:08:11.599ZINFOcrucible: find dest for source 1 for extent at index 3 mrl = flush_mismatch
43692023-09-22T23:08:11.599ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
43702023-09-22T23:08:11.599ZINFOcrucible: Extent 5 has flush number mismatch
43712023-09-22T23:08:11.599ZINFOcrucible: First source client ID for extent 5 mrl = flush_mismatch
43722023-09-22T23:08:11.599ZINFOcrucible: extent:5 gens: 1 1 1 mrl = flush_mismatch
43732023-09-22T23:08:11.599ZINFOcrucible: extent:5 flush: 3 2 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
43742023-09-22T23:08:11.599ZINFOcrucible: max_flush now has: [ClientId(0)] mrl = flush_mismatch
43752023-09-22T23:08:11.599ZINFOcrucible: find dest for source 0 for extent at index 5 mrl = flush_mismatch
43762023-09-22T23:08:11.599ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
43772023-09-22T23:08:11.599ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
43782023-09-22T23:08:11.599ZINFOcrucible: Extent 6 has flush number mismatch
43792023-09-22T23:08:11.599ZINFOcrucible: First source client ID for extent 6 mrl = flush_mismatch
43802023-09-22T23:08:11.599ZINFOcrucible: extent:6 gens: 1 1 1 mrl = flush_mismatch
43812023-09-22T23:08:11.599ZINFOcrucible: extent:6 flush: 1 3 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
43822023-09-22T23:08:11.599ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
43832023-09-22T23:08:11.599ZINFOcrucible: find dest for source 1 for extent at index 6 mrl = flush_mismatch
43842023-09-22T23:08:11.599ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
43852023-09-22T23:08:11.599ZINFOcrucible: source 1, add dest 2 flush mrl = flush_mismatch
43862023-09-22T23:08:11.599ZINFOcrucible: Extent 7 has flush number mismatch
43872023-09-22T23:08:11.599ZINFOcrucible: First source client ID for extent 7 mrl = flush_mismatch
43882023-09-22T23:08:11.599ZINFOcrucible: extent:7 gens: 1 1 1 mrl = flush_mismatch
43892023-09-22T23:08:11.599ZINFOcrucible: extent:7 flush: 2 3 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
43902023-09-22T23:08:11.599ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
43912023-09-22T23:08:11.599ZINFOcrucible: find dest for source 1 for extent at index 7 mrl = flush_mismatch
43922023-09-22T23:08:11.599ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
43932023-09-22T23:08:11.599ZINFOcrucible: source 1, add dest 2 flush mrl = flush_mismatch
43942023-09-22T23:08:11.600ZINFOcrucible: Extent 8 has flush number mismatch
43952023-09-22T23:08:11.600ZINFOcrucible: First source client ID for extent 8 mrl = flush_mismatch
43962023-09-22T23:08:11.600ZINFOcrucible: extent:8 gens: 1 1 1 mrl = flush_mismatch
43972023-09-22T23:08:11.600ZINFOcrucible: extent:8 flush: 3 3 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
43982023-09-22T23:08:11.600ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = flush_mismatch
43992023-09-22T23:08:11.600ZINFOcrucible: extent:8 dirty: false false false mrl = flush_mismatch
44002023-09-22T23:08:11.600ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = flush_mismatch
44012023-09-22T23:08:11.600ZINFOcrucible: find dest for source 0 for extent at index 8 mrl = flush_mismatch
44022023-09-22T23:08:11.600ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
4403 test mend::test::reconcile_flush_b ... ok
44042023-09-22T23:08:11.600ZINFOcrucible: Extent 0 has flush number mismatch
44052023-09-22T23:08:11.600ZINFOcrucible: First source client ID for extent 0 mrl = flush_mismatch
44062023-09-22T23:08:11.600ZINFOcrucible: extent:0 gens: 1 1 1 mrl = flush_mismatch
44072023-09-22T23:08:11.600ZINFOcrucible: extent:0 flush: 1 1 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44082023-09-22T23:08:11.600ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
44092023-09-22T23:08:11.600ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = flush_mismatch
44102023-09-22T23:08:11.600ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
44112023-09-22T23:08:11.600ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
44122023-09-22T23:08:11.600ZINFOcrucible: Extent 1 has flush number mismatch
44132023-09-22T23:08:11.600ZINFOcrucible: First source client ID for extent 1 mrl = flush_mismatch
44142023-09-22T23:08:11.600ZINFOcrucible: extent:1 gens: 1 1 1 mrl = flush_mismatch
44152023-09-22T23:08:11.600ZINFOcrucible: extent:1 flush: 2 1 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44162023-09-22T23:08:11.600ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
44172023-09-22T23:08:11.600ZINFOcrucible: find dest for source 2 for extent at index 1 mrl = flush_mismatch
44182023-09-22T23:08:11.601ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
44192023-09-22T23:08:11.601ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
44202023-09-22T23:08:11.601ZINFOcrucible: Extent 2 has flush number mismatch
44212023-09-22T23:08:11.601ZINFOcrucible: First source client ID for extent 2 mrl = flush_mismatch
44222023-09-22T23:08:11.601ZINFOcrucible: extent:2 gens: 1 1 1 mrl = flush_mismatch
44232023-09-22T23:08:11.601ZINFOcrucible: extent:2 flush: 3 1 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44242023-09-22T23:08:11.601ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = flush_mismatch
44252023-09-22T23:08:11.601ZINFOcrucible: extent:2 dirty: false false false mrl = flush_mismatch
44262023-09-22T23:08:11.601ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = flush_mismatch
44272023-09-22T23:08:11.601ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = flush_mismatch
44282023-09-22T23:08:11.601ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
44292023-09-22T23:08:11.601ZINFOcrucible: Extent 3 has flush number mismatch
44302023-09-22T23:08:11.601ZINFOcrucible: First source client ID for extent 3 mrl = flush_mismatch
44312023-09-22T23:08:11.601ZINFOcrucible: extent:3 gens: 1 1 1 mrl = flush_mismatch
44322023-09-22T23:08:11.601ZINFOcrucible: extent:3 flush: 1 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44332023-09-22T23:08:11.601ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
44342023-09-22T23:08:11.601ZINFOcrucible: find dest for source 2 for extent at index 3 mrl = flush_mismatch
44352023-09-22T23:08:11.601ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
44362023-09-22T23:08:11.601ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
44372023-09-22T23:08:11.601ZINFOcrucible: Extent 4 has flush number mismatch
44382023-09-22T23:08:11.601ZINFOcrucible: First source client ID for extent 4 mrl = flush_mismatch
44392023-09-22T23:08:11.601ZINFOcrucible: extent:4 gens: 1 1 1 mrl = flush_mismatch
44402023-09-22T23:08:11.601ZINFOcrucible: extent:4 flush: 2 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44412023-09-22T23:08:11.601ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
44422023-09-22T23:08:11.601ZINFOcrucible: find dest for source 2 for extent at index 4 mrl = flush_mismatch
44432023-09-22T23:08:11.601ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
44442023-09-22T23:08:11.601ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
44452023-09-22T23:08:11.601ZINFOcrucible: Extent 5 has flush number mismatch
44462023-09-22T23:08:11.601ZINFOcrucible: First source client ID for extent 5 mrl = flush_mismatch
44472023-09-22T23:08:11.601ZINFOcrucible: extent:5 gens: 1 1 1 mrl = flush_mismatch
44482023-09-22T23:08:11.601ZINFOcrucible: extent:5 flush: 3 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44492023-09-22T23:08:11.601ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = flush_mismatch
44502023-09-22T23:08:11.601ZINFOcrucible: extent:5 dirty: false false false mrl = flush_mismatch
44512023-09-22T23:08:11.601ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = flush_mismatch
44522023-09-22T23:08:11.601ZINFOcrucible: find dest for source 0 for extent at index 5 mrl = flush_mismatch
44532023-09-22T23:08:11.601ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
44542023-09-22T23:08:11.601ZINFOcrucible: Extent 6 has flush number mismatch
44552023-09-22T23:08:11.601ZINFOcrucible: First source client ID for extent 6 mrl = flush_mismatch
44562023-09-22T23:08:11.601ZINFOcrucible: extent:6 gens: 1 1 1 mrl = flush_mismatch
44572023-09-22T23:08:11.601ZINFOcrucible: extent:6 flush: 1 3 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44582023-09-22T23:08:11.601ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = flush_mismatch
44592023-09-22T23:08:11.601ZINFOcrucible: extent:6 dirty: false false false mrl = flush_mismatch
44602023-09-22T23:08:11.601ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = flush_mismatch
44612023-09-22T23:08:11.601ZINFOcrucible: find dest for source 1 for extent at index 6 mrl = flush_mismatch
44622023-09-22T23:08:11.601ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
44632023-09-22T23:08:11.601ZINFOcrucible: Extent 7 has flush number mismatch
44642023-09-22T23:08:11.601ZINFOcrucible: First source client ID for extent 7 mrl = flush_mismatch
44652023-09-22T23:08:11.601ZINFOcrucible: extent:7 gens: 1 1 1 mrl = flush_mismatch
44662023-09-22T23:08:11.601ZINFOcrucible: extent:7 flush: 2 3 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44672023-09-22T23:08:11.601ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = flush_mismatch
44682023-09-22T23:08:11.601ZINFOcrucible: extent:7 dirty: false false false mrl = flush_mismatch
44692023-09-22T23:08:11.602ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = flush_mismatch
44702023-09-22T23:08:11.602ZINFOcrucible: find dest for source 1 for extent at index 7 mrl = flush_mismatch
44712023-09-22T23:08:11.602ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
4472 test mend::test::reconcile_flush_c ... ok
4473 test mend::test::reconcile_flush_length_bad - should panic ... ok
44742023-09-22T23:08:11.603ZINFOcrucible: Extent 0 has flush number mismatch
44752023-09-22T23:08:11.603ZINFOcrucible: First source client ID for extent 0 mrl = flush_mismatch
44762023-09-22T23:08:11.603ZINFOcrucible: extent:0 gens: 9 9 9 mrl = flush_mismatch
44772023-09-22T23:08:11.603ZINFOcrucible: extent:0 flush: 1 2 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44782023-09-22T23:08:11.603ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = flush_mismatch
44792023-09-22T23:08:11.603ZINFOcrucible: extent:0 dirty: false false false mrl = flush_mismatch
44802023-09-22T23:08:11.603ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = flush_mismatch
44812023-09-22T23:08:11.603ZINFOcrucible: find dest for source 1 for extent at index 0 mrl = flush_mismatch
44822023-09-22T23:08:11.603ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
4483 test mend::test::reconcile_flush_mismatch_c0 ... ok
44842023-09-22T23:08:11.603ZINFOcrucible: Extent 0 has flush number mismatch
44852023-09-22T23:08:11.603ZINFOcrucible: First source client ID for extent 0 mrl = flush_mismatch
44862023-09-22T23:08:11.603ZINFOcrucible: extent:0 gens: 9 9 9 mrl = flush_mismatch
44872023-09-22T23:08:11.603ZINFOcrucible: extent:0 flush: 1 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44882023-09-22T23:08:11.603ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
44892023-09-22T23:08:11.603ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = flush_mismatch
44902023-09-22T23:08:11.603ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
44912023-09-22T23:08:11.603ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
44922023-09-22T23:08:11.604ZINFOcrucible: Extent 1 has flush number mismatch
44932023-09-22T23:08:11.604ZINFOcrucible: First source client ID for extent 1 mrl = flush_mismatch
44942023-09-22T23:08:11.604ZINFOcrucible: extent:1 gens: 8 8 8 mrl = flush_mismatch
44952023-09-22T23:08:11.604ZINFOcrucible: extent:1 flush: 2 1 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
44962023-09-22T23:08:11.604ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
44972023-09-22T23:08:11.604ZINFOcrucible: find dest for source 2 for extent at index 1 mrl = flush_mismatch
44982023-09-22T23:08:11.604ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
44992023-09-22T23:08:11.604ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
45002023-09-22T23:08:11.604ZINFOcrucible: Extent 2 has flush number mismatch
45012023-09-22T23:08:11.604ZINFOcrucible: First source client ID for extent 2 mrl = flush_mismatch
45022023-09-22T23:08:11.604ZINFOcrucible: extent:2 gens: 7 7 7 mrl = flush_mismatch
45032023-09-22T23:08:11.604ZINFOcrucible: extent:2 flush: 3 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45042023-09-22T23:08:11.604ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = flush_mismatch
45052023-09-22T23:08:11.604ZINFOcrucible: extent:2 dirty: false false false mrl = flush_mismatch
45062023-09-22T23:08:11.604ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = flush_mismatch
45072023-09-22T23:08:11.604ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = flush_mismatch
45082023-09-22T23:08:11.604ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
45092023-09-22T23:08:11.604ZINFOcrucible: Extent 3 has flush number mismatch
45102023-09-22T23:08:11.604ZINFOcrucible: First source client ID for extent 3 mrl = flush_mismatch
45112023-09-22T23:08:11.604ZINFOcrucible: extent:3 gens: 7 7 7 mrl = flush_mismatch
45122023-09-22T23:08:11.604ZINFOcrucible: extent:3 flush: 3 2 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45132023-09-22T23:08:11.604ZINFOcrucible: max_flush now has: [ClientId(0)] mrl = flush_mismatch
45142023-09-22T23:08:11.604ZINFOcrucible: find dest for source 0 for extent at index 3 mrl = flush_mismatch
45152023-09-22T23:08:11.604ZINFOcrucible: source 0, add dest 1 flush mrl = flush_mismatch
45162023-09-22T23:08:11.604ZINFOcrucible: source 0, add dest 2 flush mrl = flush_mismatch
45172023-09-22T23:08:11.604ZINFOcrucible: Extent 4 has flush number mismatch
45182023-09-22T23:08:11.604ZINFOcrucible: First source client ID for extent 4 mrl = flush_mismatch
45192023-09-22T23:08:11.604ZINFOcrucible: extent:4 gens: 6 6 6 mrl = flush_mismatch
45202023-09-22T23:08:11.604ZINFOcrucible: extent:4 flush: 1 3 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45212023-09-22T23:08:11.604ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = flush_mismatch
45222023-09-22T23:08:11.604ZINFOcrucible: extent:4 dirty: false false false mrl = flush_mismatch
45232023-09-22T23:08:11.604ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = flush_mismatch
45242023-09-22T23:08:11.604ZINFOcrucible: find dest for source 1 for extent at index 4 mrl = flush_mismatch
45252023-09-22T23:08:11.604ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
45262023-09-22T23:08:11.604ZINFOcrucible: Extent 5 has flush number mismatch
45272023-09-22T23:08:11.604ZINFOcrucible: First source client ID for extent 5 mrl = flush_mismatch
45282023-09-22T23:08:11.604ZINFOcrucible: extent:5 gens: 5 5 5 mrl = flush_mismatch
45292023-09-22T23:08:11.604ZINFOcrucible: extent:5 flush: 2 3 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45302023-09-22T23:08:11.604ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
45312023-09-22T23:08:11.604ZINFOcrucible: find dest for source 1 for extent at index 5 mrl = flush_mismatch
45322023-09-22T23:08:11.604ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
45332023-09-22T23:08:11.604ZINFOcrucible: source 1, add dest 2 flush mrl = flush_mismatch
4534 test mend::test::reconcile_flush_mismatch_c1 ... ok
45352023-09-22T23:08:11.605ZINFOcrucible: Extent 0 has flush number mismatch
45362023-09-22T23:08:11.605ZINFOcrucible: First source client ID for extent 0 mrl = flush_mismatch
45372023-09-22T23:08:11.605ZINFOcrucible: extent:0 gens: 9 9 9 mrl = flush_mismatch
45382023-09-22T23:08:11.605ZINFOcrucible: extent:0 flush: 1 1 2 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45392023-09-22T23:08:11.605ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
45402023-09-22T23:08:11.605ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = flush_mismatch
45412023-09-22T23:08:11.605ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
45422023-09-22T23:08:11.605ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
45432023-09-22T23:08:11.605ZINFOcrucible: Extent 3 has flush number mismatch
45442023-09-22T23:08:11.605ZINFOcrucible: First source client ID for extent 3 mrl = flush_mismatch
45452023-09-22T23:08:11.605ZINFOcrucible: extent:3 gens: 7 7 7 mrl = flush_mismatch
45462023-09-22T23:08:11.605ZINFOcrucible: extent:3 flush: 1 1 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = flush_mismatch
45472023-09-22T23:08:11.605ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
45482023-09-22T23:08:11.605ZINFOcrucible: find dest for source 2 for extent at index 3 mrl = flush_mismatch
45492023-09-22T23:08:11.605ZINFOcrucible: source 2, add dest 0 flush mrl = flush_mismatch
45502023-09-22T23:08:11.605ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
4551 test mend::test::reconcile_flush_mismatch_c2 ... ok
45522023-09-22T23:08:11.606ZINFOcrucible: generation number mismatch 1
45532023-09-22T23:08:11.606ZINFOcrucible: First source client ID for extent 1 mrl = gen_mismatch
45542023-09-22T23:08:11.606ZINFOcrucible: extent:1 gens: 2 1 1 mrl = gen_mismatch
45552023-09-22T23:08:11.606ZINFOcrucible: find dest for source 0 for extent at index 1 mrl = gen_mismatch
45562023-09-22T23:08:11.606ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
45572023-09-22T23:08:11.606ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
45582023-09-22T23:08:11.606ZINFOcrucible: generation number mismatch 2
45592023-09-22T23:08:11.606ZINFOcrucible: First source client ID for extent 2 mrl = gen_mismatch
45602023-09-22T23:08:11.606ZINFOcrucible: extent:2 gens: 3 1 1 mrl = gen_mismatch
45612023-09-22T23:08:11.606ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = gen_mismatch
45622023-09-22T23:08:11.606ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
45632023-09-22T23:08:11.606ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
45642023-09-22T23:08:11.606ZINFOcrucible: generation number mismatch 3
45652023-09-22T23:08:11.606ZINFOcrucible: First source client ID for extent 3 mrl = gen_mismatch
45662023-09-22T23:08:11.606ZINFOcrucible: extent:3 gens: 1 2 1 mrl = gen_mismatch
45672023-09-22T23:08:11.606ZINFOcrucible: find dest for source 1 for extent at index 3 mrl = gen_mismatch
45682023-09-22T23:08:11.606ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
45692023-09-22T23:08:11.606ZINFOcrucible: source 1, add dest 2 gen mrl = gen_mismatch
45702023-09-22T23:08:11.606ZINFOcrucible: generation number mismatch 4
45712023-09-22T23:08:11.606ZINFOcrucible: First source client ID for extent 4 mrl = gen_mismatch
45722023-09-22T23:08:11.606ZINFOcrucible: extent:4 gens: 2 2 1 mrl = gen_mismatch
45732023-09-22T23:08:11.606ZINFOcrucible: extent:4 flush: 1 1 1 scs: [ClientId(0), ClientId(1)] mrl = gen_mismatch
45742023-09-22T23:08:11.606ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = gen_mismatch
45752023-09-22T23:08:11.606ZINFOcrucible: extent:4 dirty: false false false mrl = gen_mismatch
45762023-09-22T23:08:11.606ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = gen_mismatch
45772023-09-22T23:08:11.606ZINFOcrucible: find dest for source 0 for extent at index 4 mrl = gen_mismatch
45782023-09-22T23:08:11.606ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
45792023-09-22T23:08:11.606ZINFOcrucible: generation number mismatch 5
45802023-09-22T23:08:11.606ZINFOcrucible: First source client ID for extent 5 mrl = gen_mismatch
45812023-09-22T23:08:11.606ZINFOcrucible: extent:5 gens: 3 2 1 mrl = gen_mismatch
45822023-09-22T23:08:11.606ZINFOcrucible: find dest for source 0 for extent at index 5 mrl = gen_mismatch
45832023-09-22T23:08:11.606ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
45842023-09-22T23:08:11.606ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
45852023-09-22T23:08:11.606ZINFOcrucible: generation number mismatch 6
45862023-09-22T23:08:11.606ZINFOcrucible: First source client ID for extent 6 mrl = gen_mismatch
45872023-09-22T23:08:11.606ZINFOcrucible: extent:6 gens: 1 3 1 mrl = gen_mismatch
45882023-09-22T23:08:11.606ZINFOcrucible: find dest for source 1 for extent at index 6 mrl = gen_mismatch
45892023-09-22T23:08:11.606ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
45902023-09-22T23:08:11.607ZINFOcrucible: source 1, add dest 2 gen mrl = gen_mismatch
45912023-09-22T23:08:11.607ZINFOcrucible: generation number mismatch 7
45922023-09-22T23:08:11.607ZINFOcrucible: First source client ID for extent 7 mrl = gen_mismatch
45932023-09-22T23:08:11.607ZINFOcrucible: extent:7 gens: 2 3 1 mrl = gen_mismatch
45942023-09-22T23:08:11.607ZINFOcrucible: find dest for source 1 for extent at index 7 mrl = gen_mismatch
45952023-09-22T23:08:11.607ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
45962023-09-22T23:08:11.607ZINFOcrucible: source 1, add dest 2 gen mrl = gen_mismatch
45972023-09-22T23:08:11.607ZINFOcrucible: generation number mismatch 8
45982023-09-22T23:08:11.607ZINFOcrucible: First source client ID for extent 8 mrl = gen_mismatch
45992023-09-22T23:08:11.607ZINFOcrucible: extent:8 gens: 3 3 1 mrl = gen_mismatch
46002023-09-22T23:08:11.607ZINFOcrucible: extent:8 flush: 1 1 1 scs: [ClientId(0), ClientId(1)] mrl = gen_mismatch
46012023-09-22T23:08:11.607ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = gen_mismatch
46022023-09-22T23:08:11.607ZINFOcrucible: extent:8 dirty: false false false mrl = gen_mismatch
46032023-09-22T23:08:11.607ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = gen_mismatch
46042023-09-22T23:08:11.607ZINFOcrucible: find dest for source 0 for extent at index 8 mrl = gen_mismatch
46052023-09-22T23:08:11.607ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
4606 test mend::test::reconcile_gen_a ... ok
46072023-09-22T23:08:11.607ZINFOcrucible: generation number mismatch 0
46082023-09-22T23:08:11.607ZINFOcrucible: First source client ID for extent 0 mrl = gen_mismatch
46092023-09-22T23:08:11.607ZINFOcrucible: extent:0 gens: 1 1 2 mrl = gen_mismatch
46102023-09-22T23:08:11.607ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = gen_mismatch
46112023-09-22T23:08:11.607ZINFOcrucible: source 2, add dest 0 gen mrl = gen_mismatch
46122023-09-22T23:08:11.607ZINFOcrucible: source 2, add dest 1 gen mrl = gen_mismatch
46132023-09-22T23:08:11.608ZINFOcrucible: generation number mismatch 1
46142023-09-22T23:08:11.608ZINFOcrucible: First source client ID for extent 1 mrl = gen_mismatch
46152023-09-22T23:08:11.608ZINFOcrucible: extent:1 gens: 2 1 2 mrl = gen_mismatch
46162023-09-22T23:08:11.608ZINFOcrucible: extent:1 flush: 1 1 1 scs: [ClientId(0), ClientId(2)] mrl = gen_mismatch
46172023-09-22T23:08:11.608ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = gen_mismatch
46182023-09-22T23:08:11.608ZINFOcrucible: extent:1 dirty: false false false mrl = gen_mismatch
46192023-09-22T23:08:11.608ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = gen_mismatch
46202023-09-22T23:08:11.608ZINFOcrucible: find dest for source 0 for extent at index 1 mrl = gen_mismatch
46212023-09-22T23:08:11.608ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
46222023-09-22T23:08:11.608ZINFOcrucible: generation number mismatch 2
46232023-09-22T23:08:11.608ZINFOcrucible: First source client ID for extent 2 mrl = gen_mismatch
46242023-09-22T23:08:11.608ZINFOcrucible: extent:2 gens: 3 1 2 mrl = gen_mismatch
46252023-09-22T23:08:11.608ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = gen_mismatch
46262023-09-22T23:08:11.608ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
46272023-09-22T23:08:11.608ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
46282023-09-22T23:08:11.608ZINFOcrucible: generation number mismatch 3
46292023-09-22T23:08:11.608ZINFOcrucible: First source client ID for extent 3 mrl = gen_mismatch
46302023-09-22T23:08:11.608ZINFOcrucible: extent:3 gens: 1 2 2 mrl = gen_mismatch
46312023-09-22T23:08:11.608ZINFOcrucible: extent:3 flush: 1 1 1 scs: [ClientId(1), ClientId(2)] mrl = gen_mismatch
46322023-09-22T23:08:11.608ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = gen_mismatch
46332023-09-22T23:08:11.608ZINFOcrucible: extent:3 dirty: false false false mrl = gen_mismatch
46342023-09-22T23:08:11.608ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = gen_mismatch
46352023-09-22T23:08:11.608ZINFOcrucible: find dest for source 1 for extent at index 3 mrl = gen_mismatch
46362023-09-22T23:08:11.608ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
46372023-09-22T23:08:11.608ZINFOcrucible: generation number mismatch 5
46382023-09-22T23:08:11.608ZINFOcrucible: First source client ID for extent 5 mrl = gen_mismatch
46392023-09-22T23:08:11.608ZINFOcrucible: extent:5 gens: 3 2 2 mrl = gen_mismatch
46402023-09-22T23:08:11.608ZINFOcrucible: find dest for source 0 for extent at index 5 mrl = gen_mismatch
46412023-09-22T23:08:11.608ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
46422023-09-22T23:08:11.608ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
46432023-09-22T23:08:11.608ZINFOcrucible: generation number mismatch 6
46442023-09-22T23:08:11.608ZINFOcrucible: First source client ID for extent 6 mrl = gen_mismatch
46452023-09-22T23:08:11.608ZINFOcrucible: extent:6 gens: 1 3 2 mrl = gen_mismatch
46462023-09-22T23:08:11.608ZINFOcrucible: find dest for source 1 for extent at index 6 mrl = gen_mismatch
46472023-09-22T23:08:11.608ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
46482023-09-22T23:08:11.608ZINFOcrucible: source 1, add dest 2 gen mrl = gen_mismatch
46492023-09-22T23:08:11.608ZINFOcrucible: generation number mismatch 7
46502023-09-22T23:08:11.608ZINFOcrucible: First source client ID for extent 7 mrl = gen_mismatch
46512023-09-22T23:08:11.608ZINFOcrucible: extent:7 gens: 2 3 2 mrl = gen_mismatch
46522023-09-22T23:08:11.608ZINFOcrucible: find dest for source 1 for extent at index 7 mrl = gen_mismatch
46532023-09-22T23:08:11.608ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
46542023-09-22T23:08:11.608ZINFOcrucible: source 1, add dest 2 gen mrl = gen_mismatch
46552023-09-22T23:08:11.608ZINFOcrucible: generation number mismatch 8
46562023-09-22T23:08:11.608ZINFOcrucible: First source client ID for extent 8 mrl = gen_mismatch
46572023-09-22T23:08:11.608ZINFOcrucible: extent:8 gens: 3 3 2 mrl = gen_mismatch
46582023-09-22T23:08:11.608ZINFOcrucible: extent:8 flush: 1 1 1 scs: [ClientId(0), ClientId(1)] mrl = gen_mismatch
46592023-09-22T23:08:11.608ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1)] mrl = gen_mismatch
46602023-09-22T23:08:11.608ZINFOcrucible: extent:8 dirty: false false false mrl = gen_mismatch
46612023-09-22T23:08:11.608ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(1)] mrl = gen_mismatch
46622023-09-22T23:08:11.608ZINFOcrucible: find dest for source 0 for extent at index 8 mrl = gen_mismatch
46632023-09-22T23:08:11.608ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
4664 test mend::test::reconcile_gen_b ... ok
46652023-09-22T23:08:11.609ZINFOcrucible: generation number mismatch 0
46662023-09-22T23:08:11.609ZINFOcrucible: First source client ID for extent 0 mrl = gen_mismatch
46672023-09-22T23:08:11.609ZINFOcrucible: extent:0 gens: 1 1 3 mrl = gen_mismatch
46682023-09-22T23:08:11.609ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = gen_mismatch
46692023-09-22T23:08:11.609ZINFOcrucible: source 2, add dest 0 gen mrl = gen_mismatch
46702023-09-22T23:08:11.609ZINFOcrucible: source 2, add dest 1 gen mrl = gen_mismatch
46712023-09-22T23:08:11.609ZINFOcrucible: generation number mismatch 1
46722023-09-22T23:08:11.609ZINFOcrucible: First source client ID for extent 1 mrl = gen_mismatch
46732023-09-22T23:08:11.609ZINFOcrucible: extent:1 gens: 2 1 3 mrl = gen_mismatch
46742023-09-22T23:08:11.609ZINFOcrucible: find dest for source 2 for extent at index 1 mrl = gen_mismatch
46752023-09-22T23:08:11.609ZINFOcrucible: source 2, add dest 0 gen mrl = gen_mismatch
46762023-09-22T23:08:11.609ZINFOcrucible: source 2, add dest 1 gen mrl = gen_mismatch
46772023-09-22T23:08:11.609ZINFOcrucible: generation number mismatch 2
46782023-09-22T23:08:11.609ZINFOcrucible: First source client ID for extent 2 mrl = gen_mismatch
46792023-09-22T23:08:11.609ZINFOcrucible: extent:2 gens: 3 1 3 mrl = gen_mismatch
46802023-09-22T23:08:11.609ZINFOcrucible: extent:2 flush: 1 1 1 scs: [ClientId(0), ClientId(2)] mrl = gen_mismatch
46812023-09-22T23:08:11.609ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = gen_mismatch
46822023-09-22T23:08:11.609ZINFOcrucible: extent:2 dirty: false false false mrl = gen_mismatch
46832023-09-22T23:08:11.609ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = gen_mismatch
46842023-09-22T23:08:11.609ZINFOcrucible: find dest for source 0 for extent at index 2 mrl = gen_mismatch
46852023-09-22T23:08:11.609ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
46862023-09-22T23:08:11.609ZINFOcrucible: generation number mismatch 3
46872023-09-22T23:08:11.609ZINFOcrucible: First source client ID for extent 3 mrl = gen_mismatch
46882023-09-22T23:08:11.609ZINFOcrucible: extent:3 gens: 1 2 3 mrl = gen_mismatch
46892023-09-22T23:08:11.609ZINFOcrucible: find dest for source 2 for extent at index 3 mrl = gen_mismatch
46902023-09-22T23:08:11.610ZINFOcrucible: source 2, add dest 0 gen mrl = gen_mismatch
46912023-09-22T23:08:11.610ZINFOcrucible: source 2, add dest 1 gen mrl = gen_mismatch
46922023-09-22T23:08:11.610ZINFOcrucible: generation number mismatch 4
46932023-09-22T23:08:11.610ZINFOcrucible: First source client ID for extent 4 mrl = gen_mismatch
46942023-09-22T23:08:11.610ZINFOcrucible: extent:4 gens: 2 2 3 mrl = gen_mismatch
46952023-09-22T23:08:11.610ZINFOcrucible: find dest for source 2 for extent at index 4 mrl = gen_mismatch
46962023-09-22T23:08:11.610ZINFOcrucible: source 2, add dest 0 gen mrl = gen_mismatch
46972023-09-22T23:08:11.610ZINFOcrucible: source 2, add dest 1 gen mrl = gen_mismatch
46982023-09-22T23:08:11.610ZINFOcrucible: generation number mismatch 5
46992023-09-22T23:08:11.610ZINFOcrucible: First source client ID for extent 5 mrl = gen_mismatch
47002023-09-22T23:08:11.610ZINFOcrucible: extent:5 gens: 3 2 3 mrl = gen_mismatch
47012023-09-22T23:08:11.610ZINFOcrucible: extent:5 flush: 1 1 1 scs: [ClientId(0), ClientId(2)] mrl = gen_mismatch
47022023-09-22T23:08:11.610ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = gen_mismatch
47032023-09-22T23:08:11.610ZINFOcrucible: extent:5 dirty: false false false mrl = gen_mismatch
47042023-09-22T23:08:11.610ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = gen_mismatch
47052023-09-22T23:08:11.610ZINFOcrucible: find dest for source 0 for extent at index 5 mrl = gen_mismatch
47062023-09-22T23:08:11.610ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
47072023-09-22T23:08:11.610ZINFOcrucible: generation number mismatch 6
47082023-09-22T23:08:11.610ZINFOcrucible: First source client ID for extent 6 mrl = gen_mismatch
47092023-09-22T23:08:11.610ZINFOcrucible: extent:6 gens: 1 3 3 mrl = gen_mismatch
47102023-09-22T23:08:11.610ZINFOcrucible: extent:6 flush: 1 1 1 scs: [ClientId(1), ClientId(2)] mrl = gen_mismatch
47112023-09-22T23:08:11.610ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = gen_mismatch
47122023-09-22T23:08:11.610ZINFOcrucible: extent:6 dirty: false false false mrl = gen_mismatch
47132023-09-22T23:08:11.610ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = gen_mismatch
47142023-09-22T23:08:11.610ZINFOcrucible: find dest for source 1 for extent at index 6 mrl = gen_mismatch
47152023-09-22T23:08:11.610ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
47162023-09-22T23:08:11.610ZINFOcrucible: generation number mismatch 7
47172023-09-22T23:08:11.610ZINFOcrucible: First source client ID for extent 7 mrl = gen_mismatch
47182023-09-22T23:08:11.610ZINFOcrucible: extent:7 gens: 2 3 3 mrl = gen_mismatch
47192023-09-22T23:08:11.610ZINFOcrucible: extent:7 flush: 1 1 1 scs: [ClientId(1), ClientId(2)] mrl = gen_mismatch
47202023-09-22T23:08:11.610ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = gen_mismatch
47212023-09-22T23:08:11.610ZINFOcrucible: extent:7 dirty: false false false mrl = gen_mismatch
47222023-09-22T23:08:11.610ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = gen_mismatch
47232023-09-22T23:08:11.610ZINFOcrucible: find dest for source 1 for extent at index 7 mrl = gen_mismatch
47242023-09-22T23:08:11.610ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
4725 test mend::test::reconcile_gen_c ... ok
4726 test mend::test::reconcile_gen_length_bad - should panic ... ok
47272023-09-22T23:08:11.611ZINFOcrucible: generation number mismatch 0
47282023-09-22T23:08:11.611ZINFOcrucible: First source client ID for extent 0 mrl = gen_mismatch
47292023-09-22T23:08:11.611ZINFOcrucible: extent:0 gens: 9 8 8 mrl = gen_mismatch
47302023-09-22T23:08:11.611ZINFOcrucible: find dest for source 0 for extent at index 0 mrl = gen_mismatch
47312023-09-22T23:08:11.611ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
47322023-09-22T23:08:11.611ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
4733 test mend::test::reconcile_generation_mismatch_c0 ... ok
47342023-09-22T23:08:11.612ZINFOcrucible: generation number mismatch 0
47352023-09-22T23:08:11.612ZINFOcrucible: First source client ID for extent 0 mrl = gen_mismatch
47362023-09-22T23:08:11.612ZINFOcrucible: extent:0 gens: 9 8 9 mrl = gen_mismatch
47372023-09-22T23:08:11.612ZINFOcrucible: extent:0 flush: 2 2 2 scs: [ClientId(0), ClientId(2)] mrl = gen_mismatch
47382023-09-22T23:08:11.612ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = gen_mismatch
47392023-09-22T23:08:11.612ZINFOcrucible: extent:0 dirty: false false false mrl = gen_mismatch
47402023-09-22T23:08:11.612ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = gen_mismatch
47412023-09-22T23:08:11.612ZINFOcrucible: find dest for source 0 for extent at index 0 mrl = gen_mismatch
47422023-09-22T23:08:11.612ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
4743 my ef is: ExtentFix { source: ClientId(0), dest: [ClientId(1)] }
4744 test mend::test::reconcile_generation_mismatch_c1 ... ok
47452023-09-22T23:08:11.613ZINFOcrucible: generation number mismatch 0
47462023-09-22T23:08:11.613ZINFOcrucible: First source client ID for extent 0 mrl = gen_mismatch
47472023-09-22T23:08:11.613ZINFOcrucible: extent:0 gens: 7 8 8 mrl = gen_mismatch
47482023-09-22T23:08:11.613ZINFOcrucible: extent:0 flush: 2 2 2 scs: [ClientId(1), ClientId(2)] mrl = gen_mismatch
47492023-09-22T23:08:11.613ZINFOcrucible: max_flush now has: [ClientId(1), ClientId(2)] mrl = gen_mismatch
47502023-09-22T23:08:11.613ZINFOcrucible: extent:0 dirty: false false false mrl = gen_mismatch
47512023-09-22T23:08:11.613ZINFOcrucible: No maxes found, left with: [ClientId(1), ClientId(2)] mrl = gen_mismatch
47522023-09-22T23:08:11.613ZINFOcrucible: find dest for source 1 for extent at index 0 mrl = gen_mismatch
47532023-09-22T23:08:11.613ZINFOcrucible: source 1, add dest 0 gen mrl = gen_mismatch
47542023-09-22T23:08:11.613ZINFOcrucible: generation number mismatch 1
47552023-09-22T23:08:11.613ZINFOcrucible: First source client ID for extent 1 mrl = gen_mismatch
47562023-09-22T23:08:11.613ZINFOcrucible: extent:1 gens: 8 9 10 mrl = gen_mismatch
47572023-09-22T23:08:11.613ZINFOcrucible: find dest for source 2 for extent at index 1 mrl = gen_mismatch
47582023-09-22T23:08:11.613ZINFOcrucible: source 2, add dest 0 gen mrl = gen_mismatch
47592023-09-22T23:08:11.613ZINFOcrucible: source 2, add dest 1 gen mrl = gen_mismatch
47602023-09-22T23:08:11.613ZINFOcrucible: generation number mismatch 3
47612023-09-22T23:08:11.613ZINFOcrucible: First source client ID for extent 3 mrl = gen_mismatch
47622023-09-22T23:08:11.613ZINFOcrucible: extent:3 gens: 5 4 3 mrl = gen_mismatch
47632023-09-22T23:08:11.613ZINFOcrucible: find dest for source 0 for extent at index 3 mrl = gen_mismatch
47642023-09-22T23:08:11.613ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
47652023-09-22T23:08:11.613ZINFOcrucible: source 0, add dest 2 gen mrl = gen_mismatch
4766 test mend::test::reconcile_generation_mismatch_c2 ... ok
4767 test mend::test::reconcile_length_mismatch - should panic ... ok
47682023-09-22T23:08:11.614ZINFOcrucible: Extent 0 has flush number mismatch
47692023-09-22T23:08:11.614ZINFOcrucible: First source client ID for extent 0 mrl = flush_mismatch
47702023-09-22T23:08:11.614ZINFOcrucible: extent:0 gens: 9 9 8 mrl = flush_mismatch
47712023-09-22T23:08:11.614ZINFOcrucible: extent:0 flush: 1 2 3 scs: [ClientId(0), ClientId(1)] mrl = flush_mismatch
47722023-09-22T23:08:11.614ZINFOcrucible: max_flush now has: [ClientId(1)] mrl = flush_mismatch
47732023-09-22T23:08:11.614ZINFOcrucible: find dest for source 1 for extent at index 0 mrl = flush_mismatch
47742023-09-22T23:08:11.614ZINFOcrucible: source 1, add dest 0 flush mrl = flush_mismatch
47752023-09-22T23:08:11.614ZINFOcrucible: source 1, add dest 2 gen mrl = flush_mismatch
47762023-09-22T23:08:11.614ZINFOcrucible: Extent 1 has flush number mismatch
47772023-09-22T23:08:11.614ZINFOcrucible: First source client ID for extent 1 mrl = flush_mismatch
47782023-09-22T23:08:11.614ZINFOcrucible: extent:1 gens: 7 8 8 mrl = flush_mismatch
47792023-09-22T23:08:11.614ZINFOcrucible: extent:1 flush: 1 1 2 scs: [ClientId(1), ClientId(2)] mrl = flush_mismatch
47802023-09-22T23:08:11.614ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = flush_mismatch
47812023-09-22T23:08:11.614ZINFOcrucible: find dest for source 2 for extent at index 1 mrl = flush_mismatch
47822023-09-22T23:08:11.614ZINFOcrucible: source 2, add dest 0 gen mrl = flush_mismatch
47832023-09-22T23:08:11.614ZINFOcrucible: source 2, add dest 1 flush mrl = flush_mismatch
47842023-09-22T23:08:11.614ZINFOcrucible: Extent 2 has flush number mismatch
47852023-09-22T23:08:11.614ZINFOcrucible: First source client ID for extent 2 mrl = flush_mismatch
47862023-09-22T23:08:11.615ZINFOcrucible: extent:2 gens: 7 9 7 mrl = flush_mismatch
47872023-09-22T23:08:11.615ZINFOcrucible: find dest for source 1 for extent at index 2 mrl = flush_mismatch
47882023-09-22T23:08:11.615ZINFOcrucible: source 1, add dest 0 gen mrl = flush_mismatch
47892023-09-22T23:08:11.615ZINFOcrucible: source 1, add dest 2 gen mrl = flush_mismatch
47902023-09-22T23:08:11.615ZINFOcrucible: Extent 3 has flush number mismatch
47912023-09-22T23:08:11.615ZINFOcrucible: First source client ID for extent 3 mrl = flush_mismatch
47922023-09-22T23:08:11.615ZINFOcrucible: extent:3 gens: 7 8 9 mrl = flush_mismatch
47932023-09-22T23:08:11.615ZINFOcrucible: find dest for source 2 for extent at index 3 mrl = flush_mismatch
47942023-09-22T23:08:11.615ZINFOcrucible: source 2, add dest 0 gen mrl = flush_mismatch
47952023-09-22T23:08:11.615ZINFOcrucible: source 2, add dest 1 gen mrl = flush_mismatch
4796 test mend::test::reconcile_multiple_source ... ok
4797 test mend::test::reconcile_one ... ok
47982023-09-22T23:08:11.616ZINFOcrucible: Extents 0 dirty
47992023-09-22T23:08:11.616ZINFOcrucible: First source client ID for extent 0 mrl = dirty
48002023-09-22T23:08:11.616ZINFOcrucible: extent:0 gens: 9 9 9 mrl = dirty
48012023-09-22T23:08:11.616ZINFOcrucible: extent:0 flush: 2 2 3 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
48022023-09-22T23:08:11.616ZINFOcrucible: max_flush now has: [ClientId(2)] mrl = dirty
48032023-09-22T23:08:11.616ZINFOcrucible: find dest for source 2 for extent at index 0 mrl = dirty
48042023-09-22T23:08:11.616ZINFOcrucible: source 2, add dest 0 flush mrl = dirty
48052023-09-22T23:08:11.616ZINFOcrucible: source 2, add dest 1 flush mrl = dirty
48062023-09-22T23:08:11.616ZINFOcrucible: Extents 2 dirty
48072023-09-22T23:08:11.616ZINFOcrucible: First source client ID for extent 2 mrl = dirty
48082023-09-22T23:08:11.616ZINFOcrucible: extent:2 gens: 7 7 8 mrl = dirty
48092023-09-22T23:08:11.616ZINFOcrucible: find dest for source 2 for extent at index 2 mrl = dirty
48102023-09-22T23:08:11.616ZINFOcrucible: source 2, add dest 0 gen mrl = dirty
48112023-09-22T23:08:11.616ZINFOcrucible: source 2, add dest 1 gen mrl = dirty
48122023-09-22T23:08:11.616ZINFOcrucible: Extents 3 dirty
48132023-09-22T23:08:11.616ZINFOcrucible: First source client ID for extent 3 mrl = dirty
48142023-09-22T23:08:11.616ZINFOcrucible: extent:3 gens: 7 7 7 mrl = dirty
48152023-09-22T23:08:11.616ZINFOcrucible: extent:3 flush: 1 1 1 scs: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
48162023-09-22T23:08:11.616ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(1), ClientId(2)] mrl = dirty
48172023-09-22T23:08:11.616ZINFOcrucible: extent:3 dirty: true true true mrl = dirty
48182023-09-22T23:08:11.616ZINFOcrucible: find dest for source 0 for extent at index 3 mrl = dirty
48192023-09-22T23:08:11.616ZINFOcrucible: source 0, add dest 1 source flush mrl = dirty
48202023-09-22T23:08:11.616ZINFOcrucible: source 0, add dest 2 source flush mrl = dirty
48212023-09-22T23:08:11.616ZINFOcrucible: generation number mismatch 1
48222023-09-22T23:08:11.616ZINFOcrucible: First source client ID for extent 1 mrl = gen_mismatch
48232023-09-22T23:08:11.616ZINFOcrucible: extent:1 gens: 8 7 8 mrl = gen_mismatch
48242023-09-22T23:08:11.616ZINFOcrucible: extent:1 flush: 1 1 1 scs: [ClientId(0), ClientId(2)] mrl = gen_mismatch
48252023-09-22T23:08:11.616ZINFOcrucible: max_flush now has: [ClientId(0), ClientId(2)] mrl = gen_mismatch
48262023-09-22T23:08:11.616ZINFOcrucible: extent:1 dirty: false false false mrl = gen_mismatch
48272023-09-22T23:08:11.616ZINFOcrucible: No maxes found, left with: [ClientId(0), ClientId(2)] mrl = gen_mismatch
48282023-09-22T23:08:11.616ZINFOcrucible: find dest for source 0 for extent at index 1 mrl = gen_mismatch
48292023-09-22T23:08:11.616ZINFOcrucible: source 0, add dest 1 gen mrl = gen_mismatch
4830 test mend::test::reconcile_one_of_each ... ok
4831 test mend::test::reconcile_to_repair ... ok
48322023-09-22T23:08:11.617ZINFOcrucible: Crucible stats registered with UUID: 39c08d56-d632-40e8-a607-2d3ab8106ac1
48332023-09-22T23:08:11.617ZINFOcrucible: Crucible 39c08d56-d632-40e8-a607-2d3ab8106ac1 has session id: e10d0efe-35f3-4ac4-9215-8d9ca8ed9712
48342023-09-22T23:08:11.617ZINFOcrucible: 39c08d56-d632-40e8-a607-2d3ab8106ac1 is now active with session: f5b05529-5456-441f-a40c-bb1539235cdd
48352023-09-22T23:08:11.618ZWARNcrucible: Decryption failed even though integrity hash matched! = downstairs
48362023-09-22T23:08:11.618ZERROcrucible: Decryption failed with correct hash = downstairs
48372023-09-22T23:08:11.618ZERROcrucible: [0] Reports error DecryptionError on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
4838 test test::up_test::bad_decryption_means_panic ... ok
48392023-09-22T23:08:11.619ZERROcrucible: No match for integrity hash = downstairs
48402023-09-22T23:08:11.619ZERROcrucible: Expected: 0x2710 != Computed: 0xf0c5cd5c81177cbb = downstairs
48412023-09-22T23:08:11.619ZERROcrucible: [0] Reports error HashMismatch on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
4842 test test::up_test::bad_hash_on_encrypted_read_panic ... ok
48432023-09-22T23:08:11.620ZINFOcrucible: Crucible stats registered with UUID: 5b8a6a51-7c13-4127-bc35-9bb8891d5eb0
48442023-09-22T23:08:11.620ZINFOcrucible: Crucible 5b8a6a51-7c13-4127-bc35-9bb8891d5eb0 has session id: be8e0e0b-31a3-43d6-8c20-f3f5d22372cb
48452023-09-22T23:08:11.620ZINFOcrucible: 5b8a6a51-7c13-4127-bc35-9bb8891d5eb0 is now active with session: 1bb8800d-f95c-49b5-9cb3-d4ffa956aad3
48462023-09-22T23:08:11.620ZERROcrucible: No match computed hash:0x78fc2d7d9eaf9bbf = downstairs
48472023-09-22T23:08:11.620ZERROcrucible: No match hash:0x2710 = downstairs
48482023-09-22T23:08:11.620ZERROcrucible: Data from hash: = downstairs
48492023-09-22T23:08:11.620ZERROcrucible: [0]:1 = downstairs
48502023-09-22T23:08:11.620ZERROcrucible: [1]:1 = downstairs
48512023-09-22T23:08:11.620ZERROcrucible: [2]:1 = downstairs
48522023-09-22T23:08:11.620ZERROcrucible: [3]:1 = downstairs
48532023-09-22T23:08:11.620ZERROcrucible: [4]:1 = downstairs
48542023-09-22T23:08:11.620ZERROcrucible: [5]:1 = downstairs
48552023-09-22T23:08:11.620ZERROcrucible: [0] Reports error HashMismatch on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
4856 test test::up_test::bad_read_hash_means_panic ... ok
48572023-09-22T23:08:11.620ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
48582023-09-22T23:08:11.620ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 2a83ae12-f4fc-42f6-b107-998ee5c4b845
48592023-09-22T23:08:11.620ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: e6c4a2df-5e4e-4055-83af-21f523de50ed
48602023-09-22T23:08:11.620ZINFOcrucible: 00000000-0000-0000-0000-000000000000 set deactivating.
48612023-09-22T23:08:11.621ZINFOcrucible: [0] check deactivate YES
48622023-09-22T23:08:11.621ZINFOcrucible: [0] 00000000-0000-0000-0000-000000000000 (e6c4a2df-5e4e-4055-83af-21f523de50ed) Active Active Active ds_transition to Deactivated
48632023-09-22T23:08:11.621ZINFOcrucible: [0] Transition from Active to Deactivated
48642023-09-22T23:08:11.621ZINFOcrucible: [2] check deactivate YES
48652023-09-22T23:08:11.621ZINFOcrucible: [2] 00000000-0000-0000-0000-000000000000 (e6c4a2df-5e4e-4055-83af-21f523de50ed) Deactivated Active Active ds_transition to Deactivated
48662023-09-22T23:08:11.621ZINFOcrucible: [2] Transition from Active to Deactivated
48672023-09-22T23:08:11.621ZINFOcrucible: [1] deactivate job 1001 not New flush, NO
48682023-09-22T23:08:11.621ZINFOcrucible: deactivate transition checking...
48692023-09-22T23:08:11.621ZINFOcrucible: deactivate_transition Deactivated NO
48702023-09-22T23:08:11.621ZINFOcrucible: deactivate_transition Active NO
48712023-09-22T23:08:11.621ZINFOcrucible: deactivate_transition Deactivated NO
48722023-09-22T23:08:11.621ZINFOcrucible: [1] check deactivate YES
48732023-09-22T23:08:11.621ZINFOcrucible: [1] 00000000-0000-0000-0000-000000000000 (e6c4a2df-5e4e-4055-83af-21f523de50ed) Deactivated Active Deactivated ds_transition to Deactivated
48742023-09-22T23:08:11.621ZINFOcrucible: [1] Transition from Active to Deactivated
48752023-09-22T23:08:11.621ZINFOcrucible: [0] 00000000-0000-0000-0000-000000000000 Gone missing, transition from Deactivated to New
48762023-09-22T23:08:11.621ZINFOcrucible: [1] 00000000-0000-0000-0000-000000000000 Gone missing, transition from Deactivated to New
48772023-09-22T23:08:11.621ZINFOcrucible: [2] 00000000-0000-0000-0000-000000000000 Gone missing, transition from Deactivated to New
48782023-09-22T23:08:11.621ZINFOcrucible: deactivate transition checking...
48792023-09-22T23:08:11.621ZINFOcrucible: deactivate_transition New Maybe
48802023-09-22T23:08:11.621ZINFOcrucible: deactivate_transition New Maybe
48812023-09-22T23:08:11.621ZINFOcrucible: deactivate_transition New Maybe
48822023-09-22T23:08:11.621ZINFOcrucible: All DS in the proper state! -> INIT
4883 test test::up_test::deactivate_after_work_completed_write ... ok
48842023-09-22T23:08:11.622ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
48852023-09-22T23:08:11.622ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: b689aa0f-2697-4d8e-bc0f-51129a721e77
48862023-09-22T23:08:11.622ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: e6090615-6d96-4f4c-9110-b1e9f5922673
48872023-09-22T23:08:11.622ZINFOcrucible: 00000000-0000-0000-0000-000000000000 set deactivating.
48882023-09-22T23:08:11.622ZINFOcrucible: [0] check deactivate YES
48892023-09-22T23:08:11.622ZINFOcrucible: [0] 00000000-0000-0000-0000-000000000000 (e6090615-6d96-4f4c-9110-b1e9f5922673) Active Active Active ds_transition to Deactivated
48902023-09-22T23:08:11.622ZINFOcrucible: [0] Transition from Active to Deactivated
48912023-09-22T23:08:11.622ZINFOcrucible: [2] check deactivate YES
48922023-09-22T23:08:11.622ZINFOcrucible: [2] 00000000-0000-0000-0000-000000000000 (e6090615-6d96-4f4c-9110-b1e9f5922673) Deactivated Active Active ds_transition to Deactivated
48932023-09-22T23:08:11.622ZINFOcrucible: [2] Transition from Active to Deactivated
48942023-09-22T23:08:11.622ZINFOcrucible: [1] deactivate job 1001 not New flush, NO
48952023-09-22T23:08:11.622ZINFOcrucible: deactivate transition checking...
48962023-09-22T23:08:11.622ZINFOcrucible: deactivate_transition Deactivated NO
48972023-09-22T23:08:11.622ZINFOcrucible: deactivate_transition Active NO
48982023-09-22T23:08:11.622ZINFOcrucible: deactivate_transition Deactivated NO
48992023-09-22T23:08:11.622ZINFOcrucible: [1] check deactivate YES
49002023-09-22T23:08:11.622ZINFOcrucible: [1] 00000000-0000-0000-0000-000000000000 (e6090615-6d96-4f4c-9110-b1e9f5922673) Deactivated Active Deactivated ds_transition to Deactivated
49012023-09-22T23:08:11.622ZINFOcrucible: [1] Transition from Active to Deactivated
49022023-09-22T23:08:11.622ZINFOcrucible: [0] 00000000-0000-0000-0000-000000000000 Gone missing, transition from Deactivated to New
49032023-09-22T23:08:11.622ZINFOcrucible: [1] 00000000-0000-0000-0000-000000000000 Gone missing, transition from Deactivated to New
49042023-09-22T23:08:11.622ZINFOcrucible: [2] 00000000-0000-0000-0000-000000000000 Gone missing, transition from Deactivated to New
49052023-09-22T23:08:11.622ZINFOcrucible: deactivate transition checking...
49062023-09-22T23:08:11.622ZINFOcrucible: deactivate_transition New Maybe
49072023-09-22T23:08:11.622ZINFOcrucible: deactivate_transition New Maybe
49082023-09-22T23:08:11.622ZINFOcrucible: deactivate_transition New Maybe
49092023-09-22T23:08:11.622ZINFOcrucible: All DS in the proper state! -> INIT
4910 test test::up_test::deactivate_after_work_completed_write_unwritten ... ok
49112023-09-22T23:08:11.623ZINFOcrucible: Crucible stats registered with UUID: 7c40f313-4b9b-431a-8bc5-4e217b2b2806
49122023-09-22T23:08:11.623ZINFOcrucible: Crucible 7c40f313-4b9b-431a-8bc5-4e217b2b2806 has session id: b12049c9-7e3d-41db-864c-b847f7405e20
49132023-09-22T23:08:11.623ZINFOcrucible: 7c40f313-4b9b-431a-8bc5-4e217b2b2806 is now active with session: 4a9e9c9c-ea59-404d-ac9a-e3b3c137be22
4914 test test::up_test::deactivate_ds_not_when_active ... ok
49152023-09-22T23:08:11.623ZINFOcrucible: Crucible stats registered with UUID: 47268ea8-52a4-4142-bcaa-302a9423a8fe
49162023-09-22T23:08:11.623ZINFOcrucible: Crucible 47268ea8-52a4-4142-bcaa-302a9423a8fe has session id: 8470d040-f972-41fb-b673-d422f8314eef
4917 test test::up_test::deactivate_ds_not_when_initializing ... ok
49182023-09-22T23:08:11.624ZINFOcrucible: Crucible stats registered with UUID: 73975c6f-8deb-48ac-a65e-8e75d0cd386f
49192023-09-22T23:08:11.624ZINFOcrucible: Crucible 73975c6f-8deb-48ac-a65e-8e75d0cd386f has session id: 7ae18f99-3dac-4ea1-ba26-72f927fe3b3c
49202023-09-22T23:08:11.624ZINFOcrucible: 73975c6f-8deb-48ac-a65e-8e75d0cd386f is now active with session: 86331d61-a1ad-4418-a2e9-7028dcf66f35
49212023-09-22T23:08:11.624ZINFOcrucible: 73975c6f-8deb-48ac-a65e-8e75d0cd386f set deactivating.
4922 test test::up_test::deactivate_not_when_active ... ok
49232023-09-22T23:08:11.624ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
49242023-09-22T23:08:11.625ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 36e7bc94-548c-425f-a8b5-a631300e85f2
49252023-09-22T23:08:11.625ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 54f115b4-a287-44a4-b8c4-398744c6c761
49262023-09-22T23:08:11.625ZINFOcrucible: 00000000-0000-0000-0000-000000000000 set deactivating.
49272023-09-22T23:08:11.625ZINFOcrucible: [0] deactivate job 1001 not New flush, NO
49282023-09-22T23:08:11.625ZINFOcrucible: [1] deactivate job 1001 not New flush, NO
49292023-09-22T23:08:11.625ZINFOcrucible: [2] deactivate job 1001 not New flush, NO
49302023-09-22T23:08:11.625ZINFOcrucible: deactivate transition checking...
49312023-09-22T23:08:11.625ZINFOcrucible: deactivate_transition Active NO
49322023-09-22T23:08:11.625ZINFOcrucible: deactivate_transition Active NO
49332023-09-22T23:08:11.625ZINFOcrucible: deactivate_transition Active NO
4934 test test::up_test::deactivate_not_without_flush_write ... ok
49352023-09-22T23:08:11.625ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
49362023-09-22T23:08:11.625ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: c2254082-863e-4bff-9654-370c18a50ef8
49372023-09-22T23:08:11.625ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 0bc0db6e-7184-4297-a860-75fd98a494bc
49382023-09-22T23:08:11.625ZINFOcrucible: 00000000-0000-0000-0000-000000000000 set deactivating.
49392023-09-22T23:08:11.625ZINFOcrucible: [0] deactivate job 1001 not New flush, NO
49402023-09-22T23:08:11.625ZINFOcrucible: [1] deactivate job 1001 not New flush, NO
49412023-09-22T23:08:11.625ZINFOcrucible: [2] deactivate job 1001 not New flush, NO
49422023-09-22T23:08:11.625ZINFOcrucible: deactivate transition checking...
49432023-09-22T23:08:11.625ZINFOcrucible: deactivate_transition Active NO
49442023-09-22T23:08:11.625ZINFOcrucible: deactivate_transition Active NO
49452023-09-22T23:08:11.625ZINFOcrucible: deactivate_transition Active NO
4946 test test::up_test::deactivate_not_without_flush_write_unwritten ... ok
49472023-09-22T23:08:11.626ZINFOcrucible: Crucible stats registered with UUID: e272e714-f2cd-4519-8e52-011bc9f14ce1
49482023-09-22T23:08:11.626ZINFOcrucible: Crucible e272e714-f2cd-4519-8e52-011bc9f14ce1 has session id: bfdaad5f-47fe-4862-838c-e1f630e0e373
49492023-09-22T23:08:11.626ZINFOcrucible: e272e714-f2cd-4519-8e52-011bc9f14ce1 is now active with session: 6614485b-fa35-4ce7-9cd1-b06d76215aaa
49502023-09-22T23:08:11.626ZINFOcrucible: e272e714-f2cd-4519-8e52-011bc9f14ce1 set deactivating.
49512023-09-22T23:08:11.626ZINFOcrucible: [0] deactivate, no work so YES
49522023-09-22T23:08:11.626ZINFOcrucible: [0] e272e714-f2cd-4519-8e52-011bc9f14ce1 (6614485b-fa35-4ce7-9cd1-b06d76215aaa) Active Active Active ds_transition to Deactivated
49532023-09-22T23:08:11.626ZINFOcrucible: [0] Transition from Active to Deactivated
49542023-09-22T23:08:11.626ZINFOcrucible: [1] deactivate, no work so YES
49552023-09-22T23:08:11.626ZINFOcrucible: [1] e272e714-f2cd-4519-8e52-011bc9f14ce1 (6614485b-fa35-4ce7-9cd1-b06d76215aaa) Deactivated Active Active ds_transition to Deactivated
49562023-09-22T23:08:11.626ZINFOcrucible: [1] Transition from Active to Deactivated
49572023-09-22T23:08:11.626ZINFOcrucible: [2] deactivate, no work so YES
49582023-09-22T23:08:11.626ZINFOcrucible: [2] e272e714-f2cd-4519-8e52-011bc9f14ce1 (6614485b-fa35-4ce7-9cd1-b06d76215aaa) Deactivated Deactivated Active ds_transition to Deactivated
49592023-09-22T23:08:11.626ZINFOcrucible: [2] Transition from Active to Deactivated
49602023-09-22T23:08:11.626ZINFOcrucible: [0] e272e714-f2cd-4519-8e52-011bc9f14ce1 Gone missing, transition from Deactivated to New
49612023-09-22T23:08:11.626ZINFOcrucible: [1] e272e714-f2cd-4519-8e52-011bc9f14ce1 Gone missing, transition from Deactivated to New
49622023-09-22T23:08:11.626ZINFOcrucible: [2] e272e714-f2cd-4519-8e52-011bc9f14ce1 Gone missing, transition from Deactivated to New
49632023-09-22T23:08:11.626ZINFOcrucible: deactivate transition checking...
49642023-09-22T23:08:11.626ZINFOcrucible: deactivate_transition New Maybe
49652023-09-22T23:08:11.626ZINFOcrucible: deactivate_transition New Maybe
49662023-09-22T23:08:11.626ZINFOcrucible: deactivate_transition New Maybe
49672023-09-22T23:08:11.626ZINFOcrucible: All DS in the proper state! -> INIT
4968 test test::up_test::deactivate_when_empty ... ok
49692023-09-22T23:08:11.627ZINFOcrucible: Crucible stats registered with UUID: e9df70e4-2285-4c18-959f-b6883a32cd53
49702023-09-22T23:08:11.627ZINFOcrucible: Crucible e9df70e4-2285-4c18-959f-b6883a32cd53 has session id: 8afbce38-5825-4804-b8f5-0c8f23207fef
49712023-09-22T23:08:11.627ZINFOcrucible: [0] e9df70e4-2285-4c18-959f-b6883a32cd53 (4d20ac4f-cf0d-423a-bf8d-e1296ac427a8) New New New ds_transition to WaitQuorum
4972 test test::up_test::downstairs_bad_transition_wq - should panic ... ok
49732023-09-22T23:08:11.628ZINFOcrucible: Crucible stats registered with UUID: 76d2a4b1-8b63-4bd7-a38a-9aa2230ee510
49742023-09-22T23:08:11.628ZINFOcrucible: Crucible 76d2a4b1-8b63-4bd7-a38a-9aa2230ee510 has session id: f06bf8bb-0546-4a85-82b8-a56305cb9ec1
49752023-09-22T23:08:11.628ZINFOcrucible: [0] 76d2a4b1-8b63-4bd7-a38a-9aa2230ee510 (3fa475c2-a6fe-4f75-8518-36f337feca6a) New New New ds_transition to WaitActive
49762023-09-22T23:08:11.628ZINFOcrucible: [0] Transition from New to WaitActive
49772023-09-22T23:08:11.628ZINFOcrucible: [0] 76d2a4b1-8b63-4bd7-a38a-9aa2230ee510 (3fa475c2-a6fe-4f75-8518-36f337feca6a) WaitActive New New ds_transition to WaitQuorum
49782023-09-22T23:08:11.628ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
49792023-09-22T23:08:11.628ZINFOcrucible: [0] 76d2a4b1-8b63-4bd7-a38a-9aa2230ee510 (3fa475c2-a6fe-4f75-8518-36f337feca6a) WaitQuorum New New ds_transition to Active
49802023-09-22T23:08:11.628ZINFOcrucible: [0] Transition from WaitQuorum to Active
49812023-09-22T23:08:11.628ZINFOcrucible: [0] 76d2a4b1-8b63-4bd7-a38a-9aa2230ee510 (3fa475c2-a6fe-4f75-8518-36f337feca6a) Active New New ds_transition to Faulted
49822023-09-22T23:08:11.628ZINFOcrucible: [0] Transition from Active to Faulted
4983 test test::up_test::downstairs_transition_active_faulted ... ok
49842023-09-22T23:08:11.629ZINFOcrucible: Crucible stats registered with UUID: 345ad8eb-d2f8-45ed-ac5c-022696dd3420
49852023-09-22T23:08:11.629ZINFOcrucible: Crucible 345ad8eb-d2f8-45ed-ac5c-022696dd3420 has session id: bb4c7da7-615d-461b-805a-7472074e8ca2
49862023-09-22T23:08:11.629ZINFOcrucible: [0] 345ad8eb-d2f8-45ed-ac5c-022696dd3420 (0906270a-8d2a-40c7-98ee-734d146362aa) New New New ds_transition to WaitActive
49872023-09-22T23:08:11.629ZINFOcrucible: [0] Transition from New to WaitActive
49882023-09-22T23:08:11.629ZINFOcrucible: [0] 345ad8eb-d2f8-45ed-ac5c-022696dd3420 (0906270a-8d2a-40c7-98ee-734d146362aa) WaitActive New New ds_transition to WaitQuorum
49892023-09-22T23:08:11.629ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
49902023-09-22T23:08:11.629ZINFOcrucible: [0] 345ad8eb-d2f8-45ed-ac5c-022696dd3420 (0906270a-8d2a-40c7-98ee-734d146362aa) WaitQuorum New New ds_transition to Active
49912023-09-22T23:08:11.629ZINFOcrucible: [0] Transition from WaitQuorum to Active
49922023-09-22T23:08:11.629ZINFOcrucible: [0] 345ad8eb-d2f8-45ed-ac5c-022696dd3420 (0906270a-8d2a-40c7-98ee-734d146362aa) Active New New ds_transition to Faulted
49932023-09-22T23:08:11.629ZINFOcrucible: [0] Transition from Active to Faulted
4994 test test::up_test::downstairs_transition_active_to_faulted ... ok
49952023-09-22T23:08:11.629ZINFOcrucible: Crucible stats registered with UUID: 7a88b5eb-5f23-4308-81cb-e06f5a336ac5
49962023-09-22T23:08:11.629ZINFOcrucible: Crucible 7a88b5eb-5f23-4308-81cb-e06f5a336ac5 has session id: 6f0d1fb7-fa9c-498f-b6eb-8bd9ae7fccb5
49972023-09-22T23:08:11.629ZINFOcrucible: [0] 7a88b5eb-5f23-4308-81cb-e06f5a336ac5 (a2d56bbb-f5a7-4244-b1fb-1a80a00fcc28) New New New ds_transition to WaitActive
49982023-09-22T23:08:11.629ZINFOcrucible: [0] Transition from New to WaitActive
49992023-09-22T23:08:11.629ZINFOcrucible: [0] 7a88b5eb-5f23-4308-81cb-e06f5a336ac5 (a2d56bbb-f5a7-4244-b1fb-1a80a00fcc28) WaitActive New New ds_transition to WaitQuorum
50002023-09-22T23:08:11.629ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
50012023-09-22T23:08:11.629ZINFOcrucible: [0] 7a88b5eb-5f23-4308-81cb-e06f5a336ac5 (a2d56bbb-f5a7-4244-b1fb-1a80a00fcc28) WaitQuorum New New ds_transition to WaitActive
5002 test test::up_test::downstairs_transition_backwards - should panic ... ok
50032023-09-22T23:08:11.630ZINFOcrucible: Crucible stats registered with UUID: 7a05383d-e1df-40f2-9c5a-a11413c20abc
50042023-09-22T23:08:11.630ZINFOcrucible: Crucible 7a05383d-e1df-40f2-9c5a-a11413c20abc has session id: b81aebcc-ae6c-4a8c-81fe-e5ceb3a0e0e9
50052023-09-22T23:08:11.630ZINFOcrucible: [0] 7a05383d-e1df-40f2-9c5a-a11413c20abc (f2055bc4-e802-429a-b8db-9cc1be84dde0) New New New ds_transition to WaitActive
50062023-09-22T23:08:11.630ZINFOcrucible: [0] Transition from New to WaitActive
50072023-09-22T23:08:11.630ZINFOcrucible: [0] 7a05383d-e1df-40f2-9c5a-a11413c20abc (f2055bc4-e802-429a-b8db-9cc1be84dde0) WaitActive New New ds_transition to WaitQuorum
50082023-09-22T23:08:11.630ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
50092023-09-22T23:08:11.630ZINFOcrucible: [0] 7a05383d-e1df-40f2-9c5a-a11413c20abc (f2055bc4-e802-429a-b8db-9cc1be84dde0) WaitQuorum New New ds_transition to Active
50102023-09-22T23:08:11.630ZINFOcrucible: [0] Transition from WaitQuorum to Active
50112023-09-22T23:08:11.630ZINFOcrucible: [0] 7a05383d-e1df-40f2-9c5a-a11413c20abc (f2055bc4-e802-429a-b8db-9cc1be84dde0) Active New New ds_transition to WaitQuorum
5012 test test::up_test::downstairs_transition_bad_active - should panic ... ok
50132023-09-22T23:08:11.631ZINFOcrucible: Crucible stats registered with UUID: 6e224231-e7bb-41fb-ab05-7d5803f086a9
50142023-09-22T23:08:11.631ZINFOcrucible: Crucible 6e224231-e7bb-41fb-ab05-7d5803f086a9 has session id: 0ec9a497-79c3-4ad0-b142-bc2335c3becc
50152023-09-22T23:08:11.631ZINFOcrucible: [0] 6e224231-e7bb-41fb-ab05-7d5803f086a9 (e6ef2e6a-c2b1-430a-b04d-3f7ad93d8fb5) New New New ds_transition to WaitActive
50162023-09-22T23:08:11.631ZINFOcrucible: [0] Transition from New to WaitActive
50172023-09-22T23:08:11.631ZINFOcrucible: [0] 6e224231-e7bb-41fb-ab05-7d5803f086a9 (e6ef2e6a-c2b1-430a-b04d-3f7ad93d8fb5) WaitActive New New ds_transition to WaitQuorum
50182023-09-22T23:08:11.631ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
50192023-09-22T23:08:11.631ZINFOcrucible: [0] 6e224231-e7bb-41fb-ab05-7d5803f086a9 (e6ef2e6a-c2b1-430a-b04d-3f7ad93d8fb5) WaitQuorum New New ds_transition to Active
50202023-09-22T23:08:11.631ZINFOcrucible: [0] Transition from WaitQuorum to Active
50212023-09-22T23:08:11.631ZINFOcrucible: [0] 6e224231-e7bb-41fb-ab05-7d5803f086a9 (e6ef2e6a-c2b1-430a-b04d-3f7ad93d8fb5) Active New New ds_transition to Offline
50222023-09-22T23:08:11.631ZINFOcrucible: [0] Transition from Active to Offline
50232023-09-22T23:08:11.631ZINFOcrucible: [0] 6e224231-e7bb-41fb-ab05-7d5803f086a9 (e6ef2e6a-c2b1-430a-b04d-3f7ad93d8fb5) Offline New New ds_transition to WaitQuorum
5024 test test::up_test::downstairs_transition_bad_offline - should panic ... ok
50252023-09-22T23:08:11.632ZINFOcrucible: Crucible stats registered with UUID: 749ed771-276f-481e-9643-b0214e228df1
50262023-09-22T23:08:11.632ZINFOcrucible: Crucible 749ed771-276f-481e-9643-b0214e228df1 has session id: 77b322fe-2418-4558-9366-1709743b4d80
50272023-09-22T23:08:11.632ZINFOcrucible: [0] 749ed771-276f-481e-9643-b0214e228df1 (6d741d92-52c3-4732-913b-3f6c75095aab) New New New ds_transition to Replay
5028 test test::up_test::downstairs_transition_bad_replay - should panic ... ok
50292023-09-22T23:08:11.632ZINFOcrucible: Crucible stats registered with UUID: 1450174a-c9b8-44d8-9ece-9f3ed352a136
50302023-09-22T23:08:11.632ZINFOcrucible: Crucible 1450174a-c9b8-44d8-9ece-9f3ed352a136 has session id: 98a6c46c-bb1f-4a68-817f-9f70df6eb538
50312023-09-22T23:08:11.632ZINFOcrucible: [0] 1450174a-c9b8-44d8-9ece-9f3ed352a136 (f53bc8ad-c575-4a97-9221-26650f3c9328) New New New ds_transition to WaitActive
50322023-09-22T23:08:11.632ZINFOcrucible: [0] Transition from New to WaitActive
50332023-09-22T23:08:11.632ZINFOcrucible: [0] 1450174a-c9b8-44d8-9ece-9f3ed352a136 (f53bc8ad-c575-4a97-9221-26650f3c9328) WaitActive New New ds_transition to WaitQuorum
50342023-09-22T23:08:11.632ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
50352023-09-22T23:08:11.632ZINFOcrucible: [0] 1450174a-c9b8-44d8-9ece-9f3ed352a136 (f53bc8ad-c575-4a97-9221-26650f3c9328) WaitQuorum New New ds_transition to Active
50362023-09-22T23:08:11.632ZINFOcrucible: [0] Transition from WaitQuorum to Active
50372023-09-22T23:08:11.632ZINFOcrucible: 1450174a-c9b8-44d8-9ece-9f3ed352a136 is now active with session: f53bc8ad-c575-4a97-9221-26650f3c9328
50382023-09-22T23:08:11.632ZINFOcrucible: [0] 1450174a-c9b8-44d8-9ece-9f3ed352a136 (f53bc8ad-c575-4a97-9221-26650f3c9328) Active New New ds_transition to Deactivated
50392023-09-22T23:08:11.632ZINFOcrucible: [0] Transition from Active to Deactivated
50402023-09-22T23:08:11.632ZINFOcrucible: [0] 1450174a-c9b8-44d8-9ece-9f3ed352a136 (f53bc8ad-c575-4a97-9221-26650f3c9328) Deactivated New New ds_transition to New
50412023-09-22T23:08:11.633ZINFOcrucible: [0] Transition from Deactivated to New
5042 test test::up_test::downstairs_transition_deactivate_new ... ok
50432023-09-22T23:08:11.633ZINFOcrucible: Crucible stats registered with UUID: 71fc23d9-91e0-4c81-b9af-810c36135e15
50442023-09-22T23:08:11.633ZINFOcrucible: Crucible 71fc23d9-91e0-4c81-b9af-810c36135e15 has session id: e9c5041a-5f8b-49b8-9ae5-f7e975a39549
50452023-09-22T23:08:11.633ZINFOcrucible: [0] 71fc23d9-91e0-4c81-b9af-810c36135e15 (90a19801-74d1-4aed-b5cb-6a0df88d651c) New New New ds_transition to Deactivated
5046 test test::up_test::downstairs_transition_deactivate_not_new - should panic ... ok
50472023-09-22T23:08:11.634ZINFOcrucible: Crucible stats registered with UUID: e51d3849-32fd-4d3f-8571-9d95ec08c424
50482023-09-22T23:08:11.634ZINFOcrucible: Crucible e51d3849-32fd-4d3f-8571-9d95ec08c424 has session id: 6cdcf234-ce3b-4bd5-91f6-a184b9341c83
50492023-09-22T23:08:11.634ZINFOcrucible: [0] e51d3849-32fd-4d3f-8571-9d95ec08c424 (76bb2160-b19c-440e-ae54-4301d50f5e0e) New New New ds_transition to WaitActive
50502023-09-22T23:08:11.634ZINFOcrucible: [0] Transition from New to WaitActive
50512023-09-22T23:08:11.634ZINFOcrucible: [0] e51d3849-32fd-4d3f-8571-9d95ec08c424 (76bb2160-b19c-440e-ae54-4301d50f5e0e) WaitActive New New ds_transition to Deactivated
5052 test test::up_test::downstairs_transition_deactivate_not_wa - should panic ... ok
50532023-09-22T23:08:11.634ZINFOcrucible: Crucible stats registered with UUID: 19160081-922d-4e17-878b-bbb0b9008597
50542023-09-22T23:08:11.634ZINFOcrucible: Crucible 19160081-922d-4e17-878b-bbb0b9008597 has session id: c79bf584-6377-4cf1-b71b-e553b46e6ec8
50552023-09-22T23:08:11.634ZINFOcrucible: [0] 19160081-922d-4e17-878b-bbb0b9008597 (b1dd3b46-aac7-4b32-b9e4-5b904af67683) New New New ds_transition to WaitActive
50562023-09-22T23:08:11.634ZINFOcrucible: [0] Transition from New to WaitActive
50572023-09-22T23:08:11.634ZINFOcrucible: [0] 19160081-922d-4e17-878b-bbb0b9008597 (b1dd3b46-aac7-4b32-b9e4-5b904af67683) WaitActive New New ds_transition to WaitQuorum
50582023-09-22T23:08:11.634ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
50592023-09-22T23:08:11.635ZINFOcrucible: [0] 19160081-922d-4e17-878b-bbb0b9008597 (b1dd3b46-aac7-4b32-b9e4-5b904af67683) WaitQuorum New New ds_transition to Deactivated
5060 test test::up_test::downstairs_transition_deactivate_not_wq - should panic ... ok
50612023-09-22T23:08:11.635ZINFOcrucible: Crucible stats registered with UUID: 31c3dd37-38a2-4236-92ff-a8dfac26c022
50622023-09-22T23:08:11.635ZINFOcrucible: Crucible 31c3dd37-38a2-4236-92ff-a8dfac26c022 has session id: 97897687-7855-45af-8b4b-00a9c45fbb66
50632023-09-22T23:08:11.635ZINFOcrucible: [0] 31c3dd37-38a2-4236-92ff-a8dfac26c022 (fe9c2b0a-c6f2-4ec3-a7b7-e3fd494cc14e) New New New ds_transition to WaitActive
50642023-09-22T23:08:11.635ZINFOcrucible: [0] Transition from New to WaitActive
50652023-09-22T23:08:11.635ZINFOcrucible: [0] 31c3dd37-38a2-4236-92ff-a8dfac26c022 (fe9c2b0a-c6f2-4ec3-a7b7-e3fd494cc14e) WaitActive New New ds_transition to WaitQuorum
50662023-09-22T23:08:11.635ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
50672023-09-22T23:08:11.635ZINFOcrucible: [0] 31c3dd37-38a2-4236-92ff-a8dfac26c022 (fe9c2b0a-c6f2-4ec3-a7b7-e3fd494cc14e) WaitQuorum New New ds_transition to Deactivated
5068 test test::up_test::downstairs_transition_disconnect_no_active - should panic ... ok
50692023-09-22T23:08:11.636ZINFOcrucible: Crucible stats registered with UUID: 86ba2ad7-5cb3-44f1-a126-2cb4b4d215f5
50702023-09-22T23:08:11.636ZINFOcrucible: Crucible 86ba2ad7-5cb3-44f1-a126-2cb4b4d215f5 has session id: 577bacc8-951e-4686-be55-6c769efb4f5d
50712023-09-22T23:08:11.636ZINFOcrucible: [0] 86ba2ad7-5cb3-44f1-a126-2cb4b4d215f5 (f180244a-cb04-4921-a34a-9654b24a63c8) New New New ds_transition to Offline
5072 test test::up_test::downstairs_transition_no_new_to_offline - should panic ... ok
50732023-09-22T23:08:11.636ZINFOcrucible: Crucible stats registered with UUID: 0fe70785-506d-4660-ab80-bbb7de2b0b2a
50742023-09-22T23:08:11.637ZINFOcrucible: Crucible 0fe70785-506d-4660-ab80-bbb7de2b0b2a has session id: 0166ba75-9a2e-4969-8f6d-c295ff66a1b3
50752023-09-22T23:08:11.637ZINFOcrucible: [0] 0fe70785-506d-4660-ab80-bbb7de2b0b2a (74f5fac3-72d8-45bf-8e50-f8a9502b7636) New New New ds_transition to WaitActive
50762023-09-22T23:08:11.637ZINFOcrucible: [0] Transition from New to WaitActive
50772023-09-22T23:08:11.637ZINFOcrucible: [0] 0fe70785-506d-4660-ab80-bbb7de2b0b2a (74f5fac3-72d8-45bf-8e50-f8a9502b7636) WaitActive New New ds_transition to WaitQuorum
50782023-09-22T23:08:11.637ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
50792023-09-22T23:08:11.637ZINFOcrucible: [0] 0fe70785-506d-4660-ab80-bbb7de2b0b2a (74f5fac3-72d8-45bf-8e50-f8a9502b7636) WaitQuorum New New ds_transition to Active
50802023-09-22T23:08:11.637ZINFOcrucible: [0] Transition from WaitQuorum to Active
5081 test test::up_test::downstairs_transition_normal ... ok
50822023-09-22T23:08:11.637ZINFOcrucible: Crucible stats registered with UUID: 0db93807-24cb-479d-a7e5-cfaccfb35c64
50832023-09-22T23:08:11.637ZINFOcrucible: Crucible 0db93807-24cb-479d-a7e5-cfaccfb35c64 has session id: 1771c71d-8f3f-4715-a2ae-e2bb61bdae3c
50842023-09-22T23:08:11.637ZINFOcrucible: [0] 0db93807-24cb-479d-a7e5-cfaccfb35c64 (3fadfdd8-680f-47fd-a927-4dc587f6e2db) New New New ds_transition to WaitActive
50852023-09-22T23:08:11.637ZINFOcrucible: [0] Transition from New to WaitActive
50862023-09-22T23:08:11.637ZINFOcrucible: [0] 0db93807-24cb-479d-a7e5-cfaccfb35c64 (3fadfdd8-680f-47fd-a927-4dc587f6e2db) WaitActive New New ds_transition to WaitQuorum
50872023-09-22T23:08:11.637ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
50882023-09-22T23:08:11.637ZINFOcrucible: [0] 0db93807-24cb-479d-a7e5-cfaccfb35c64 (3fadfdd8-680f-47fd-a927-4dc587f6e2db) WaitQuorum New New ds_transition to Active
50892023-09-22T23:08:11.637ZINFOcrucible: [0] Transition from WaitQuorum to Active
50902023-09-22T23:08:11.637ZINFOcrucible: [0] 0db93807-24cb-479d-a7e5-cfaccfb35c64 (3fadfdd8-680f-47fd-a927-4dc587f6e2db) Active New New ds_transition to Offline
50912023-09-22T23:08:11.637ZINFOcrucible: [0] Transition from Active to Offline
50922023-09-22T23:08:11.637ZINFOcrucible: [0] 0db93807-24cb-479d-a7e5-cfaccfb35c64 (3fadfdd8-680f-47fd-a927-4dc587f6e2db) Offline New New ds_transition to Active
5093 test test::up_test::downstairs_transition_offline_no_active - should panic ... ok
50942023-09-22T23:08:11.638ZINFOcrucible: Crucible stats registered with UUID: ffcb4455-79ad-4fa6-bc30-85dd4266da80
50952023-09-22T23:08:11.638ZINFOcrucible: Crucible ffcb4455-79ad-4fa6-bc30-85dd4266da80 has session id: 2d180d76-07d3-4b3c-a8d2-bb9d00ba48fe
50962023-09-22T23:08:11.638ZINFOcrucible: [0] ffcb4455-79ad-4fa6-bc30-85dd4266da80 (e72a4722-8393-4970-9058-71709c18d6d4) New New New ds_transition to WaitActive
50972023-09-22T23:08:11.638ZINFOcrucible: [0] Transition from New to WaitActive
50982023-09-22T23:08:11.638ZINFOcrucible: [0] ffcb4455-79ad-4fa6-bc30-85dd4266da80 (e72a4722-8393-4970-9058-71709c18d6d4) WaitActive New New ds_transition to WaitQuorum
50992023-09-22T23:08:11.638ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51002023-09-22T23:08:11.638ZINFOcrucible: ffcb4455-79ad-4fa6-bc30-85dd4266da80 is now active with session: e72a4722-8393-4970-9058-71709c18d6d4
51012023-09-22T23:08:11.638ZINFOcrucible: [0] ffcb4455-79ad-4fa6-bc30-85dd4266da80 (e72a4722-8393-4970-9058-71709c18d6d4) WaitQuorum New New ds_transition to Active
51022023-09-22T23:08:11.638ZINFOcrucible: [0] Transition from WaitQuorum to Active
51032023-09-22T23:08:11.638ZINFOcrucible: [0] ffcb4455-79ad-4fa6-bc30-85dd4266da80 (e72a4722-8393-4970-9058-71709c18d6d4) Active New New ds_transition to Offline
51042023-09-22T23:08:11.638ZINFOcrucible: [0] Transition from Active to Offline
51052023-09-22T23:08:11.638ZINFOcrucible: [0] ffcb4455-79ad-4fa6-bc30-85dd4266da80 (e72a4722-8393-4970-9058-71709c18d6d4) Offline New New ds_transition to Replay
51062023-09-22T23:08:11.638ZINFOcrucible: [0] Transition from Offline to Replay
5107 test test::up_test::downstairs_transition_replay ... ok
51082023-09-22T23:08:11.639ZINFOcrucible: Crucible stats registered with UUID: 89776979-2662-40aa-a9f1-044189059988
51092023-09-22T23:08:11.639ZINFOcrucible: Crucible 89776979-2662-40aa-a9f1-044189059988 has session id: 21c08393-c5da-4e80-b02e-bf50a22c6912
51102023-09-22T23:08:11.639ZINFOcrucible: [0] 89776979-2662-40aa-a9f1-044189059988 (4c6078b9-6175-4cb8-b8ef-d98478e11fec) New New New ds_transition to WaitActive
51112023-09-22T23:08:11.639ZINFOcrucible: [0] Transition from New to WaitActive
51122023-09-22T23:08:11.639ZINFOcrucible: [0] 89776979-2662-40aa-a9f1-044189059988 (4c6078b9-6175-4cb8-b8ef-d98478e11fec) WaitActive New New ds_transition to WaitQuorum
51132023-09-22T23:08:11.639ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51142023-09-22T23:08:11.639ZINFOcrucible: [0] 89776979-2662-40aa-a9f1-044189059988 (4c6078b9-6175-4cb8-b8ef-d98478e11fec) WaitQuorum New New ds_transition to Active
51152023-09-22T23:08:11.639ZINFOcrucible: [0] Transition from WaitQuorum to Active
51162023-09-22T23:08:11.639ZINFOcrucible: [0] 89776979-2662-40aa-a9f1-044189059988 (4c6078b9-6175-4cb8-b8ef-d98478e11fec) Active New New ds_transition to Active
5117 test test::up_test::downstairs_transition_same_active - should panic ... ok
51182023-09-22T23:08:11.640ZINFOcrucible: Crucible stats registered with UUID: 5d2fceb8-8ac5-46e1-a268-e3d9138ee48b
51192023-09-22T23:08:11.640ZINFOcrucible: Crucible 5d2fceb8-8ac5-46e1-a268-e3d9138ee48b has session id: 748a65f6-2812-4f9c-bc13-a06ce04e9490
51202023-09-22T23:08:11.640ZINFOcrucible: [0] 5d2fceb8-8ac5-46e1-a268-e3d9138ee48b (0a4120a9-dea3-4dca-8555-e9791a197bbd) New New New ds_transition to WaitActive
51212023-09-22T23:08:11.640ZINFOcrucible: [0] Transition from New to WaitActive
51222023-09-22T23:08:11.640ZINFOcrucible: [0] 5d2fceb8-8ac5-46e1-a268-e3d9138ee48b (0a4120a9-dea3-4dca-8555-e9791a197bbd) WaitActive New New ds_transition to WaitQuorum
51232023-09-22T23:08:11.640ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51242023-09-22T23:08:11.640ZINFOcrucible: [0] 5d2fceb8-8ac5-46e1-a268-e3d9138ee48b (0a4120a9-dea3-4dca-8555-e9791a197bbd) WaitQuorum New New ds_transition to Active
51252023-09-22T23:08:11.640ZINFOcrucible: [0] Transition from WaitQuorum to Active
51262023-09-22T23:08:11.640ZINFOcrucible: [0] 5d2fceb8-8ac5-46e1-a268-e3d9138ee48b (0a4120a9-dea3-4dca-8555-e9791a197bbd) Active New New ds_transition to Offline
51272023-09-22T23:08:11.640ZINFOcrucible: [0] Transition from Active to Offline
51282023-09-22T23:08:11.640ZINFOcrucible: [0] 5d2fceb8-8ac5-46e1-a268-e3d9138ee48b (0a4120a9-dea3-4dca-8555-e9791a197bbd) Offline New New ds_transition to Offline
5129 test test::up_test::downstairs_transition_same_offline - should panic ... ok
51302023-09-22T23:08:11.640ZINFOcrucible: Crucible stats registered with UUID: ea2dab8a-1336-463d-98a4-e05d78c9b678
51312023-09-22T23:08:11.641ZINFOcrucible: Crucible ea2dab8a-1336-463d-98a4-e05d78c9b678 has session id: afdf035a-a12f-491c-b567-fdf52af90228
51322023-09-22T23:08:11.641ZINFOcrucible: [0] ea2dab8a-1336-463d-98a4-e05d78c9b678 (9c7cab95-9236-449e-8dd8-96c0eba91581) New New New ds_transition to WaitActive
51332023-09-22T23:08:11.641ZINFOcrucible: [0] Transition from New to WaitActive
51342023-09-22T23:08:11.641ZINFOcrucible: [0] ea2dab8a-1336-463d-98a4-e05d78c9b678 (9c7cab95-9236-449e-8dd8-96c0eba91581) WaitActive New New ds_transition to WaitActive
5135 test test::up_test::downstairs_transition_same_wa - should panic ... ok
51362023-09-22T23:08:11.641ZINFOcrucible: Crucible stats registered with UUID: 1f7f81c6-a72d-421f-a514-8f633a876aea
51372023-09-22T23:08:11.641ZINFOcrucible: Crucible 1f7f81c6-a72d-421f-a514-8f633a876aea has session id: 5f366355-35aa-4591-a6de-88df03551d17
51382023-09-22T23:08:11.641ZINFOcrucible: [0] 1f7f81c6-a72d-421f-a514-8f633a876aea (4a786c80-d276-4d84-a740-e98a20ae047b) New New New ds_transition to WaitActive
51392023-09-22T23:08:11.641ZINFOcrucible: [0] Transition from New to WaitActive
51402023-09-22T23:08:11.641ZINFOcrucible: [0] 1f7f81c6-a72d-421f-a514-8f633a876aea (4a786c80-d276-4d84-a740-e98a20ae047b) WaitActive New New ds_transition to WaitQuorum
51412023-09-22T23:08:11.641ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51422023-09-22T23:08:11.641ZINFOcrucible: [0] 1f7f81c6-a72d-421f-a514-8f633a876aea (4a786c80-d276-4d84-a740-e98a20ae047b) WaitQuorum New New ds_transition to WaitQuorum
5143 test test::up_test::downstairs_transition_same_wq - should panic ... ok
51442023-09-22T23:08:11.642ZINFOcrucible: Crucible stats registered with UUID: be7ed2d4-01b8-4997-8b73-86b6eec8763a
51452023-09-22T23:08:11.642ZINFOcrucible: Crucible be7ed2d4-01b8-4997-8b73-86b6eec8763a has session id: 5919cf68-5c7d-43c5-9f19-693bfc5ce6bd
51462023-09-22T23:08:11.642ZINFOcrucible: [0] be7ed2d4-01b8-4997-8b73-86b6eec8763a (5985a313-72b4-4515-b4d2-a311c9846e1d) New New New ds_transition to WaitActive
51472023-09-22T23:08:11.642ZINFOcrucible: [0] Transition from New to WaitActive
51482023-09-22T23:08:11.642ZINFOcrucible: [0] be7ed2d4-01b8-4997-8b73-86b6eec8763a (5985a313-72b4-4515-b4d2-a311c9846e1d) WaitActive New New ds_transition to WaitQuorum
51492023-09-22T23:08:11.642ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51502023-09-22T23:08:11.642ZINFOcrucible: [0] be7ed2d4-01b8-4997-8b73-86b6eec8763a (5985a313-72b4-4515-b4d2-a311c9846e1d) WaitQuorum New New ds_transition to Active
51512023-09-22T23:08:11.642ZINFOcrucible: [0] Transition from WaitQuorum to Active
51522023-09-22T23:08:11.642ZINFOcrucible: [1] be7ed2d4-01b8-4997-8b73-86b6eec8763a (5985a313-72b4-4515-b4d2-a311c9846e1d) Active New New ds_transition to WaitActive
51532023-09-22T23:08:11.642ZINFOcrucible: [1] Transition from New to WaitActive
51542023-09-22T23:08:11.642ZINFOcrucible: [1] be7ed2d4-01b8-4997-8b73-86b6eec8763a (5985a313-72b4-4515-b4d2-a311c9846e1d) Active WaitActive New ds_transition to WaitQuorum
51552023-09-22T23:08:11.642ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
51562023-09-22T23:08:11.642ZINFOcrucible: [1] be7ed2d4-01b8-4997-8b73-86b6eec8763a (5985a313-72b4-4515-b4d2-a311c9846e1d) Active WaitQuorum New ds_transition to Active
51572023-09-22T23:08:11.642ZINFOcrucible: [1] Transition from WaitQuorum to Active
51582023-09-22T23:08:11.642ZINFOcrucible: [2] be7ed2d4-01b8-4997-8b73-86b6eec8763a (5985a313-72b4-4515-b4d2-a311c9846e1d) Active Active New ds_transition to WaitActive
51592023-09-22T23:08:11.642ZINFOcrucible: [2] Transition from New to WaitActive
51602023-09-22T23:08:11.642ZINFOcrucible: [2] be7ed2d4-01b8-4997-8b73-86b6eec8763a (5985a313-72b4-4515-b4d2-a311c9846e1d) Active Active WaitActive ds_transition to WaitQuorum
51612023-09-22T23:08:11.642ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
51622023-09-22T23:08:11.642ZINFOcrucible: [2] be7ed2d4-01b8-4997-8b73-86b6eec8763a (5985a313-72b4-4515-b4d2-a311c9846e1d) Active Active WaitQuorum ds_transition to Active
51632023-09-22T23:08:11.642ZINFOcrucible: [2] Transition from WaitQuorum to Active
51642023-09-22T23:08:11.642ZINFOcrucible: be7ed2d4-01b8-4997-8b73-86b6eec8763a is now active with session: 5985a313-72b4-4515-b4d2-a311c9846e1d
51652023-09-22T23:08:11.642ZINFOcrucible: [0] be7ed2d4-01b8-4997-8b73-86b6eec8763a (5985a313-72b4-4515-b4d2-a311c9846e1d) Active Active Active ds_transition to Faulted
51662023-09-22T23:08:11.642ZINFOcrucible: [0] Transition from Active to Faulted
5167 test test::up_test::faulted_downstairs_skips_but_still_does_work ... ok
51682023-09-22T23:08:11.643ZINFOcrucible: Crucible stats registered with UUID: a07fe4f6-3a09-461f-b7b7-4cb79508ffc4
51692023-09-22T23:08:11.643ZINFOcrucible: Crucible a07fe4f6-3a09-461f-b7b7-4cb79508ffc4 has session id: f6ce5f3c-9be1-44a4-b83b-54f18e22fd78
51702023-09-22T23:08:11.643ZINFOcrucible: [0] a07fe4f6-3a09-461f-b7b7-4cb79508ffc4 (731572d1-814f-4180-b2cf-7e69c0608a59) New New New ds_transition to WaitActive
51712023-09-22T23:08:11.643ZINFOcrucible: [0] Transition from New to WaitActive
51722023-09-22T23:08:11.643ZINFOcrucible: [0] a07fe4f6-3a09-461f-b7b7-4cb79508ffc4 (731572d1-814f-4180-b2cf-7e69c0608a59) WaitActive New New ds_transition to WaitQuorum
51732023-09-22T23:08:11.643ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51742023-09-22T23:08:11.643ZINFOcrucible: [0] a07fe4f6-3a09-461f-b7b7-4cb79508ffc4 (731572d1-814f-4180-b2cf-7e69c0608a59) WaitQuorum New New ds_transition to Active
51752023-09-22T23:08:11.643ZINFOcrucible: [0] Transition from WaitQuorum to Active
51762023-09-22T23:08:11.643ZINFOcrucible: [1] a07fe4f6-3a09-461f-b7b7-4cb79508ffc4 (731572d1-814f-4180-b2cf-7e69c0608a59) Active New New ds_transition to WaitActive
51772023-09-22T23:08:11.643ZINFOcrucible: [1] Transition from New to WaitActive
51782023-09-22T23:08:11.643ZINFOcrucible: [1] a07fe4f6-3a09-461f-b7b7-4cb79508ffc4 (731572d1-814f-4180-b2cf-7e69c0608a59) Active WaitActive New ds_transition to WaitQuorum
51792023-09-22T23:08:11.643ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
51802023-09-22T23:08:11.643ZINFOcrucible: [1] a07fe4f6-3a09-461f-b7b7-4cb79508ffc4 (731572d1-814f-4180-b2cf-7e69c0608a59) Active WaitQuorum New ds_transition to Active
51812023-09-22T23:08:11.643ZINFOcrucible: [1] Transition from WaitQuorum to Active
51822023-09-22T23:08:11.643ZINFOcrucible: [2] a07fe4f6-3a09-461f-b7b7-4cb79508ffc4 (731572d1-814f-4180-b2cf-7e69c0608a59) Active Active New ds_transition to WaitActive
51832023-09-22T23:08:11.643ZINFOcrucible: [2] Transition from New to WaitActive
51842023-09-22T23:08:11.643ZINFOcrucible: [2] a07fe4f6-3a09-461f-b7b7-4cb79508ffc4 (731572d1-814f-4180-b2cf-7e69c0608a59) Active Active WaitActive ds_transition to WaitQuorum
51852023-09-22T23:08:11.643ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
51862023-09-22T23:08:11.643ZINFOcrucible: [2] a07fe4f6-3a09-461f-b7b7-4cb79508ffc4 (731572d1-814f-4180-b2cf-7e69c0608a59) Active Active WaitQuorum ds_transition to Active
51872023-09-22T23:08:11.643ZINFOcrucible: [2] Transition from WaitQuorum to Active
51882023-09-22T23:08:11.643ZINFOcrucible: a07fe4f6-3a09-461f-b7b7-4cb79508ffc4 is now active with session: 731572d1-814f-4180-b2cf-7e69c0608a59
51892023-09-22T23:08:11.643ZINFOcrucible: [0] a07fe4f6-3a09-461f-b7b7-4cb79508ffc4 (731572d1-814f-4180-b2cf-7e69c0608a59) Active Active Active ds_transition to Faulted
51902023-09-22T23:08:11.643ZINFOcrucible: [0] Transition from Active to Faulted
5191 test test::up_test::faulted_downstairs_skips_work ... ok
51922023-09-22T23:08:11.644ZINFOcrucible: Crucible stats registered with UUID: a46292f6-8b2f-47df-8c30-c1bec8094915
51932023-09-22T23:08:11.644ZINFOcrucible: Crucible a46292f6-8b2f-47df-8c30-c1bec8094915 has session id: 6031d7a3-0344-41e9-abba-455bb1501038
51942023-09-22T23:08:11.644ZINFOcrucible: a46292f6-8b2f-47df-8c30-c1bec8094915 is now active with session: 379ac299-7319-4aac-9b9e-66085dbccaf1
51952023-09-22T23:08:11.644ZINFOcrucible: [0] a46292f6-8b2f-47df-8c30-c1bec8094915 (379ac299-7319-4aac-9b9e-66085dbccaf1) New New New ds_transition to WaitActive
51962023-09-22T23:08:11.644ZINFOcrucible: [0] Transition from New to WaitActive
51972023-09-22T23:08:11.644ZINFOcrucible: [0] a46292f6-8b2f-47df-8c30-c1bec8094915 (379ac299-7319-4aac-9b9e-66085dbccaf1) WaitActive New New ds_transition to WaitQuorum
51982023-09-22T23:08:11.644ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
51992023-09-22T23:08:11.644ZINFOcrucible: [0] a46292f6-8b2f-47df-8c30-c1bec8094915 (379ac299-7319-4aac-9b9e-66085dbccaf1) WaitQuorum New New ds_transition to Active
52002023-09-22T23:08:11.644ZINFOcrucible: [0] Transition from WaitQuorum to Active
52012023-09-22T23:08:11.644ZINFOcrucible: [1] a46292f6-8b2f-47df-8c30-c1bec8094915 (379ac299-7319-4aac-9b9e-66085dbccaf1) Active New New ds_transition to WaitActive
52022023-09-22T23:08:11.644ZINFOcrucible: [1] Transition from New to WaitActive
52032023-09-22T23:08:11.644ZINFOcrucible: [1] a46292f6-8b2f-47df-8c30-c1bec8094915 (379ac299-7319-4aac-9b9e-66085dbccaf1) Active WaitActive New ds_transition to WaitQuorum
52042023-09-22T23:08:11.644ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
52052023-09-22T23:08:11.644ZINFOcrucible: [1] a46292f6-8b2f-47df-8c30-c1bec8094915 (379ac299-7319-4aac-9b9e-66085dbccaf1) Active WaitQuorum New ds_transition to Active
52062023-09-22T23:08:11.644ZINFOcrucible: [1] Transition from WaitQuorum to Active
52072023-09-22T23:08:11.644ZINFOcrucible: [2] a46292f6-8b2f-47df-8c30-c1bec8094915 (379ac299-7319-4aac-9b9e-66085dbccaf1) Active Active New ds_transition to WaitActive
52082023-09-22T23:08:11.644ZINFOcrucible: [2] Transition from New to WaitActive
52092023-09-22T23:08:11.644ZINFOcrucible: [2] a46292f6-8b2f-47df-8c30-c1bec8094915 (379ac299-7319-4aac-9b9e-66085dbccaf1) Active Active WaitActive ds_transition to WaitQuorum
52102023-09-22T23:08:11.644ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
52112023-09-22T23:08:11.644ZINFOcrucible: [2] a46292f6-8b2f-47df-8c30-c1bec8094915 (379ac299-7319-4aac-9b9e-66085dbccaf1) Active Active WaitQuorum ds_transition to Active
52122023-09-22T23:08:11.644ZINFOcrucible: [2] Transition from WaitQuorum to Active
52132023-09-22T23:08:11.644ZINFOcrucible: [1] a46292f6-8b2f-47df-8c30-c1bec8094915 (379ac299-7319-4aac-9b9e-66085dbccaf1) Active Active Active ds_transition to Faulted
52142023-09-22T23:08:11.644ZINFOcrucible: [1] Transition from Active to Faulted
52152023-09-22T23:08:11.644ZINFOcrucible: [2] a46292f6-8b2f-47df-8c30-c1bec8094915 (379ac299-7319-4aac-9b9e-66085dbccaf1) Active Faulted Active ds_transition to Faulted
52162023-09-22T23:08:11.644ZINFOcrucible: [2] Transition from Active to Faulted
5217 test test::up_test::flush_io_double_skip ... ok
52182023-09-22T23:08:11.645ZINFOcrucible: Crucible stats registered with UUID: 034b3592-4704-4869-95dd-da40923e5770
52192023-09-22T23:08:11.645ZINFOcrucible: Crucible 034b3592-4704-4869-95dd-da40923e5770 has session id: 002d4345-ef8c-4ec7-b825-1089e35be56a
52202023-09-22T23:08:11.645ZINFOcrucible: 034b3592-4704-4869-95dd-da40923e5770 is now active with session: beb18472-6f47-4f48-bd85-e0b787ecd825
52212023-09-22T23:08:11.645ZINFOcrucible: [0] 034b3592-4704-4869-95dd-da40923e5770 (beb18472-6f47-4f48-bd85-e0b787ecd825) New New New ds_transition to WaitActive
52222023-09-22T23:08:11.645ZINFOcrucible: [0] Transition from New to WaitActive
52232023-09-22T23:08:11.645ZINFOcrucible: [0] 034b3592-4704-4869-95dd-da40923e5770 (beb18472-6f47-4f48-bd85-e0b787ecd825) WaitActive New New ds_transition to WaitQuorum
52242023-09-22T23:08:11.645ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
52252023-09-22T23:08:11.645ZINFOcrucible: [0] 034b3592-4704-4869-95dd-da40923e5770 (beb18472-6f47-4f48-bd85-e0b787ecd825) WaitQuorum New New ds_transition to Active
52262023-09-22T23:08:11.645ZINFOcrucible: [0] Transition from WaitQuorum to Active
52272023-09-22T23:08:11.645ZINFOcrucible: [1] 034b3592-4704-4869-95dd-da40923e5770 (beb18472-6f47-4f48-bd85-e0b787ecd825) Active New New ds_transition to WaitActive
52282023-09-22T23:08:11.645ZINFOcrucible: [1] Transition from New to WaitActive
52292023-09-22T23:08:11.645ZINFOcrucible: [1] 034b3592-4704-4869-95dd-da40923e5770 (beb18472-6f47-4f48-bd85-e0b787ecd825) Active WaitActive New ds_transition to WaitQuorum
52302023-09-22T23:08:11.645ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
52312023-09-22T23:08:11.645ZINFOcrucible: [1] 034b3592-4704-4869-95dd-da40923e5770 (beb18472-6f47-4f48-bd85-e0b787ecd825) Active WaitQuorum New ds_transition to Active
52322023-09-22T23:08:11.645ZINFOcrucible: [1] Transition from WaitQuorum to Active
52332023-09-22T23:08:11.645ZINFOcrucible: [2] 034b3592-4704-4869-95dd-da40923e5770 (beb18472-6f47-4f48-bd85-e0b787ecd825) Active Active New ds_transition to WaitActive
52342023-09-22T23:08:11.645ZINFOcrucible: [2] Transition from New to WaitActive
52352023-09-22T23:08:11.645ZINFOcrucible: [2] 034b3592-4704-4869-95dd-da40923e5770 (beb18472-6f47-4f48-bd85-e0b787ecd825) Active Active WaitActive ds_transition to WaitQuorum
52362023-09-22T23:08:11.645ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
52372023-09-22T23:08:11.645ZINFOcrucible: [2] 034b3592-4704-4869-95dd-da40923e5770 (beb18472-6f47-4f48-bd85-e0b787ecd825) Active Active WaitQuorum ds_transition to Active
52382023-09-22T23:08:11.645ZINFOcrucible: [2] Transition from WaitQuorum to Active
52392023-09-22T23:08:11.645ZINFOcrucible: [0] 034b3592-4704-4869-95dd-da40923e5770 (beb18472-6f47-4f48-bd85-e0b787ecd825) Active Active Active ds_transition to Faulted
52402023-09-22T23:08:11.645ZINFOcrucible: [0] Transition from Active to Faulted
52412023-09-22T23:08:11.645ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1010, DownstairsIO { ds_id: JobId(1010), guest_id: 19, work: Flush { dependencies: [], flush_number: 22, gen_number: 11, snapshot_details: None, extent_limit: None }, state: ClientData([Skipped, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
52422023-09-22T23:08:11.646ZERROcrucible: [1] Reports error GenericError("bad") on job 1010, DownstairsIO { ds_id: JobId(1010), guest_id: 19, work: Flush { dependencies: [], flush_number: 22, gen_number: 11, snapshot_details: None, extent_limit: None }, state: ClientData([Skipped, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
5243 test test::up_test::flush_io_fail_and_skip ... ok
52442023-09-22T23:08:11.646ZINFOcrucible: Crucible stats registered with UUID: 1e270f52-861c-4e7c-a049-3129084987a3
52452023-09-22T23:08:11.646ZINFOcrucible: Crucible 1e270f52-861c-4e7c-a049-3129084987a3 has session id: 60af427f-6e62-41ee-9f96-43ccc0600e56
52462023-09-22T23:08:11.646ZINFOcrucible: 1e270f52-861c-4e7c-a049-3129084987a3 is now active with session: f8936756-3f9b-4036-be33-913f93aa3488
52472023-09-22T23:08:11.646ZINFOcrucible: [0] 1e270f52-861c-4e7c-a049-3129084987a3 (f8936756-3f9b-4036-be33-913f93aa3488) New New New ds_transition to WaitActive
52482023-09-22T23:08:11.646ZINFOcrucible: [0] Transition from New to WaitActive
52492023-09-22T23:08:11.646ZINFOcrucible: [0] 1e270f52-861c-4e7c-a049-3129084987a3 (f8936756-3f9b-4036-be33-913f93aa3488) WaitActive New New ds_transition to WaitQuorum
52502023-09-22T23:08:11.646ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
52512023-09-22T23:08:11.646ZINFOcrucible: [0] 1e270f52-861c-4e7c-a049-3129084987a3 (f8936756-3f9b-4036-be33-913f93aa3488) WaitQuorum New New ds_transition to Active
52522023-09-22T23:08:11.646ZINFOcrucible: [0] Transition from WaitQuorum to Active
52532023-09-22T23:08:11.646ZINFOcrucible: [1] 1e270f52-861c-4e7c-a049-3129084987a3 (f8936756-3f9b-4036-be33-913f93aa3488) Active New New ds_transition to WaitActive
52542023-09-22T23:08:11.646ZINFOcrucible: [1] Transition from New to WaitActive
52552023-09-22T23:08:11.646ZINFOcrucible: [1] 1e270f52-861c-4e7c-a049-3129084987a3 (f8936756-3f9b-4036-be33-913f93aa3488) Active WaitActive New ds_transition to WaitQuorum
52562023-09-22T23:08:11.646ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
52572023-09-22T23:08:11.646ZINFOcrucible: [1] 1e270f52-861c-4e7c-a049-3129084987a3 (f8936756-3f9b-4036-be33-913f93aa3488) Active WaitQuorum New ds_transition to Active
52582023-09-22T23:08:11.646ZINFOcrucible: [1] Transition from WaitQuorum to Active
52592023-09-22T23:08:11.646ZINFOcrucible: [2] 1e270f52-861c-4e7c-a049-3129084987a3 (f8936756-3f9b-4036-be33-913f93aa3488) Active Active New ds_transition to WaitActive
52602023-09-22T23:08:11.646ZINFOcrucible: [2] Transition from New to WaitActive
52612023-09-22T23:08:11.646ZINFOcrucible: [2] 1e270f52-861c-4e7c-a049-3129084987a3 (f8936756-3f9b-4036-be33-913f93aa3488) Active Active WaitActive ds_transition to WaitQuorum
52622023-09-22T23:08:11.646ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
52632023-09-22T23:08:11.646ZINFOcrucible: [2] 1e270f52-861c-4e7c-a049-3129084987a3 (f8936756-3f9b-4036-be33-913f93aa3488) Active Active WaitQuorum ds_transition to Active
52642023-09-22T23:08:11.646ZINFOcrucible: [2] Transition from WaitQuorum to Active
52652023-09-22T23:08:11.646ZINFOcrucible: [1] 1e270f52-861c-4e7c-a049-3129084987a3 (f8936756-3f9b-4036-be33-913f93aa3488) Active Active Active ds_transition to Faulted
52662023-09-22T23:08:11.647ZINFOcrucible: [1] Transition from Active to Faulted
5267 test test::up_test::flush_io_single_skip ... ok
52682023-09-22T23:08:11.647ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
52692023-09-22T23:08:11.647ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: dff94e1e-c8dd-487d-aa88-79807384cfec
5270 test test::up_test::not_right_block_size - should panic ... ok
52712023-09-22T23:08:11.648ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
52722023-09-22T23:08:11.648ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 93afa2a1-f575-4ce6-80eb-79dbe509fde7
5273 test test::up_test::off_to_extent_bridge ... ok
52742023-09-22T23:08:11.651ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
52752023-09-22T23:08:11.651ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: c819c307-acff-4270-a283-272e7a71e185
5276 test test::up_test::off_to_extent_length_almost_too_big ... ok
52772023-09-22T23:08:11.651ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
52782023-09-22T23:08:11.651ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 516222bf-6c67-41ec-a836-a85a4490ab11
5279 test test::up_test::off_to_extent_length_and_offset_almost_too_big ... ok
52802023-09-22T23:08:11.652ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
52812023-09-22T23:08:11.652ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 10bfea0a-096f-4f83-84be-e28e35f1ad33
5282 test test::up_test::off_to_extent_length_and_offset_too_big - should panic ... ok
52832023-09-22T23:08:11.653ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
52842023-09-22T23:08:11.653ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: fdf734ce-2166-470c-b2b1-0bdfec50aa0d
5285 test test::up_test::off_to_extent_length_too_big - should panic ... ok
52862023-09-22T23:08:11.653ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
52872023-09-22T23:08:11.653ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: d028b1d7-85d1-4006-b18f-ffdc6cc650b5
5288 test test::up_test::off_to_extent_length_zero ... ok
52892023-09-22T23:08:11.654ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
52902023-09-22T23:08:11.654ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: fd318d37-3755-49e4-9e8b-7942d7e5d089
5291 test test::up_test::off_to_extent_one_block ... ok
52922023-09-22T23:08:11.655ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
52932023-09-22T23:08:11.655ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: b2f32480-30f4-4c73-8f7c-29ee92b1df26
5294 test test::up_test::off_to_extent_two_blocks ... ok
52952023-09-22T23:08:11.656ZINFOcrucible: Crucible stats registered with UUID: 141c4e5d-2218-4f3c-b3e8-d77c3da711d5
52962023-09-22T23:08:11.656ZINFOcrucible: Crucible 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 has session id: 484404fb-896d-4caa-aa02-dd6788eb1bb9
52972023-09-22T23:08:11.656ZINFOcrucible: [0] 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 (86bb4911-337c-49d8-ab07-a5d33d70b336) New New New ds_transition to WaitActive
52982023-09-22T23:08:11.656ZINFOcrucible: [0] Transition from New to WaitActive
52992023-09-22T23:08:11.656ZINFOcrucible: [0] 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 (86bb4911-337c-49d8-ab07-a5d33d70b336) WaitActive New New ds_transition to WaitQuorum
53002023-09-22T23:08:11.656ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
53012023-09-22T23:08:11.656ZINFOcrucible: [0] 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 (86bb4911-337c-49d8-ab07-a5d33d70b336) WaitQuorum New New ds_transition to Active
53022023-09-22T23:08:11.656ZINFOcrucible: [0] Transition from WaitQuorum to Active
53032023-09-22T23:08:11.656ZINFOcrucible: [1] 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 (86bb4911-337c-49d8-ab07-a5d33d70b336) Active New New ds_transition to WaitActive
53042023-09-22T23:08:11.656ZINFOcrucible: [1] Transition from New to WaitActive
53052023-09-22T23:08:11.656ZINFOcrucible: [1] 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 (86bb4911-337c-49d8-ab07-a5d33d70b336) Active WaitActive New ds_transition to WaitQuorum
53062023-09-22T23:08:11.656ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
53072023-09-22T23:08:11.656ZINFOcrucible: [1] 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 (86bb4911-337c-49d8-ab07-a5d33d70b336) Active WaitQuorum New ds_transition to Active
53082023-09-22T23:08:11.656ZINFOcrucible: [1] Transition from WaitQuorum to Active
53092023-09-22T23:08:11.656ZINFOcrucible: [2] 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 (86bb4911-337c-49d8-ab07-a5d33d70b336) Active Active New ds_transition to WaitActive
53102023-09-22T23:08:11.656ZINFOcrucible: [2] Transition from New to WaitActive
53112023-09-22T23:08:11.656ZINFOcrucible: [2] 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 (86bb4911-337c-49d8-ab07-a5d33d70b336) Active Active WaitActive ds_transition to WaitQuorum
53122023-09-22T23:08:11.656ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
53132023-09-22T23:08:11.656ZINFOcrucible: [2] 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 (86bb4911-337c-49d8-ab07-a5d33d70b336) Active Active WaitQuorum ds_transition to Active
53142023-09-22T23:08:11.656ZINFOcrucible: [2] Transition from WaitQuorum to Active
53152023-09-22T23:08:11.656ZINFOcrucible: 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 is now active with session: 86bb4911-337c-49d8-ab07-a5d33d70b336
53162023-09-22T23:08:11.656ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
53172023-09-22T23:08:11.656ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
53182023-09-22T23:08:11.656ZINFOcrucible: [0] client skip 1 in process jobs because fault = downstairs
53192023-09-22T23:08:11.656ZINFOcrucible: [0] changed 0 jobs to fault skipped = downstairs
53202023-09-22T23:08:11.656ZINFOcrucible: [0] 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 (86bb4911-337c-49d8-ab07-a5d33d70b336) Active Active Active ds_transition to Faulted
53212023-09-22T23:08:11.656ZINFOcrucible: [0] Transition from Active to Faulted
53222023-09-22T23:08:11.656ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
53232023-09-22T23:08:11.656ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
53242023-09-22T23:08:11.656ZINFOcrucible: [1] client skip 1 in process jobs because fault = downstairs
53252023-09-22T23:08:11.656ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
53262023-09-22T23:08:11.656ZINFOcrucible: [1] 141c4e5d-2218-4f3c-b3e8-d77c3da711d5 (86bb4911-337c-49d8-ab07-a5d33d70b336) Faulted Active Active ds_transition to Faulted
53272023-09-22T23:08:11.656ZINFOcrucible: [1] Transition from Active to Faulted
5328 test test::up_test::read_after_two_write_fail_is_alright ... ok
53292023-09-22T23:08:11.657ZINFOcrucible: Crucible stats registered with UUID: b1b74c4b-9f65-4d10-b833-07ab3e622bae
53302023-09-22T23:08:11.657ZINFOcrucible: Crucible b1b74c4b-9f65-4d10-b833-07ab3e622bae has session id: 52258f1b-1814-4e9f-9cd6-7c52d8d8cfa3
53312023-09-22T23:08:11.657ZINFOcrucible: [0] b1b74c4b-9f65-4d10-b833-07ab3e622bae (e5df52ad-f85c-4cf3-b7ea-b0c869edb935) New New New ds_transition to WaitActive
53322023-09-22T23:08:11.657ZINFOcrucible: [0] Transition from New to WaitActive
53332023-09-22T23:08:11.657ZINFOcrucible: [0] b1b74c4b-9f65-4d10-b833-07ab3e622bae (e5df52ad-f85c-4cf3-b7ea-b0c869edb935) WaitActive New New ds_transition to WaitQuorum
53342023-09-22T23:08:11.657ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
53352023-09-22T23:08:11.657ZINFOcrucible: [0] b1b74c4b-9f65-4d10-b833-07ab3e622bae (e5df52ad-f85c-4cf3-b7ea-b0c869edb935) WaitQuorum New New ds_transition to Active
53362023-09-22T23:08:11.657ZINFOcrucible: [0] Transition from WaitQuorum to Active
53372023-09-22T23:08:11.657ZINFOcrucible: [1] b1b74c4b-9f65-4d10-b833-07ab3e622bae (e5df52ad-f85c-4cf3-b7ea-b0c869edb935) Active New New ds_transition to WaitActive
53382023-09-22T23:08:11.657ZINFOcrucible: [1] Transition from New to WaitActive
53392023-09-22T23:08:11.657ZINFOcrucible: [1] b1b74c4b-9f65-4d10-b833-07ab3e622bae (e5df52ad-f85c-4cf3-b7ea-b0c869edb935) Active WaitActive New ds_transition to WaitQuorum
53402023-09-22T23:08:11.657ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
53412023-09-22T23:08:11.657ZINFOcrucible: [1] b1b74c4b-9f65-4d10-b833-07ab3e622bae (e5df52ad-f85c-4cf3-b7ea-b0c869edb935) Active WaitQuorum New ds_transition to Active
53422023-09-22T23:08:11.657ZINFOcrucible: [1] Transition from WaitQuorum to Active
53432023-09-22T23:08:11.657ZINFOcrucible: [2] b1b74c4b-9f65-4d10-b833-07ab3e622bae (e5df52ad-f85c-4cf3-b7ea-b0c869edb935) Active Active New ds_transition to WaitActive
53442023-09-22T23:08:11.657ZINFOcrucible: [2] Transition from New to WaitActive
53452023-09-22T23:08:11.657ZINFOcrucible: [2] b1b74c4b-9f65-4d10-b833-07ab3e622bae (e5df52ad-f85c-4cf3-b7ea-b0c869edb935) Active Active WaitActive ds_transition to WaitQuorum
53462023-09-22T23:08:11.657ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
53472023-09-22T23:08:11.657ZINFOcrucible: [2] b1b74c4b-9f65-4d10-b833-07ab3e622bae (e5df52ad-f85c-4cf3-b7ea-b0c869edb935) Active Active WaitQuorum ds_transition to Active
53482023-09-22T23:08:11.657ZINFOcrucible: [2] Transition from WaitQuorum to Active
53492023-09-22T23:08:11.657ZINFOcrucible: b1b74c4b-9f65-4d10-b833-07ab3e622bae is now active with session: e5df52ad-f85c-4cf3-b7ea-b0c869edb935
53502023-09-22T23:08:11.657ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
53512023-09-22T23:08:11.658ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
53522023-09-22T23:08:11.658ZINFOcrucible: [0] client skip 1 in process jobs because fault = downstairs
53532023-09-22T23:08:11.658ZINFOcrucible: [0] changed 0 jobs to fault skipped = downstairs
53542023-09-22T23:08:11.658ZINFOcrucible: [0] b1b74c4b-9f65-4d10-b833-07ab3e622bae (e5df52ad-f85c-4cf3-b7ea-b0c869edb935) Active Active Active ds_transition to Faulted
53552023-09-22T23:08:11.658ZINFOcrucible: [0] Transition from Active to Faulted
5356 test test::up_test::read_after_write_fail_is_alright ... ok
53572023-09-22T23:08:11.658ZINFOcrucible: Crucible stats registered with UUID: 1f4bf574-9a2c-47ce-ad31-29fae5234bd8
53582023-09-22T23:08:11.658ZINFOcrucible: Crucible 1f4bf574-9a2c-47ce-ad31-29fae5234bd8 has session id: 900a3c87-43dc-46e0-875a-37c9b014533a
53592023-09-22T23:08:11.658ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
53602023-09-22T23:08:11.658ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
53612023-09-22T23:08:11.658ZINFOcrucible: [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) } = downstairs
53622023-09-22T23:08:11.658ZINFOcrucible: [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) } = downstairs
53632023-09-22T23:08:11.658ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
5364 test test::up_test::reconcile_leave_no_job_behind - should panic ... ok
53652023-09-22T23:08:11.659ZINFOcrucible: Crucible stats registered with UUID: 6d5bfbb2-1b79-4c05-91a2-fac7055be051
53662023-09-22T23:08:11.659ZINFOcrucible: Crucible 6d5bfbb2-1b79-4c05-91a2-fac7055be051 has session id: 49ab1ee1-1712-4e07-a804-9b4a80711fd0
53672023-09-22T23:08:11.659ZINFOcrucible: [0] 6d5bfbb2-1b79-4c05-91a2-fac7055be051 (6bc62fb0-d4eb-4f0b-b738-a380a4407d9e) New New New ds_transition to WaitActive
53682023-09-22T23:08:11.659ZINFOcrucible: [0] Transition from New to WaitActive
53692023-09-22T23:08:11.659ZINFOcrucible: [0] 6d5bfbb2-1b79-4c05-91a2-fac7055be051 (6bc62fb0-d4eb-4f0b-b738-a380a4407d9e) WaitActive New New ds_transition to WaitQuorum
53702023-09-22T23:08:11.659ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
53712023-09-22T23:08:11.659ZINFOcrucible: [1] 6d5bfbb2-1b79-4c05-91a2-fac7055be051 (6bc62fb0-d4eb-4f0b-b738-a380a4407d9e) WaitQuorum New New ds_transition to WaitActive
53722023-09-22T23:08:11.659ZINFOcrucible: [1] Transition from New to WaitActive
53732023-09-22T23:08:11.659ZINFOcrucible: [1] 6d5bfbb2-1b79-4c05-91a2-fac7055be051 (6bc62fb0-d4eb-4f0b-b738-a380a4407d9e) WaitQuorum WaitActive New ds_transition to WaitQuorum
53742023-09-22T23:08:11.659ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
53752023-09-22T23:08:11.659ZINFOcrucible: Waiting for 1 more clients to be ready
5376 test test::up_test::reconcile_not_ready ... ok
53772023-09-22T23:08:11.660ZINFOcrucible: Crucible stats registered with UUID: ca3987a1-46c1-434b-a038-228a35da78c7
53782023-09-22T23:08:11.660ZINFOcrucible: Crucible ca3987a1-46c1-434b-a038-228a35da78c7 has session id: 9d1682f1-07f7-44d9-aef4-5775641b29e0
53792023-09-22T23:08:11.660ZINFOcrucible: Full repair list: {9: ExtentFix { source: ClientId(0), dest: [ClientId(1), ClientId(2)] }} = downstairs
53802023-09-22T23:08:11.660ZINFOcrucible: Task list: [ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 9, client_id: ClientId(0), flush_number: 22, gen_number: 33 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 9 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 9, source_client_id: ClientId(0), source_repair_address: 127.0.0.1:801, dest_clients: [ClientId(1), ClientId(2)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 9 }, state: ClientData([New, New, New]) }] = downstairs
5381 test test::up_test::reconcile_rc_to_message ... ok
53822023-09-22T23:08:11.660ZINFOcrucible: Crucible stats registered with UUID: fa02e367-7d9d-4c0f-b919-184b0105c3df
53832023-09-22T23:08:11.660ZINFOcrucible: Crucible fa02e367-7d9d-4c0f-b919-184b0105c3df has session id: bc6e9d4b-bccc-4d02-a336-26fe6bdec7b4
53842023-09-22T23:08:11.660ZINFOcrucible: Full repair list: {5: ExtentFix { source: ClientId(2), dest: [ClientId(0), ClientId(1)] }} = downstairs
53852023-09-22T23:08:11.660ZINFOcrucible: Task list: [ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 5, client_id: ClientId(2), flush_number: 66, gen_number: 77 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 5 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 5, source_client_id: ClientId(2), source_repair_address: 127.0.0.1:803, dest_clients: [ClientId(0), ClientId(1)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 5 }, state: ClientData([New, New, New]) }] = downstairs
5386 test test::up_test::reconcile_rc_to_message_two ... ok
53872023-09-22T23:08:11.661ZINFOcrucible: Crucible stats registered with UUID: 069ec54d-55f6-4ab4-be42-cf366bacc424
53882023-09-22T23:08:11.661ZINFOcrucible: Crucible 069ec54d-55f6-4ab4-be42-cf366bacc424 has session id: e2c0f0cf-f258-4781-a5ae-c6516dfed062
53892023-09-22T23:08:11.661ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
5390 test test::up_test::reconcile_rep_done_too_soon - should panic ... ok
53912023-09-22T23:08:11.662ZINFOcrucible: Crucible stats registered with UUID: 97f6234d-70f6-49a4-86ce-e1f1d7dc2972
53922023-09-22T23:08:11.662ZINFOcrucible: Crucible 97f6234d-70f6-49a4-86ce-e1f1d7dc2972 has session id: bd6eeb18-14e9-4c0b-8a9d-55ce4c8e907a
53932023-09-22T23:08:11.662ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
53942023-09-22T23:08:11.662ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
53952023-09-22T23:08:11.662ZINFOcrucible: [0] rep_in_progress ignore submitted job ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
5396 test test::up_test::reconcile_rep_in_progress_bad1 - should panic ... ok
53972023-09-22T23:08:11.662ZINFOcrucible: Crucible stats registered with UUID: 17a9ab2e-ac06-4d72-8d3c-04f05eee61d0
53982023-09-22T23:08:11.662ZINFOcrucible: Crucible 17a9ab2e-ac06-4d72-8d3c-04f05eee61d0 has session id: 22fd7790-4122-4577-a195-dc808d47ccd5
5399 test test::up_test::reconcile_rep_in_progress_none ... ok
54002023-09-22T23:08:11.663ZINFOcrucible: Crucible stats registered with UUID: ac26e8b1-20d0-4338-89db-07298ddce80f
54012023-09-22T23:08:11.663ZINFOcrucible: Crucible ac26e8b1-20d0-4338-89db-07298ddce80f has session id: 0600a358-4e81-43a6-bca2-cadd735b3342
54022023-09-22T23:08:11.663ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
5403 test test::up_test::reconcile_repair_inprogress_not_done - should panic ... ok
54042023-09-22T23:08:11.664ZINFOcrucible: Crucible stats registered with UUID: 4caf04f9-0583-4298-940c-baa864363578
54052023-09-22T23:08:11.664ZINFOcrucible: Crucible 4caf04f9-0583-4298-940c-baa864363578 has session id: c128e14b-5326-488d-990d-30965f450cdd
54062023-09-22T23:08:11.664ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
54072023-09-22T23:08:11.664ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
54082023-09-22T23:08:11.664ZINFOcrucible: [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) } = downstairs
54092023-09-22T23:08:11.664ZINFOcrucible: [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) } = downstairs
54102023-09-22T23:08:11.664ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
54112023-09-22T23:08:11.664ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
54122023-09-22T23:08:11.664ZINFOcrucible: [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) } = downstairs
54132023-09-22T23:08:11.664ZINFOcrucible: [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) } = downstairs
5414 test test::up_test::reconcile_repair_workflow_1 ... ok
54152023-09-22T23:08:11.664ZINFOcrucible: Crucible stats registered with UUID: 67a60535-788d-4500-b81d-d0a82822f89a
54162023-09-22T23:08:11.664ZINFOcrucible: Crucible 67a60535-788d-4500-b81d-d0a82822f89a has session id: 12f33e27-3743-4af6-8c96-d9b9bee42ab8
54172023-09-22T23:08:11.664ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
54182023-09-22T23:08:11.664ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
54192023-09-22T23:08:11.664ZINFOcrucible: [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, Skipped, InProgress]) } = downstairs
5420 test test::up_test::reconcile_repair_workflow_2 ... ok
54212023-09-22T23:08:11.665ZINFOcrucible: Crucible stats registered with UUID: 1d64b798-4118-44cd-999f-1b34b470c1c5
54222023-09-22T23:08:11.665ZINFOcrucible: Crucible 1d64b798-4118-44cd-999f-1b34b470c1c5 has session id: f89ef70f-6d50-41cc-81ba-90cdf0b6f139
54232023-09-22T23:08:11.665ZERROcrucible: Mark 0 as FAILED REPAIR = downstairs
54242023-09-22T23:08:11.665ZERROcrucible: Mark 2 as FAILED REPAIR = downstairs
54252023-09-22T23:08:11.665ZINFOcrucible: Clear out existing repair work queue = downstairs
5426 test test::up_test::reconcile_repair_workflow_not_repair ... ok
54272023-09-22T23:08:11.666ZINFOcrucible: Crucible stats registered with UUID: a9b6d4e4-1a36-401a-a0ca-275c6c7578b5
54282023-09-22T23:08:11.666ZINFOcrucible: Crucible a9b6d4e4-1a36-401a-a0ca-275c6c7578b5 has session id: 9d51c81d-31db-4664-a3b9-830d11d61906
54292023-09-22T23:08:11.666ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
54302023-09-22T23:08:11.666ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
54312023-09-22T23:08:11.666ZINFOcrucible: [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) } = downstairs
54322023-09-22T23:08:11.666ZINFOcrucible: [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) } = downstairs
54332023-09-22T23:08:11.666ZERROcrucible: Mark 0 as FAILED REPAIR = downstairs
54342023-09-22T23:08:11.666ZERROcrucible: Mark 2 as FAILED REPAIR = downstairs
54352023-09-22T23:08:11.666ZINFOcrucible: Clear out existing repair work queue = downstairs
5436 test test::up_test::reconcile_repair_workflow_not_repair_later ... ok
54372023-09-22T23:08:11.666ZINFOcrucible: Crucible stats registered with UUID: 02990135-0f1c-4862-aa7b-18aae9f44298
54382023-09-22T23:08:11.666ZINFOcrucible: Crucible 02990135-0f1c-4862-aa7b-18aae9f44298 has session id: 4e58b218-acbe-4114-82fb-ff057449c776
54392023-09-22T23:08:11.666ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
54402023-09-22T23:08:11.666ZINFOcrucible: [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, New, New]) } = downstairs
54412023-09-22T23:08:11.666ZINFOcrucible: [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) } = downstairs
54422023-09-22T23:08:11.666ZINFOcrucible: [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) } = downstairs
5443 test test::up_test::reconcile_repair_workflow_repair_later ... ok
54442023-09-22T23:08:11.667ZINFOcrucible: Crucible stats registered with UUID: 9dac809d-cf03-4c57-baf9-7a5d9a4d890b
54452023-09-22T23:08:11.667ZINFOcrucible: Crucible 9dac809d-cf03-4c57-baf9-7a5d9a4d890b has session id: 82751d91-e856-4a21-91e7-8fe9f9112f2f
54462023-09-22T23:08:11.667ZINFOcrucible: Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentClose { repair_id: ReconciliationId(0), extent_id: 1 }, state: ClientData([New, New, New]) }
5447 test test::up_test::reconcile_repair_workflow_too_soon - should panic ... ok
54482023-09-22T23:08:11.668ZINFOcrucible: Crucible stats registered with UUID: ad352343-27a7-4725-9872-c157f535c275
54492023-09-22T23:08:11.668ZINFOcrucible: Crucible ad352343-27a7-4725-9872-c157f535c275 has session id: d746cca1-bce1-4e32-93a9-4b9d5c136adb
54502023-09-22T23:08:11.668ZINFOcrucible: ad352343-27a7-4725-9872-c157f535c275 is now active with session: 78993387-0c5c-4fe6-88b0-16fc8f959e40
54512023-09-22T23:08:11.668ZWARNcrucible: [rc] leave job 1000 on the queue when removing 1001 WorkCounts { active: 2, error: 0, skipped: 0, done: 1 } = downstairs
5452 test test::up_test::retire_dont_retire_everything ... ok
5453 test test::up_test::send_io_live_repair_read ... ok
5454 test test::up_test::send_io_live_repair_unwritten_write ... ok
5455 test test::up_test::send_io_live_repair_write ... ok
54562023-09-22T23:08:11.669ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54572023-09-22T23:08:11.669ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: ad07efa3-3de2-4189-bc6a-b09c04a495e7
54582023-09-22T23:08:11.669ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: ceba3bc0-7dc4-44df-bc80-c7cf0880210b
5459 test test::up_test::test_deps_all_writes_depend_on_flushes ... ok
54602023-09-22T23:08:11.670ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54612023-09-22T23:08:11.670ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 618c5b6b-d33b-4afe-ad82-cf77a7a83de6
54622023-09-22T23:08:11.670ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 3531a35f-222b-44c8-a877-e791425e6b83
5463 test test::up_test::test_deps_big_read_depends_on_little_writes ... ok
54642023-09-22T23:08:11.671ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54652023-09-22T23:08:11.671ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 31dc3ed7-1ad2-4a98-952c-1f8161910277
54662023-09-22T23:08:11.671ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 94ef0595-d784-48ff-b4ed-fc1899b9063f
5467 test test::up_test::test_deps_big_write_depends_on_little_writes ... ok
54682023-09-22T23:08:11.672ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54692023-09-22T23:08:11.672ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: b388ce47-5823-4038-9d95-896435a0b663
54702023-09-22T23:08:11.672ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: c948856d-1826-4b6f-81ae-c01afbc7fc00
5471 test test::up_test::test_deps_depend_on_acked_work ... ok
54722023-09-22T23:08:11.672ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54732023-09-22T23:08:11.672ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: e6f69a31-a233-4bbe-a2ab-4d80a28a2311
54742023-09-22T23:08:11.672ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: d3721654-98f1-46a0-a6a8-eb39aadad20b
5475 test test::up_test::test_deps_flushes_depend_on_flushes ... ok
54762023-09-22T23:08:11.673ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54772023-09-22T23:08:11.673ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: f23d638d-669b-4254-9ccd-38027db9ed82
54782023-09-22T23:08:11.673ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: ef05052f-c079-4ccc-bc74-45387586e065
5479 test test::up_test::test_deps_flushes_depend_on_flushes_and_all_writes ... ok
54802023-09-22T23:08:11.674ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54812023-09-22T23:08:11.674ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 0bff49f4-5635-420f-b796-2dfcf490acd9
54822023-09-22T23:08:11.674ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 5d166cba-7c96-43c0-a592-3798947445d8
5483 test test::up_test::test_deps_little_writes_depend_on_big_write ... ok
54842023-09-22T23:08:11.675ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54852023-09-22T23:08:11.675ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 807ee680-d4fb-4886-8b4e-bfd2c3fb8ab2
54862023-09-22T23:08:11.675ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 0cba9a20-f1a7-429e-9250-230ecb23c8f5
5487 test test::up_test::test_deps_little_writes_depend_on_big_write_chain ... ok
54882023-09-22T23:08:11.676ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54892023-09-22T23:08:11.676ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: dc2a0aee-f660-43a8-ae9e-b72ab5d152fb
54902023-09-22T23:08:11.676ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: aadd3d6a-038e-4c9b-a2a2-74a82ee4c9e8
5491 test test::up_test::test_deps_multi_extent_batman ... ok
54922023-09-22T23:08:11.676ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54932023-09-22T23:08:11.676ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 122e8b2e-3906-4dac-8d1f-c7464fc31067
54942023-09-22T23:08:11.676ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 7d00589e-546c-4829-a81e-10828ef398cf
5495 test test::up_test::test_deps_multi_extent_there_and_back_again ... ok
54962023-09-22T23:08:11.677ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
54972023-09-22T23:08:11.677ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 26560027-2102-4c25-b236-ba8526b40216
54982023-09-22T23:08:11.677ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 1a594dd3-ee0f-4008-bb83-e7d1e631db83
5499 test test::up_test::test_deps_multi_extent_write ... ok
55002023-09-22T23:08:11.678ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
55012023-09-22T23:08:11.678ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 535cbc81-fc9b-4386-aa56-07ab9dc76033
55022023-09-22T23:08:11.678ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 1f47685e-52c8-4c03-9915-fcf8c7dbe1c2
5503 test test::up_test::test_deps_multiple_reads_depend_on_write ... ok
55042023-09-22T23:08:11.679ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
55052023-09-22T23:08:11.679ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 8f651a7a-cf1f-4936-bc29-f132695fbc42
55062023-09-22T23:08:11.679ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 6fabe596-c1dc-4a19-b4db-97108722e481
5507 test test::up_test::test_deps_read_depends_on_flush ... ok
55082023-09-22T23:08:11.679ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
55092023-09-22T23:08:11.679ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 32236327-9fd5-44b2-9553-13aa1be1d568
55102023-09-22T23:08:11.679ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 8842029d-0daf-43ea-abd6-cafe80f8071a
5511 test test::up_test::test_deps_read_depends_on_write ... ok
55122023-09-22T23:08:11.680ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
55132023-09-22T23:08:11.680ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: b2a26172-2a8e-462f-88bd-d44f36402cc3
55142023-09-22T23:08:11.680ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: d5162543-b365-4a8a-83a1-a4a2614001f0
5515 test test::up_test::test_deps_read_no_depend_on_read ... ok
55162023-09-22T23:08:11.681ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
55172023-09-22T23:08:11.681ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: e0e730af-125e-4891-bcce-67baa3f14a83
55182023-09-22T23:08:11.681ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: a4a0763d-5745-40c2-9fea-748308d5166b
5519 test test::up_test::test_deps_read_write_batman ... ok
55202023-09-22T23:08:11.682ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
55212023-09-22T23:08:11.682ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: e14a6fd5-813d-41c9-a19f-edd31c25b3a9
55222023-09-22T23:08:11.682ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: dc2df2a1-ca4d-41d5-8862-d9ed27e1a066
5523 test test::up_test::test_deps_read_write_ladder_1 ... ok
55242023-09-22T23:08:11.682ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
55252023-09-22T23:08:11.682ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: c463f55f-b32e-4ced-b75b-0d61ec26249b
55262023-09-22T23:08:11.682ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: c8e7706b-8929-4422-9a66-1b3c76a30e88
5527 test test::up_test::test_deps_read_write_ladder_2 ... ok
55282023-09-22T23:08:11.683ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
55292023-09-22T23:08:11.683ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 362907f2-5304-4253-81c6-faf16908d778
55302023-09-22T23:08:11.683ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 6248b885-469f-46ea-9f4e-d04639c16a5b
5531 test test::up_test::test_deps_read_write_ladder_3 ... ok
55322023-09-22T23:08:11.684ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
55332023-09-22T23:08:11.684ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 0ab7ae9e-fa1a-4dd5-8eee-e23a0b9c2538
55342023-09-22T23:08:11.684ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 881418b6-b76c-41c8-afad-38189754b350
5535 test test::up_test::test_deps_write_unwrittens_depend_on_read ... ok
55362023-09-22T23:08:11.685ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
55372023-09-22T23:08:11.685ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 44a942e4-1b72-40e3-8c58-dfcb639b2a56
55382023-09-22T23:08:11.685ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 485f8d12-ff26-4977-9b18-4a354c1cfe02
5539 test test::up_test::test_deps_writes_depend_on_overlapping_writes ... ok
55402023-09-22T23:08:11.685ZINFOcrucible: Now move the NoOp job forward
55412023-09-22T23:08:11.685ZINFOcrucible: Now ACK the NoOp job
55422023-09-22T23:08:11.685ZINFOcrucible: Finally, move the ReOpen job forward
55432023-09-22T23:08:11.685ZINFOcrucible: Now ACK the Reopen job
5544 "msg{":""msg":"Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000"Extent 0 close id:1002 Failed: Error: bad","v,"":v"0:,"0name":,""cruciblename"":,""crucible"level":,"50level":30,"time":"2023-09-22T23:08:11.685841971Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
5545 {,""time":msg"":"2023-09-22T23:08:11.685844035Z",RE:0 Wait for result from reopen command 1003:4"","hostname"v":":0,"name":ip-10-150-1-74.us-west-2.compute.internal"","crucible"pid,"":level"4291:30}
5546 ,"time":"2023-09-22T23:08:11.685883654Z","hostname{":"ip-10-150-1-74.us-west-2.compute.internal",""pid":msg4291":"}
5547 Crucible 00000000-0000-0000-0000-000000000000 has session id: c2ce6ac0-24a2-47bc-9b90-3a3ca5271ef3","v":0{,"name":""crucible"msg":,""level":30Extent 0 close id:1003 Failed: Error: bad","v":0,"name":"crucible","level":50,"time":"2023-09-22T23:08:11.685920643Z",",time":""hostname":"2023-09-22T23:08:11.685926926Z","ip-10-150-1-74.us-west-2.compute.internal"hostname",:""pid":4291ip-10-150-1-74.us-west-2.compute.internal","pid"}:
5548 4291}
5549 {"{msg":""msg":"RE:0 Bailing with error"00000000-0000-0000-0000-000000000000 is now active with session: e5b04902-9ab3-41e0-9229-d2240b1e580f,""v",:"0v",:"0name",:""name":"crucible"crucible",","level"level"::4030,"time":"2023-09-22T23:08:11.685984773Z",",time"":"hostname":"2023-09-22T23:08:11.68598527Z","ip-10-150-1-74.us-west-2.compute.internal"hostname",":pid"":4291ip-10-150-1-74.us-west-2.compute.internal"},
5550 "pid":4291}
5551 test test::up_test::test_deps_writes_depend_on_overlapping_writes_and_flushes ... ok
55522023-09-22T23:08:11.686ZINFOcrucible: Crucible stats registered with UUID: c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e
55532023-09-22T23:08:11.686ZINFOcrucible: Crucible c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e has session id: f0f8df2c-4bdf-4ea7-a801-fb139630ee82
55542023-09-22T23:08:11.686ZINFOcrucible: [0] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) New New New ds_transition to WaitActive
55552023-09-22T23:08:11.686ZINFOcrucible: [0] Transition from New to WaitActive
55562023-09-22T23:08:11.686ZINFOcrucible: [0] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) WaitActive New New ds_transition to WaitQuorum
55572023-09-22T23:08:11.686ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
55582023-09-22T23:08:11.686ZINFOcrucible: [0] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) WaitQuorum New New ds_transition to Active
5559 {"msg":"[0] Transition from WaitQuorum to Active","v":0,"name":"crucible","level":30,"{time":"2023-09-22T23:08:11.686742799Z","hostname":""msgip-10-150-1-74.us-west-2.compute.internal"",:""pid":4291}
5560 Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000","v{":0,""name"msg"::""crucible","level":30[1] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:08:11.686789248Z"time":,""hostname":"2023-09-22T23:08:11.686782543Z","ip-10-150-1-74.us-west-2.compute.internal"hostname",:""pid":4291ip-10-150-1-74.us-west-2.compute.internal"},
5561 "pid":4291{}
5562 "msg":"[1] Transition from New to WaitActive"{,"v":0,""name"msg"::""crucible","level":30Crucible 00000000-0000-0000-0000-000000000000 has session id: dd2e573b-5f55-4d37-ac90-b4479734e66a","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:11.68684055Z","hostname":","time"ip-10-150-1-74.us-west-2.compute.internal:"","pid":42912023-09-22T23:08:11.686848147Z",}"
5563 hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291"msg"}:
5564 "{"msg":"[1] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) Active WaitActive New ds_transition to WaitQuorum","v":0,"name":00000000-0000-0000-0000-000000000000 is now active with session: a9e06fa5-3978-4b13-a30e-436871db1b0a"","cruciblev"":0,,""level"name"::"30crucible","level":30,"time":"2023-09-22T23:08:11.686900134Z",,""time"hostname":":"2023-09-22T23:08:11.686897953Z",ip-10-150-1-74.us-west-2.compute.internal"","hostname"pid":":4291}
5565 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
55662023-09-22T23:08:11.686ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
55672023-09-22T23:08:11.687ZINFOcrucible: [1] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) Active WaitQuorum New ds_transition to Active
55682023-09-22T23:08:11.687ZINFOcrucible: [1] Transition from WaitQuorum to Active
55692023-09-22T23:08:11.687ZINFOcrucible: [2] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) Active Active New ds_transition to WaitActive
55702023-09-22T23:08:11.687ZINFOcrucible: [2] Transition from New to WaitActive
55712023-09-22T23:08:11.687ZINFOcrucible: [2] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) Active Active WaitActive ds_transition to WaitQuorum
55722023-09-22T23:08:11.687ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
55732023-09-22T23:08:11.687ZINFOcrucible: [2] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) Active Active WaitQuorum ds_transition to Active
55742023-09-22T23:08:11.687ZINFOcrucible: [2] Transition from WaitQuorum to Active
55752023-09-22T23:08:11.687ZINFOcrucible: c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e is now active with session: c90c1fcd-3603-46fd-b4f7-87867e35920a
55762023-09-22T23:08:11.687ZINFOcrucible: [0] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) Active Active Active ds_transition to Faulted
5577 test test::up_test::test_deps_writes_depend_on_overlapping_writes_chain ... {ok"
5578 msg":"[0] Transition from Active to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:11.687365644Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
55792023-09-22T23:08:11.687ZINFOcrucible: [0] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) Faulted Active Active ds_transition to LiveRepairReady
55802023-09-22T23:08:11.687ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
55812023-09-22T23:08:11.687ZINFOcrucible: [0] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) LiveRepairReady Active Active ds_transition to LiveRepair
55822023-09-22T23:08:11.687ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
55832023-09-22T23:08:11.687ZINFOcrucible: Waiting for Close + ReOpen jobs
55842023-09-22T23:08:11.687ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
55852023-09-22T23:08:11.687ZINFOcrucible: RE:0 close id:1000 queued, notify DS
55862023-09-22T23:08:11.687ZINFOcrucible: RE:0 Wait for result from close command 1000:1
55872023-09-22T23:08:11.687ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
55882023-09-22T23:08:11.687ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 3d043886-bbe0-4ac8-a93a-9043cf7c1b25
55892023-09-22T23:08:11.687ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 60ccb8e7-7dbb-42d8-bef7-a0f8f8a16283
5590 test test::up_test::test_deps_writes_depend_on_read ... ok
5591 test test::up_test::test_flush_does_not_consume_bw ... ok
5592 test test::up_test::test_flush_does_not_consume_iops ... ok
5593 test test::up_test::test_impossible_io ... ok
5594 test test::up_test::test_iop_and_bw_limit ... ok
5595 test test::up_test::test_iospan ... ok
5596 test test::up_test::test_iospan_buffer_read_write ... ok
5597 test test::up_test::test_no_iop_limit ... ok
55982023-09-22T23:08:11.738ZINFOcrucible: Crucible stats registered with UUID: 00000000-0000-0000-0000-000000000000
55992023-09-22T23:08:11.738ZINFOcrucible: Crucible 00000000-0000-0000-0000-000000000000 has session id: 512cafe5-b904-40b2-acda-3b38200b25bd
56002023-09-22T23:08:11.738ZINFOcrucible: 00000000-0000-0000-0000-000000000000 is now active with session: 4532d3f5-d6bd-4758-b8e8-875e64f31f49
5601 test test::up_test::test_read_flush_write_hash_mismatch ... ok
5602 test test::up_test::test_set_bw_limit ... ok
5603 test test::up_test::test_set_iop_limit ... ok
5604 test test::up_test::test_upstairs_encryption_context_ok ... ok
5605 test test::up_test::test_upstairs_encryption_context_wrong_nonce ... ok
5606 test test::up_test::test_upstairs_encryption_context_wrong_tag ... ok
5607 test test::up_test::test_upstairs_validate_encrypted_read_response ... ok
5608 test test::up_test::test_upstairs_validate_encrypted_read_response_blank_block ... ok
5609 test test::up_test::test_upstairs_validate_encrypted_read_response_multiple_contexts ... ok
5610 test test::up_test::test_upstairs_validate_unencrypted_read_response ... ok
5611 test test::up_test::test_upstairs_validate_unencrypted_read_response_blank_block ... ok
5612 test test::up_test::test_upstairs_validate_unencrypted_read_response_multiple_contexts ... ok
5613 test test::up_test::test_upstairs_validate_unencrypted_read_response_multiple_hashes ... ok
56142023-09-22T23:08:11.747ZINFOcrucible: Crucible stats registered with UUID: aff9dff6-ec62-4129-9120-a7e33dd182ad
56152023-09-22T23:08:11.747ZINFOcrucible: Crucible aff9dff6-ec62-4129-9120-a7e33dd182ad has session id: 6d3aa3c0-7944-4585-accc-c3aa0d2e66fb
56162023-09-22T23:08:11.747ZINFOcrucible: [0] aff9dff6-ec62-4129-9120-a7e33dd182ad (21a6bd28-e2c9-430e-b5fc-24e458ce8b1e) New New New ds_transition to WaitActive
56172023-09-22T23:08:11.747ZINFOcrucible: [0] Transition from New to WaitActive
56182023-09-22T23:08:11.747ZINFOcrucible: [0] aff9dff6-ec62-4129-9120-a7e33dd182ad (21a6bd28-e2c9-430e-b5fc-24e458ce8b1e) WaitActive New New ds_transition to WaitQuorum
56192023-09-22T23:08:11.747ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
56202023-09-22T23:08:11.747ZINFOcrucible: [0] aff9dff6-ec62-4129-9120-a7e33dd182ad (21a6bd28-e2c9-430e-b5fc-24e458ce8b1e) WaitQuorum New New ds_transition to Active
56212023-09-22T23:08:11.747ZINFOcrucible: [0] Transition from WaitQuorum to Active
56222023-09-22T23:08:11.747ZINFOcrucible: [1] aff9dff6-ec62-4129-9120-a7e33dd182ad (21a6bd28-e2c9-430e-b5fc-24e458ce8b1e) Active New New ds_transition to WaitActive
56232023-09-22T23:08:11.747ZINFOcrucible: [1] Transition from New to WaitActive
56242023-09-22T23:08:11.747ZINFOcrucible: [1] aff9dff6-ec62-4129-9120-a7e33dd182ad (21a6bd28-e2c9-430e-b5fc-24e458ce8b1e) Active WaitActive New ds_transition to WaitQuorum
56252023-09-22T23:08:11.747ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
56262023-09-22T23:08:11.747ZINFOcrucible: [1] aff9dff6-ec62-4129-9120-a7e33dd182ad (21a6bd28-e2c9-430e-b5fc-24e458ce8b1e) Active WaitQuorum New ds_transition to Active
56272023-09-22T23:08:11.747ZINFOcrucible: [1] Transition from WaitQuorum to Active
56282023-09-22T23:08:11.747ZINFOcrucible: [2] aff9dff6-ec62-4129-9120-a7e33dd182ad (21a6bd28-e2c9-430e-b5fc-24e458ce8b1e) Active Active New ds_transition to WaitActive
56292023-09-22T23:08:11.747ZINFOcrucible: [2] Transition from New to WaitActive
56302023-09-22T23:08:11.747ZINFOcrucible: [2] aff9dff6-ec62-4129-9120-a7e33dd182ad (21a6bd28-e2c9-430e-b5fc-24e458ce8b1e) Active Active WaitActive ds_transition to WaitQuorum
56312023-09-22T23:08:11.747ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
56322023-09-22T23:08:11.747ZINFOcrucible: [2] aff9dff6-ec62-4129-9120-a7e33dd182ad (21a6bd28-e2c9-430e-b5fc-24e458ce8b1e) Active Active WaitQuorum ds_transition to Active
56332023-09-22T23:08:11.747ZINFOcrucible: [2] Transition from WaitQuorum to Active
56342023-09-22T23:08:11.747ZINFOcrucible: aff9dff6-ec62-4129-9120-a7e33dd182ad is now active with session: 21a6bd28-e2c9-430e-b5fc-24e458ce8b1e
56352023-09-22T23:08:11.747ZINFOcrucible: [0] aff9dff6-ec62-4129-9120-a7e33dd182ad (21a6bd28-e2c9-430e-b5fc-24e458ce8b1e) Active Active Active ds_transition to Faulted
56362023-09-22T23:08:11.747ZINFOcrucible: [0] Transition from Active to Faulted
56372023-09-22T23:08:11.747ZINFOcrucible: [1] aff9dff6-ec62-4129-9120-a7e33dd182ad (21a6bd28-e2c9-430e-b5fc-24e458ce8b1e) Faulted Active Active ds_transition to Faulted
56382023-09-22T23:08:11.747ZINFOcrucible: [1] Transition from Active to Faulted
56392023-09-22T23:08:11.747ZINFOcrucible: [2] aff9dff6-ec62-4129-9120-a7e33dd182ad (21a6bd28-e2c9-430e-b5fc-24e458ce8b1e) Faulted Faulted Active ds_transition to Faulted
56402023-09-22T23:08:11.747ZINFOcrucible: [2] Transition from Active to Faulted
56412023-09-22T23:08:11.747ZWARNcrucible: job 1000 skipped on all downstairs = downstairs
56422023-09-22T23:08:11.747ZINFOcrucible: Enqueue job 1000 goes straight to AckReady = downstairs
5643 test test::up_test::three_faulted_enqueue_will_handle_flush ... ok
56442023-09-22T23:08:11.748ZINFOcrucible: Crucible stats registered with UUID: c48fedac-9929-4766-93c2-f75a1bb45a29
56452023-09-22T23:08:11.748ZINFOcrucible: Crucible c48fedac-9929-4766-93c2-f75a1bb45a29 has session id: 8a456627-c0ec-44bd-b09a-b433e5213d9f
56462023-09-22T23:08:11.748ZINFOcrucible: [0] c48fedac-9929-4766-93c2-f75a1bb45a29 (a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a) New New New ds_transition to WaitActive
56472023-09-22T23:08:11.748ZINFOcrucible: [0] Transition from New to WaitActive
56482023-09-22T23:08:11.748ZINFOcrucible: [0] c48fedac-9929-4766-93c2-f75a1bb45a29 (a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a) WaitActive New New ds_transition to WaitQuorum
56492023-09-22T23:08:11.748ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
56502023-09-22T23:08:11.748ZINFOcrucible: [0] c48fedac-9929-4766-93c2-f75a1bb45a29 (a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a) WaitQuorum New New ds_transition to Active
56512023-09-22T23:08:11.748ZINFOcrucible: [0] Transition from WaitQuorum to Active
56522023-09-22T23:08:11.748ZINFOcrucible: [1] c48fedac-9929-4766-93c2-f75a1bb45a29 (a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a) Active New New ds_transition to WaitActive
56532023-09-22T23:08:11.748ZINFOcrucible: [1] Transition from New to WaitActive
56542023-09-22T23:08:11.748ZINFOcrucible: [1] c48fedac-9929-4766-93c2-f75a1bb45a29 (a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a) Active WaitActive New ds_transition to WaitQuorum
56552023-09-22T23:08:11.748ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
56562023-09-22T23:08:11.748ZINFOcrucible: [1] c48fedac-9929-4766-93c2-f75a1bb45a29 (a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a) Active WaitQuorum New ds_transition to Active
56572023-09-22T23:08:11.748ZINFOcrucible: [1] Transition from WaitQuorum to Active
56582023-09-22T23:08:11.748ZINFOcrucible: [2] c48fedac-9929-4766-93c2-f75a1bb45a29 (a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a) Active Active New ds_transition to WaitActive
56592023-09-22T23:08:11.748ZINFOcrucible: [2] Transition from New to WaitActive
56602023-09-22T23:08:11.748ZINFOcrucible: [2] c48fedac-9929-4766-93c2-f75a1bb45a29 (a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a) Active Active WaitActive ds_transition to WaitQuorum
56612023-09-22T23:08:11.748ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
56622023-09-22T23:08:11.748ZINFOcrucible: [2] c48fedac-9929-4766-93c2-f75a1bb45a29 (a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a) Active Active WaitQuorum ds_transition to Active
56632023-09-22T23:08:11.748ZINFOcrucible: [2] Transition from WaitQuorum to Active
56642023-09-22T23:08:11.748ZINFOcrucible: c48fedac-9929-4766-93c2-f75a1bb45a29 is now active with session: a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a
56652023-09-22T23:08:11.748ZINFOcrucible: [0] c48fedac-9929-4766-93c2-f75a1bb45a29 (a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a) Active Active Active ds_transition to Faulted
56662023-09-22T23:08:11.748ZINFOcrucible: [0] Transition from Active to Faulted
56672023-09-22T23:08:11.748ZINFOcrucible: [1] c48fedac-9929-4766-93c2-f75a1bb45a29 (a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a) Faulted Active Active ds_transition to Faulted
56682023-09-22T23:08:11.749ZINFOcrucible: [1] Transition from Active to Faulted
56692023-09-22T23:08:11.749ZINFOcrucible: [2] c48fedac-9929-4766-93c2-f75a1bb45a29 (a35ce5f9-6cf5-4fe6-a11d-68cc05477a1a) Faulted Faulted Active ds_transition to Faulted
56702023-09-22T23:08:11.749ZINFOcrucible: [2] Transition from Active to Faulted
56712023-09-22T23:08:11.749ZWARNcrucible: job 1000 skipped on all downstairs = downstairs
56722023-09-22T23:08:11.749ZINFOcrucible: Enqueue job 1000 goes straight to AckReady = downstairs
56732023-09-22T23:08:11.749ZWARNcrucible: job 1001 skipped on all downstairs = downstairs
56742023-09-22T23:08:11.749ZINFOcrucible: Enqueue job 1001 goes straight to AckReady = downstairs
56752023-09-22T23:08:11.749ZWARNcrucible: job 1002 skipped on all downstairs = downstairs
56762023-09-22T23:08:11.749ZINFOcrucible: Enqueue job 1002 goes straight to AckReady = downstairs
5677 test test::up_test::three_faulted_enqueue_will_handle_many_ios ... ok
56782023-09-22T23:08:11.749ZINFOcrucible: Crucible stats registered with UUID: cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8
56792023-09-22T23:08:11.749ZINFOcrucible: Crucible cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 has session id: a3ae0350-4779-47cc-aadc-1326e13058a7
56802023-09-22T23:08:11.749ZINFOcrucible: [0] cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 (47f7b332-dccf-4cd7-a278-f9f25a6aa948) New New New ds_transition to WaitActive
56812023-09-22T23:08:11.749ZINFOcrucible: [0] Transition from New to WaitActive
56822023-09-22T23:08:11.749ZINFOcrucible: [0] cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 (47f7b332-dccf-4cd7-a278-f9f25a6aa948) WaitActive New New ds_transition to WaitQuorum
56832023-09-22T23:08:11.749ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
56842023-09-22T23:08:11.749ZINFOcrucible: [0] cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 (47f7b332-dccf-4cd7-a278-f9f25a6aa948) WaitQuorum New New ds_transition to Active
56852023-09-22T23:08:11.749ZINFOcrucible: [0] Transition from WaitQuorum to Active
56862023-09-22T23:08:11.749ZINFOcrucible: [1] cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 (47f7b332-dccf-4cd7-a278-f9f25a6aa948) Active New New ds_transition to WaitActive
56872023-09-22T23:08:11.749ZINFOcrucible: [1] Transition from New to WaitActive
56882023-09-22T23:08:11.749ZINFOcrucible: [1] cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 (47f7b332-dccf-4cd7-a278-f9f25a6aa948) Active WaitActive New ds_transition to WaitQuorum
56892023-09-22T23:08:11.749ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
56902023-09-22T23:08:11.749ZINFOcrucible: [1] cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 (47f7b332-dccf-4cd7-a278-f9f25a6aa948) Active WaitQuorum New ds_transition to Active
56912023-09-22T23:08:11.750ZINFOcrucible: [1] Transition from WaitQuorum to Active
56922023-09-22T23:08:11.750ZINFOcrucible: [2] cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 (47f7b332-dccf-4cd7-a278-f9f25a6aa948) Active Active New ds_transition to WaitActive
56932023-09-22T23:08:11.750ZINFOcrucible: [2] Transition from New to WaitActive
56942023-09-22T23:08:11.750ZINFOcrucible: [2] cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 (47f7b332-dccf-4cd7-a278-f9f25a6aa948) Active Active WaitActive ds_transition to WaitQuorum
56952023-09-22T23:08:11.750ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
56962023-09-22T23:08:11.750ZINFOcrucible: [2] cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 (47f7b332-dccf-4cd7-a278-f9f25a6aa948) Active Active WaitQuorum ds_transition to Active
56972023-09-22T23:08:11.750ZINFOcrucible: [2] Transition from WaitQuorum to Active
56982023-09-22T23:08:11.750ZINFOcrucible: cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 is now active with session: 47f7b332-dccf-4cd7-a278-f9f25a6aa948
56992023-09-22T23:08:11.750ZINFOcrucible: [0] cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 (47f7b332-dccf-4cd7-a278-f9f25a6aa948) Active Active Active ds_transition to Faulted
57002023-09-22T23:08:11.750ZINFOcrucible: [0] Transition from Active to Faulted
57012023-09-22T23:08:11.750ZINFOcrucible: [1] cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 (47f7b332-dccf-4cd7-a278-f9f25a6aa948) Faulted Active Active ds_transition to Faulted
57022023-09-22T23:08:11.750ZINFOcrucible: [1] Transition from Active to Faulted
57032023-09-22T23:08:11.750ZINFOcrucible: [2] cc0b1b6d-0d14-49fe-8849-9ff7b600a9b8 (47f7b332-dccf-4cd7-a278-f9f25a6aa948) Faulted Faulted Active ds_transition to Faulted
57042023-09-22T23:08:11.750ZINFOcrucible: [2] Transition from Active to Faulted
57052023-09-22T23:08:11.750ZWARNcrucible: job 1000 skipped on all downstairs = downstairs
57062023-09-22T23:08:11.750ZINFOcrucible: Enqueue job 1000 goes straight to AckReady = downstairs
5707 test test::up_test::three_faulted_enqueue_will_handle_read ... ok
57082023-09-22T23:08:11.750ZINFOcrucible: Crucible stats registered with UUID: 140318df-e512-46f0-b064-327b8930ef8f
57092023-09-22T23:08:11.750ZINFOcrucible: Crucible 140318df-e512-46f0-b064-327b8930ef8f has session id: e343a590-4abc-41bd-bf7e-2a97e8a090f3
57102023-09-22T23:08:11.750ZINFOcrucible: [0] 140318df-e512-46f0-b064-327b8930ef8f (34028cb9-05d2-4203-a645-177faee3d76c) New New New ds_transition to WaitActive
57112023-09-22T23:08:11.750ZINFOcrucible: [0] Transition from New to WaitActive
57122023-09-22T23:08:11.750ZINFOcrucible: [0] 140318df-e512-46f0-b064-327b8930ef8f (34028cb9-05d2-4203-a645-177faee3d76c) WaitActive New New ds_transition to WaitQuorum
57132023-09-22T23:08:11.750ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
57142023-09-22T23:08:11.750ZINFOcrucible: [0] 140318df-e512-46f0-b064-327b8930ef8f (34028cb9-05d2-4203-a645-177faee3d76c) WaitQuorum New New ds_transition to Active
57152023-09-22T23:08:11.750ZINFOcrucible: [0] Transition from WaitQuorum to Active
57162023-09-22T23:08:11.751ZINFOcrucible: [1] 140318df-e512-46f0-b064-327b8930ef8f (34028cb9-05d2-4203-a645-177faee3d76c) Active New New ds_transition to WaitActive
57172023-09-22T23:08:11.751ZINFOcrucible: [1] Transition from New to WaitActive
57182023-09-22T23:08:11.751ZINFOcrucible: [1] 140318df-e512-46f0-b064-327b8930ef8f (34028cb9-05d2-4203-a645-177faee3d76c) Active WaitActive New ds_transition to WaitQuorum
57192023-09-22T23:08:11.751ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
57202023-09-22T23:08:11.751ZINFOcrucible: [1] 140318df-e512-46f0-b064-327b8930ef8f (34028cb9-05d2-4203-a645-177faee3d76c) Active WaitQuorum New ds_transition to Active
57212023-09-22T23:08:11.751ZINFOcrucible: [1] Transition from WaitQuorum to Active
57222023-09-22T23:08:11.751ZINFOcrucible: [2] 140318df-e512-46f0-b064-327b8930ef8f (34028cb9-05d2-4203-a645-177faee3d76c) Active Active New ds_transition to WaitActive
57232023-09-22T23:08:11.751ZINFOcrucible: [2] Transition from New to WaitActive
57242023-09-22T23:08:11.751ZINFOcrucible: [2] 140318df-e512-46f0-b064-327b8930ef8f (34028cb9-05d2-4203-a645-177faee3d76c) Active Active WaitActive ds_transition to WaitQuorum
57252023-09-22T23:08:11.751ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
57262023-09-22T23:08:11.751ZINFOcrucible: [2] 140318df-e512-46f0-b064-327b8930ef8f (34028cb9-05d2-4203-a645-177faee3d76c) Active Active WaitQuorum ds_transition to Active
57272023-09-22T23:08:11.751ZINFOcrucible: [2] Transition from WaitQuorum to Active
57282023-09-22T23:08:11.751ZINFOcrucible: 140318df-e512-46f0-b064-327b8930ef8f is now active with session: 34028cb9-05d2-4203-a645-177faee3d76c
57292023-09-22T23:08:11.751ZINFOcrucible: [0] 140318df-e512-46f0-b064-327b8930ef8f (34028cb9-05d2-4203-a645-177faee3d76c) Active Active Active ds_transition to Faulted
57302023-09-22T23:08:11.751ZINFOcrucible: [0] Transition from Active to Faulted
57312023-09-22T23:08:11.751ZINFOcrucible: [1] 140318df-e512-46f0-b064-327b8930ef8f (34028cb9-05d2-4203-a645-177faee3d76c) Faulted Active Active ds_transition to Faulted
57322023-09-22T23:08:11.751ZINFOcrucible: [1] Transition from Active to Faulted
57332023-09-22T23:08:11.751ZINFOcrucible: [2] 140318df-e512-46f0-b064-327b8930ef8f (34028cb9-05d2-4203-a645-177faee3d76c) Faulted Faulted Active ds_transition to Faulted
57342023-09-22T23:08:11.751ZINFOcrucible: [2] Transition from Active to Faulted
57352023-09-22T23:08:11.751ZWARNcrucible: job 1000 skipped on all downstairs = downstairs
57362023-09-22T23:08:11.751ZINFOcrucible: Enqueue job 1000 goes straight to AckReady = downstairs
5737 test test::up_test::three_faulted_enqueue_will_handle_write ... ok
57382023-09-22T23:08:11.751ZINFOcrucible: Crucible stats registered with UUID: 6d77b913-92ca-428b-a400-ba4b35c9827c
57392023-09-22T23:08:11.751ZINFOcrucible: Crucible 6d77b913-92ca-428b-a400-ba4b35c9827c has session id: 74dcbbc2-b4c0-4251-8701-6c7657e2728f
57402023-09-22T23:08:11.751ZINFOcrucible: [0] 6d77b913-92ca-428b-a400-ba4b35c9827c (79976138-c5dd-4544-aaf9-3f7aa290bf7f) New New New ds_transition to WaitActive
57412023-09-22T23:08:11.751ZINFOcrucible: [0] Transition from New to WaitActive
57422023-09-22T23:08:11.752ZINFOcrucible: [0] 6d77b913-92ca-428b-a400-ba4b35c9827c (79976138-c5dd-4544-aaf9-3f7aa290bf7f) WaitActive New New ds_transition to WaitQuorum
57432023-09-22T23:08:11.752ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
57442023-09-22T23:08:11.752ZINFOcrucible: [0] 6d77b913-92ca-428b-a400-ba4b35c9827c (79976138-c5dd-4544-aaf9-3f7aa290bf7f) WaitQuorum New New ds_transition to Active
57452023-09-22T23:08:11.752ZINFOcrucible: [0] Transition from WaitQuorum to Active
57462023-09-22T23:08:11.752ZINFOcrucible: [1] 6d77b913-92ca-428b-a400-ba4b35c9827c (79976138-c5dd-4544-aaf9-3f7aa290bf7f) Active New New ds_transition to WaitActive
57472023-09-22T23:08:11.752ZINFOcrucible: [1] Transition from New to WaitActive
57482023-09-22T23:08:11.752ZINFOcrucible: [1] 6d77b913-92ca-428b-a400-ba4b35c9827c (79976138-c5dd-4544-aaf9-3f7aa290bf7f) Active WaitActive New ds_transition to WaitQuorum
57492023-09-22T23:08:11.752ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
57502023-09-22T23:08:11.752ZINFOcrucible: [1] 6d77b913-92ca-428b-a400-ba4b35c9827c (79976138-c5dd-4544-aaf9-3f7aa290bf7f) Active WaitQuorum New ds_transition to Active
57512023-09-22T23:08:11.752ZINFOcrucible: [1] Transition from WaitQuorum to Active
57522023-09-22T23:08:11.752ZINFOcrucible: [2] 6d77b913-92ca-428b-a400-ba4b35c9827c (79976138-c5dd-4544-aaf9-3f7aa290bf7f) Active Active New ds_transition to WaitActive
57532023-09-22T23:08:11.752ZINFOcrucible: [2] Transition from New to WaitActive
57542023-09-22T23:08:11.752ZINFOcrucible: [2] 6d77b913-92ca-428b-a400-ba4b35c9827c (79976138-c5dd-4544-aaf9-3f7aa290bf7f) Active Active WaitActive ds_transition to WaitQuorum
57552023-09-22T23:08:11.752ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
57562023-09-22T23:08:11.752ZINFOcrucible: [2] 6d77b913-92ca-428b-a400-ba4b35c9827c (79976138-c5dd-4544-aaf9-3f7aa290bf7f) Active Active WaitQuorum ds_transition to Active
57572023-09-22T23:08:11.752ZINFOcrucible: [2] Transition from WaitQuorum to Active
57582023-09-22T23:08:11.752ZINFOcrucible: 6d77b913-92ca-428b-a400-ba4b35c9827c is now active with session: 79976138-c5dd-4544-aaf9-3f7aa290bf7f
57592023-09-22T23:08:11.752ZINFOcrucible: [0] 6d77b913-92ca-428b-a400-ba4b35c9827c (79976138-c5dd-4544-aaf9-3f7aa290bf7f) Active Active Active ds_transition to Faulted
57602023-09-22T23:08:11.752ZINFOcrucible: [0] Transition from Active to Faulted
57612023-09-22T23:08:11.752ZINFOcrucible: [1] 6d77b913-92ca-428b-a400-ba4b35c9827c (79976138-c5dd-4544-aaf9-3f7aa290bf7f) Faulted Active Active ds_transition to Faulted
57622023-09-22T23:08:11.752ZINFOcrucible: [1] Transition from Active to Faulted
57632023-09-22T23:08:11.752ZINFOcrucible: [2] 6d77b913-92ca-428b-a400-ba4b35c9827c (79976138-c5dd-4544-aaf9-3f7aa290bf7f) Faulted Faulted Active ds_transition to Faulted
57642023-09-22T23:08:11.752ZINFOcrucible: [2] Transition from Active to Faulted
57652023-09-22T23:08:11.752ZWARNcrucible: job 1000 skipped on all downstairs = downstairs
57662023-09-22T23:08:11.752ZINFOcrucible: Enqueue job 1000 goes straight to AckReady = downstairs
57672023-09-22T23:08:11.752ZWARNcrucible: job 1001 skipped on all downstairs = downstairs
57682023-09-22T23:08:11.752ZINFOcrucible: Enqueue job 1001 goes straight to AckReady = downstairs
57692023-09-22T23:08:11.752ZWARNcrucible: job 1002 skipped on all downstairs = downstairs
57702023-09-22T23:08:11.752ZINFOcrucible: Enqueue job 1002 goes straight to AckReady = downstairs
57712023-09-22T23:08:11.752ZWARNcrucible: job 1003 skipped on all downstairs = downstairs
57722023-09-22T23:08:11.752ZINFOcrucible: Enqueue job 1003 goes straight to AckReady = downstairs
57732023-09-22T23:08:11.752ZWARNcrucible: job 1004 skipped on all downstairs = downstairs
57742023-09-22T23:08:11.752ZINFOcrucible: Enqueue job 1004 goes straight to AckReady = downstairs
57752023-09-22T23:08:11.752ZWARNcrucible: job 1005 skipped on all downstairs = downstairs
57762023-09-22T23:08:11.752ZINFOcrucible: Enqueue job 1005 goes straight to AckReady = downstairs
5777 test test::up_test::three_faulted_retire_skipped_some_leave_some ... ok
57782023-09-22T23:08:11.753ZINFOcrucible: Crucible stats registered with UUID: fc14036a-f2c9-4490-9298-18ffb7e63afc
57792023-09-22T23:08:11.753ZINFOcrucible: Crucible fc14036a-f2c9-4490-9298-18ffb7e63afc has session id: fb167c65-c87d-4e51-b020-2ba8c05435e0
57802023-09-22T23:08:11.753ZINFOcrucible: [0] fc14036a-f2c9-4490-9298-18ffb7e63afc (a61dd7d2-dd63-4e11-888a-505db3a9a408) New New New ds_transition to WaitActive
57812023-09-22T23:08:11.753ZINFOcrucible: [0] Transition from New to WaitActive
57822023-09-22T23:08:11.753ZINFOcrucible: [0] fc14036a-f2c9-4490-9298-18ffb7e63afc (a61dd7d2-dd63-4e11-888a-505db3a9a408) WaitActive New New ds_transition to WaitQuorum
57832023-09-22T23:08:11.753ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
57842023-09-22T23:08:11.753ZINFOcrucible: [0] fc14036a-f2c9-4490-9298-18ffb7e63afc (a61dd7d2-dd63-4e11-888a-505db3a9a408) WaitQuorum New New ds_transition to Active
57852023-09-22T23:08:11.753ZINFOcrucible: [0] Transition from WaitQuorum to Active
57862023-09-22T23:08:11.753ZINFOcrucible: [1] fc14036a-f2c9-4490-9298-18ffb7e63afc (a61dd7d2-dd63-4e11-888a-505db3a9a408) Active New New ds_transition to WaitActive
57872023-09-22T23:08:11.753ZINFOcrucible: [1] Transition from New to WaitActive
57882023-09-22T23:08:11.753ZINFOcrucible: [1] fc14036a-f2c9-4490-9298-18ffb7e63afc (a61dd7d2-dd63-4e11-888a-505db3a9a408) Active WaitActive New ds_transition to WaitQuorum
57892023-09-22T23:08:11.753ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
57902023-09-22T23:08:11.753ZINFOcrucible: [1] fc14036a-f2c9-4490-9298-18ffb7e63afc (a61dd7d2-dd63-4e11-888a-505db3a9a408) Active WaitQuorum New ds_transition to Active
57912023-09-22T23:08:11.753ZINFOcrucible: [1] Transition from WaitQuorum to Active
57922023-09-22T23:08:11.753ZINFOcrucible: [2] fc14036a-f2c9-4490-9298-18ffb7e63afc (a61dd7d2-dd63-4e11-888a-505db3a9a408) Active Active New ds_transition to WaitActive
57932023-09-22T23:08:11.753ZINFOcrucible: [2] Transition from New to WaitActive
57942023-09-22T23:08:11.753ZINFOcrucible: [2] fc14036a-f2c9-4490-9298-18ffb7e63afc (a61dd7d2-dd63-4e11-888a-505db3a9a408) Active Active WaitActive ds_transition to WaitQuorum
57952023-09-22T23:08:11.753ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
57962023-09-22T23:08:11.753ZINFOcrucible: [2] fc14036a-f2c9-4490-9298-18ffb7e63afc (a61dd7d2-dd63-4e11-888a-505db3a9a408) Active Active WaitQuorum ds_transition to Active
57972023-09-22T23:08:11.753ZINFOcrucible: [2] Transition from WaitQuorum to Active
57982023-09-22T23:08:11.753ZINFOcrucible: fc14036a-f2c9-4490-9298-18ffb7e63afc is now active with session: a61dd7d2-dd63-4e11-888a-505db3a9a408
57992023-09-22T23:08:11.753ZINFOcrucible: [0] fc14036a-f2c9-4490-9298-18ffb7e63afc (a61dd7d2-dd63-4e11-888a-505db3a9a408) Active Active Active ds_transition to Faulted
58002023-09-22T23:08:11.753ZINFOcrucible: [0] Transition from Active to Faulted
58012023-09-22T23:08:11.753ZINFOcrucible: [2] fc14036a-f2c9-4490-9298-18ffb7e63afc (a61dd7d2-dd63-4e11-888a-505db3a9a408) Faulted Active Active ds_transition to Faulted
58022023-09-22T23:08:11.753ZINFOcrucible: [2] Transition from Active to Faulted
5803 test test::up_test::two_faulted_downstairs_can_still_read ... ok
58042023-09-22T23:08:11.754ZINFOcrucible: Crucible stats registered with UUID: 9e235c1f-09d1-4720-b751-3c1400b7944e
58052023-09-22T23:08:11.754ZINFOcrucible: Crucible 9e235c1f-09d1-4720-b751-3c1400b7944e has session id: 727a6b0e-52f2-451c-9060-bb324d36d8fd
58062023-09-22T23:08:11.754ZINFOcrucible: 9e235c1f-09d1-4720-b751-3c1400b7944e is now active with session: 9c7f28ef-46ce-4ea9-ad55-ea907e85426e
58072023-09-22T23:08:11.754ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
58082023-09-22T23:08:11.754ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
58092023-09-22T23:08:11.754ZERROcrucible: [0] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
58102023-09-22T23:08:11.754ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
58112023-09-22T23:08:11.754ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
58122023-09-22T23:08:11.754ZERROcrucible: [1] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
58132023-09-22T23:08:11.754ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
58142023-09-22T23:08:11.754ZERROcrucible: [0] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
58152023-09-22T23:08:11.754ZERROcrucible: [0] 1001 read error GenericError("bad") DownstairsIO { ds_id: JobId(1001), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
58162023-09-22T23:08:11.754ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
58172023-09-22T23:08:11.754ZERROcrucible: [1] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
58182023-09-22T23:08:11.754ZERROcrucible: [1] 1001 read error GenericError("bad") DownstairsIO { ds_id: JobId(1001), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
5819 test test::up_test::work_assert_reads_do_not_cause_failure_state_transition ... ok
58202023-09-22T23:08:11.755ZINFOcrucible: Crucible stats registered with UUID: 3d847606-c75c-46cf-82c9-8794a622434c
58212023-09-22T23:08:11.755ZINFOcrucible: Crucible 3d847606-c75c-46cf-82c9-8794a622434c has session id: 69bcabe1-4670-4eca-ae5c-d082c072d3d0
58222023-09-22T23:08:11.755ZINFOcrucible: 3d847606-c75c-46cf-82c9-8794a622434c is now active with session: 69524606-a143-4db3-8351-b92a36cf2321
58232023-09-22T23:08:11.755ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs
5824 test test::up_test::work_completed_ack_read_replay ... ok
58252023-09-22T23:08:11.755ZINFOcrucible: Crucible stats registered with UUID: 32742f7c-515e-43dd-bdeb-51ff74c46398
58262023-09-22T23:08:11.755ZINFOcrucible: Crucible 32742f7c-515e-43dd-bdeb-51ff74c46398 has session id: 33d6b101-42d9-4730-8445-e576abff9839
58272023-09-22T23:08:11.755ZINFOcrucible: 32742f7c-515e-43dd-bdeb-51ff74c46398 is now active with session: f0daa040-168e-4f27-9d99-6e7a270e32bb
58282023-09-22T23:08:11.755ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs
58292023-09-22T23:08:11.756ZINFOcrucible: REPLAY [0] read hash mismatch on id 1000 Expected [Some(48a5a7677a8e488)] Computed [Some(ea9ca750a094f609)] guest_id:10 request:[ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] job state:ClientData([Done, InProgress, InProgress]) = downstairs
5830 test test::up_test::work_completed_ack_read_replay_hash_mismatch ... ok
58312023-09-22T23:08:11.756ZINFOcrucible: Crucible stats registered with UUID: 487cc194-afc0-45e3-8c68-666b799132d5
58322023-09-22T23:08:11.756ZINFOcrucible: Crucible 487cc194-afc0-45e3-8c68-666b799132d5 has session id: 0a9c0c50-3be5-4fdc-91f5-aa6d365b9fc8
58332023-09-22T23:08:11.756ZINFOcrucible: 487cc194-afc0-45e3-8c68-666b799132d5 is now active with session: c2032703-82ac-4414-a09a-b23f7e88407b
58342023-09-22T23:08:11.756ZINFOcrucible: [1] client re-new 1 jobs since flush 0 = downstairs
58352023-09-22T23:08:11.756ZINFOcrucible: REPLAY [1] read hash mismatch on id 1000 Expected [Some(48a5a7677a8e488)] Computed [Some(ea9ca750a094f609)] guest_id:10 request:[ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] job state:ClientData([Done, Done, InProgress]) = downstairs
5836 test test::up_test::work_completed_ack_read_replay_two_hash_mismatch ... ok
58372023-09-22T23:08:11.757ZINFOcrucible: Crucible stats registered with UUID: d10943d5-f081-48a9-9755-5b69eff5a43a
58382023-09-22T23:08:11.757ZINFOcrucible: Crucible d10943d5-f081-48a9-9755-5b69eff5a43a has session id: 64671ce6-8a86-49b8-9f17-3184d1b12fd5
58392023-09-22T23:08:11.757ZINFOcrucible: d10943d5-f081-48a9-9755-5b69eff5a43a is now active with session: 5ef75342-87bc-420c-8b5d-c1f883e82c5a
5840 test test::up_test::work_completed_read_flush ... ok
58412023-09-22T23:08:11.757ZINFOcrucible: Crucible stats registered with UUID: 46e3d841-473a-4479-8f4b-80bab25e2dc9
58422023-09-22T23:08:11.757ZINFOcrucible: Crucible 46e3d841-473a-4479-8f4b-80bab25e2dc9 has session id: e4ad383b-1180-4835-91a0-3c842f91f24c
58432023-09-22T23:08:11.757ZINFOcrucible: 46e3d841-473a-4479-8f4b-80bab25e2dc9 is now active with session: fa8fa0f3-1bb3-4442-bf94-cadaad9d806e
58442023-09-22T23:08:11.757ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs
58452023-09-22T23:08:11.758ZINFOcrucible: Remove read data for 1000 = downstairs
5846 test test::up_test::work_completed_read_replay ... ok
58472023-09-22T23:08:11.758ZINFOcrucible: Crucible stats registered with UUID: ca559093-9d7d-488b-a4ba-f958fb30775b
58482023-09-22T23:08:11.758ZINFOcrucible: Crucible ca559093-9d7d-488b-a4ba-f958fb30775b has session id: 0eca10ae-efc8-4813-8795-14dcc01bc5fc
58492023-09-22T23:08:11.758ZINFOcrucible: ca559093-9d7d-488b-a4ba-f958fb30775b is now active with session: a6a9f1ee-69a5-4b6e-bffb-ea3a79d6305a
58502023-09-22T23:08:11.758ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs
58512023-09-22T23:08:11.758ZINFOcrucible: [1] client re-new 1 jobs since flush 0 = downstairs
58522023-09-22T23:08:11.758ZINFOcrucible: Remove read data for 1000 = downstairs
5853 test test::up_test::work_completed_two_read_replay ... ok
58542023-09-22T23:08:11.759ZINFOcrucible: Crucible stats registered with UUID: 8a7b82aa-7655-47d5-9cc5-a40ab98dd0f7
58552023-09-22T23:08:11.759ZINFOcrucible: Crucible 8a7b82aa-7655-47d5-9cc5-a40ab98dd0f7 has session id: 5888f378-0167-4ed0-87b6-b77df76ef320
58562023-09-22T23:08:11.759ZINFOcrucible: 8a7b82aa-7655-47d5-9cc5-a40ab98dd0f7 is now active with session: 9292b68c-d1fa-4d40-a83d-78aa9fe177c8
58572023-09-22T23:08:11.759ZINFOcrucible: [1] client re-new 1 jobs since flush 0 = downstairs
5858 test test::up_test::work_completed_write_ack_ready_replay_write ... ok
58592023-09-22T23:08:11.759ZINFOcrucible: Crucible stats registered with UUID: f50f25cc-03f7-4f76-8f7a-654e90068744
58602023-09-22T23:08:11.759ZINFOcrucible: Crucible f50f25cc-03f7-4f76-8f7a-654e90068744 has session id: c3637db0-52e8-4474-9a61-1055cd937119
58612023-09-22T23:08:11.759ZINFOcrucible: f50f25cc-03f7-4f76-8f7a-654e90068744 is now active with session: e026a9da-2ed3-46e3-b787-5cd82887fab3
58622023-09-22T23:08:11.759ZINFOcrucible: [1] client re-new 1 jobs since flush 0 = downstairs
58632023-09-22T23:08:11.759ZINFOcrucible: Remove AckReady for Wu/F 1000 = downstairs
5864 test test::up_test::work_completed_write_ack_ready_replay_write_unwritten ... ok
58652023-09-22T23:08:11.760ZINFOcrucible: Crucible stats registered with UUID: a240b570-e127-43c4-96a3-22778a1fa328
58662023-09-22T23:08:11.760ZINFOcrucible: Crucible a240b570-e127-43c4-96a3-22778a1fa328 has session id: 0069abb3-2e5f-4951-9912-c3d9208f3a26
58672023-09-22T23:08:11.760ZINFOcrucible: a240b570-e127-43c4-96a3-22778a1fa328 is now active with session: d6758c14-eeb5-41b0-9ed6-23e779b95294
58682023-09-22T23:08:11.760ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs
5869 test test::up_test::work_completed_write_acked_replay_write ... ok
58702023-09-22T23:08:11.761ZINFOcrucible: Crucible stats registered with UUID: 967bffb3-d157-4029-a4b6-32ab3150ee8f
58712023-09-22T23:08:11.761ZINFOcrucible: Crucible 967bffb3-d157-4029-a4b6-32ab3150ee8f has session id: 5738ab77-8fc4-416f-b1e5-411f532b3563
58722023-09-22T23:08:11.761ZINFOcrucible: 967bffb3-d157-4029-a4b6-32ab3150ee8f is now active with session: 58ae5c4e-225e-4e3c-8714-e0261695db4b
58732023-09-22T23:08:11.761ZINFOcrucible: [0] client re-new 1 jobs since flush 0 = downstairs
5874 test test::up_test::work_completed_write_acked_replay_write_unwritten ... ok
58752023-09-22T23:08:11.761ZINFOcrucible: Crucible stats registered with UUID: f40934a3-fdee-44c9-9d71-a1d1d03fea19
58762023-09-22T23:08:11.761ZINFOcrucible: Crucible f40934a3-fdee-44c9-9d71-a1d1d03fea19 has session id: b73f194c-9d4d-4c8e-9865-c567c649fc38
58772023-09-22T23:08:11.761ZINFOcrucible: f40934a3-fdee-44c9-9d71-a1d1d03fea19 is now active with session: 8418f7e0-43db-406c-9f0b-702bdd2c897f
5878 test test::up_test::work_completed_write_flush ... ok
58792023-09-22T23:08:11.762ZINFOcrucible: Crucible stats registered with UUID: 7bd3b9d3-f60f-42a6-9eda-04d5a1f1c6f1
58802023-09-22T23:08:11.762ZINFOcrucible: Crucible 7bd3b9d3-f60f-42a6-9eda-04d5a1f1c6f1 has session id: b3d5524f-3e8f-4d47-b5bf-8ec35b91f9fa
58812023-09-22T23:08:11.762ZINFOcrucible: 7bd3b9d3-f60f-42a6-9eda-04d5a1f1c6f1 is now active with session: 51ee745f-e6b3-4b19-ab88-b71826522751
5882 test test::up_test::work_completed_write_unwritten_flush ... ok
58832023-09-22T23:08:11.763ZINFOcrucible: Crucible stats registered with UUID: 12461ba7-0953-4115-b9c7-f5f2db94a25f
58842023-09-22T23:08:11.763ZINFOcrucible: Crucible 12461ba7-0953-4115-b9c7-f5f2db94a25f has session id: 19c7c0e9-2722-4c7e-96bd-3e13d708aecb
58852023-09-22T23:08:11.763ZINFOcrucible: 12461ba7-0953-4115-b9c7-f5f2db94a25f is now active with session: f984c317-2afb-4ac5-a655-52c94b263a05
5886 test test::up_test::work_delay_completion_flush_order_write ... ok
58872023-09-22T23:08:11.763ZINFOcrucible: Crucible stats registered with UUID: 42f2d956-5bba-405a-8481-734c4465721c
58882023-09-22T23:08:11.763ZINFOcrucible: Crucible 42f2d956-5bba-405a-8481-734c4465721c has session id: b8d9ce3c-6c5e-4952-972d-b7ecfcaab302
58892023-09-22T23:08:11.763ZINFOcrucible: 42f2d956-5bba-405a-8481-734c4465721c is now active with session: 88ff3002-9740-423a-bbb2-f884972a2f8c
5890 test test::up_test::work_delay_completion_flush_order_write_unwritten ... ok
58912023-09-22T23:08:11.764ZINFOcrucible: Crucible stats registered with UUID: 95feca81-c3d9-4608-80e5-1033c39f45c4
58922023-09-22T23:08:11.764ZINFOcrucible: Crucible 95feca81-c3d9-4608-80e5-1033c39f45c4 has session id: bec7408f-7630-470d-b4a9-8c856037eab6
58932023-09-22T23:08:11.764ZINFOcrucible: 95feca81-c3d9-4608-80e5-1033c39f45c4 is now active with session: c07153a0-e57e-470b-b4b4-c8c88b42ee17
5894 test test::up_test::work_delay_completion_flush_write ... ok
58952023-09-22T23:08:11.765ZINFOcrucible: Crucible stats registered with UUID: 3321ed8d-989a-454d-a5f8-c78cc2732803
58962023-09-22T23:08:11.765ZINFOcrucible: Crucible 3321ed8d-989a-454d-a5f8-c78cc2732803 has session id: 8029d8c0-7998-4a28-88d6-16952816795a
58972023-09-22T23:08:11.765ZINFOcrucible: 3321ed8d-989a-454d-a5f8-c78cc2732803 is now active with session: 0657acee-3aa2-4b8f-9b44-83402e8dad84
5898 test test::up_test::work_delay_completion_flush_write_unwritten ... ok
58992023-09-22T23:08:11.766ZINFOcrucible: Crucible stats registered with UUID: 44b091f9-cc65-47df-9514-3b0c43cb2566
59002023-09-22T23:08:11.766ZINFOcrucible: Crucible 44b091f9-cc65-47df-9514-3b0c43cb2566 has session id: 41c2e9a0-1d6f-43bf-9419-719b43de250d
59012023-09-22T23:08:11.766ZINFOcrucible: 44b091f9-cc65-47df-9514-3b0c43cb2566 is now active with session: 19565874-d9c4-4abf-a855-def36b2b783a
59022023-09-22T23:08:11.766ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 0, work: Flush { dependencies: [], flush_number: 10, gen_number: 0, snapshot_details: None, extent_limit: None }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
59032023-09-22T23:08:11.766ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 0, work: Flush { dependencies: [], flush_number: 10, gen_number: 0, snapshot_details: None, extent_limit: None }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
5904 test test::up_test::work_flush_one_error_then_ok ... ok
59052023-09-22T23:08:11.766ZINFOcrucible: Crucible stats registered with UUID: bb5e3b9e-cec5-4a34-9509-2d2d03f676dc
59062023-09-22T23:08:11.766ZINFOcrucible: Crucible bb5e3b9e-cec5-4a34-9509-2d2d03f676dc has session id: 2b846f00-7bfb-4d28-8317-a7993b81af7a
59072023-09-22T23:08:11.766ZINFOcrucible: bb5e3b9e-cec5-4a34-9509-2d2d03f676dc is now active with session: a4a4cfcf-ceb0-4c0b-b665-b118949ef7d1
5908 test test::up_test::work_flush_snapshot_needs_three ... ok
59092023-09-22T23:08:11.767ZINFOcrucible: Crucible stats registered with UUID: bd0c05db-c33d-4701-aed7-cd3200156978
59102023-09-22T23:08:11.767ZINFOcrucible: Crucible bd0c05db-c33d-4701-aed7-cd3200156978 has session id: bceefb14-642d-4368-93e8-ad29c08fbc77
59112023-09-22T23:08:11.767ZINFOcrucible: bd0c05db-c33d-4701-aed7-cd3200156978 is now active with session: bc1ab4ca-8dfa-4006-bcbf-88eede5e3b42
5912 test test::up_test::work_flush_three_ok ... ok
59132023-09-22T23:08:11.767ZINFOcrucible: Crucible stats registered with UUID: e3168393-c2fa-4c5b-a918-146bdacd66ad
59142023-09-22T23:08:11.767ZINFOcrucible: Crucible e3168393-c2fa-4c5b-a918-146bdacd66ad has session id: d6df6f8e-c3a6-436f-b7ab-618ce9b17356
59152023-09-22T23:08:11.767ZINFOcrucible: e3168393-c2fa-4c5b-a918-146bdacd66ad is now active with session: dabdbf9f-c8e8-418c-b850-1c5c51575d75
59162023-09-22T23:08:11.767ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 0, work: Flush { dependencies: [], flush_number: 10, gen_number: 0, snapshot_details: None, extent_limit: None }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
59172023-09-22T23:08:11.768ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 0, work: Flush { dependencies: [], flush_number: 10, gen_number: 0, snapshot_details: None, extent_limit: None }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
59182023-09-22T23:08:11.768ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 0, work: Flush { dependencies: [], flush_number: 10, gen_number: 0, snapshot_details: None, extent_limit: None }, state: ClientData([Error(GenericError("bad")), Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
59192023-09-22T23:08:11.768ZERROcrucible: [2] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 0, work: Flush { dependencies: [], flush_number: 10, gen_number: 0, snapshot_details: None, extent_limit: None }, state: ClientData([Error(GenericError("bad")), Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: Empty } = downstairs
5920 test test::up_test::work_flush_two_errors_equals_fail ... ok
59212023-09-22T23:08:11.768ZINFOcrucible: Crucible stats registered with UUID: 54379435-01b9-4102-9528-16f19bf3ab86
59222023-09-22T23:08:11.768ZINFOcrucible: Crucible 54379435-01b9-4102-9528-16f19bf3ab86 has session id: 8b86af86-7e60-46d0-bba4-f78f32e15b28
59232023-09-22T23:08:11.768ZINFOcrucible: 54379435-01b9-4102-9528-16f19bf3ab86 is now active with session: 0cd31d9d-58f5-4ad3-9b4f-107e13b35079
5924 test test::up_test::work_read_hash_mismatch ... ok
59252023-09-22T23:08:11.769ZINFOcrucible: Crucible stats registered with UUID: 684a8c0d-d58a-426e-b44f-af7a313a5e8d
59262023-09-22T23:08:11.769ZINFOcrucible: Crucible 684a8c0d-d58a-426e-b44f-af7a313a5e8d has session id: 968fe636-6cc3-4343-8452-40171183a36f
59272023-09-22T23:08:11.769ZINFOcrucible: 684a8c0d-d58a-426e-b44f-af7a313a5e8d is now active with session: 2e418330-6130-4c3f-bc56-73f902aa2b9f
5928 test test::up_test::work_read_hash_mismatch_ack ... ok
59292023-09-22T23:08:11.770ZINFOcrucible: Crucible stats registered with UUID: a8498796-f66a-43ec-9e57-6c73cf8382f7
59302023-09-22T23:08:11.770ZINFOcrucible: Crucible a8498796-f66a-43ec-9e57-6c73cf8382f7 has session id: 3380c477-6e82-43e8-b0fa-bf69ed3e8f1e
59312023-09-22T23:08:11.770ZINFOcrucible: a8498796-f66a-43ec-9e57-6c73cf8382f7 is now active with session: 90280377-8ca1-461d-b335-83e34b288efe
5932 test test::up_test::work_read_hash_mismatch_inside ... ok
59332023-09-22T23:08:11.770ZINFOcrucible: Crucible stats registered with UUID: 6499e0f3-38fd-4fde-bda2-e50b70ff0622
59342023-09-22T23:08:11.770ZINFOcrucible: Crucible 6499e0f3-38fd-4fde-bda2-e50b70ff0622 has session id: 5cb4d9a9-f270-4651-88b7-0d0d57976333
59352023-09-22T23:08:11.771ZINFOcrucible: 6499e0f3-38fd-4fde-bda2-e50b70ff0622 is now active with session: 22793feb-c68c-4a5f-9f86-ba4e929afb5f
5936 test test::up_test::work_read_hash_mismatch_no_data ... ok
59372023-09-22T23:08:11.771ZINFOcrucible: Crucible stats registered with UUID: cc74aa76-3419-4349-a09c-27ed06d1c01f
59382023-09-22T23:08:11.771ZINFOcrucible: Crucible cc74aa76-3419-4349-a09c-27ed06d1c01f has session id: fb7aefd3-8ae7-4f7f-8cf3-6b410a31d9e3
59392023-09-22T23:08:11.771ZINFOcrucible: cc74aa76-3419-4349-a09c-27ed06d1c01f is now active with session: e2f85476-f322-4b08-84d2-094d38df2f08
5940 test test::up_test::work_read_hash_mismatch_no_data_next ... ok
5941 test test::up_test::work_read_hash_mismatch_third ... ok
59422023-09-22T23:08:11.772ZINFOcrucible: Crucible stats registered with UUID: 774e44c7-f9d0-4ab2-bc7b-d62c3b99fd89
59432023-09-22T23:08:11.773ZINFOcrucible: Crucible 774e44c7-f9d0-4ab2-bc7b-d62c3b99fd89 has session id: 5a867bc5-f945-47b1-83c3-65528d4c5967
59442023-09-22T23:08:11.773ZINFOcrucible: 774e44c7-f9d0-4ab2-bc7b-d62c3b99fd89 is now active with session: f1d0da1e-baf4-4ea1-a759-9524dae41f33
5945 test test::up_test::work_read_hash_mismatch_third_ack ... ok
59462023-09-22T23:08:11.773ZINFOcrucible: Crucible stats registered with UUID: 9408d4f5-4f72-4231-be2d-7992e1d0655a
59472023-09-22T23:08:11.773ZINFOcrucible: Crucible 9408d4f5-4f72-4231-be2d-7992e1d0655a has session id: d3ebd7d2-d13d-4651-847a-54fa059af30f
59482023-09-22T23:08:11.773ZINFOcrucible: 9408d4f5-4f72-4231-be2d-7992e1d0655a is now active with session: 7b635bfa-33f1-424f-bbe3-6c11912fa3b9
59492023-09-22T23:08:11.773ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59502023-09-22T23:08:11.773ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59512023-09-22T23:08:11.773ZERROcrucible: [0] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
5952 test test::up_test::work_read_one_bad_two_ok ... ok
59532023-09-22T23:08:11.774ZINFOcrucible: Crucible stats registered with UUID: d11acf42-c71d-4172-8ff1-ebfff311ac98
59542023-09-22T23:08:11.774ZINFOcrucible: Crucible d11acf42-c71d-4172-8ff1-ebfff311ac98 has session id: 353d62dc-25aa-4615-be96-5097c15d0000
59552023-09-22T23:08:11.774ZINFOcrucible: d11acf42-c71d-4172-8ff1-ebfff311ac98 is now active with session: 00811f44-4b83-435a-aab7-eb3086b41cb9
5956 test test::up_test::work_read_one_ok ... ok
59572023-09-22T23:08:11.775ZINFOcrucible: Crucible stats registered with UUID: 867579c1-1c7c-4587-b352-2937db1508c1
59582023-09-22T23:08:11.775ZINFOcrucible: Crucible 867579c1-1c7c-4587-b352-2937db1508c1 has session id: 0aeda27e-f9ab-4818-b5b5-bfc9df18ab4e
59592023-09-22T23:08:11.775ZINFOcrucible: 867579c1-1c7c-4587-b352-2937db1508c1 is now active with session: 28880833-7f1f-47c0-a7db-be972653d01c
59602023-09-22T23:08:11.775ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59612023-09-22T23:08:11.775ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59622023-09-22T23:08:11.775ZERROcrucible: [0] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59632023-09-22T23:08:11.775ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59642023-09-22T23:08:11.775ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59652023-09-22T23:08:11.775ZERROcrucible: [1] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59662023-09-22T23:08:11.775ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59672023-09-22T23:08:11.775ZERROcrucible: [2] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59682023-09-22T23:08:11.775ZERROcrucible: [2] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), Error(GenericError("bad"))]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
5969 test test::up_test::work_read_three_bad ... ok
59702023-09-22T23:08:11.776ZINFOcrucible: Crucible stats registered with UUID: 9149a4b9-99e7-4272-8e5a-a8e1d4a70d6b
59712023-09-22T23:08:11.776ZINFOcrucible: Crucible 9149a4b9-99e7-4272-8e5a-a8e1d4a70d6b has session id: a53f09aa-31e4-4f9a-9ef7-c5c2a9fe2f95
59722023-09-22T23:08:11.776ZINFOcrucible: 9149a4b9-99e7-4272-8e5a-a8e1d4a70d6b is now active with session: 7f1b7fd2-3cc2-4a18-a4ac-a2f025f22f53
59732023-09-22T23:08:11.776ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59742023-09-22T23:08:11.776ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59752023-09-22T23:08:11.776ZERROcrucible: [0] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59762023-09-22T23:08:11.776ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59772023-09-22T23:08:11.776ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59782023-09-22T23:08:11.776ZERROcrucible: [1] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
5979 test test::up_test::work_read_two_bad_one_ok ... ok
59802023-09-22T23:08:11.776ZINFOcrucible: Crucible stats registered with UUID: 9de103ab-4de1-4141-b9e2-0d4ad7cdf391
59812023-09-22T23:08:11.776ZINFOcrucible: Crucible 9de103ab-4de1-4141-b9e2-0d4ad7cdf391 has session id: 0bfe2bc3-2226-46ee-b197-26f5feee2d43
59822023-09-22T23:08:11.776ZINFOcrucible: 9de103ab-4de1-4141-b9e2-0d4ad7cdf391 is now active with session: 843e1bca-d671-479d-9c98-5719d9da9ec1
59832023-09-22T23:08:11.776ZWARNcrucible: [2] 9de103ab-4de1-4141-b9e2-0d4ad7cdf391 WARNING finish job 1000 when downstairs state:New
59842023-09-22T23:08:11.776ZWARNcrucible: [0] 9de103ab-4de1-4141-b9e2-0d4ad7cdf391 WARNING finish job 1000 when downstairs state:New
59852023-09-22T23:08:11.776ZWARNcrucible: [1] 9de103ab-4de1-4141-b9e2-0d4ad7cdf391 WARNING finish job 1000 when downstairs state:New
59862023-09-22T23:08:11.776ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Done, InProgress, Done]), ack_status: Acked, replay: false, data: Some([ReadResponse { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"", block_contexts: [BlockContext { hash: 17241709254077376921, encryption_context: None }] }]), read_response_hashes: [Some(17241709254077376921)], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59872023-09-22T23:08:11.777ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Done, InProgress, Done]), ack_status: Acked, replay: false, data: Some([ReadResponse { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"", block_contexts: [BlockContext { hash: 17241709254077376921, encryption_context: None }] }]), read_response_hashes: [Some(17241709254077376921)], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59882023-09-22T23:08:11.777ZERROcrucible: [1] 1000 read error GenericError("bad") DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Read { dependencies: [], requests: [ReadRequest { eid: 0, offset: Block { value: 7, shift: 9 } }] }, state: ClientData([Done, Error(GenericError("bad")), Done]), ack_status: Acked, replay: false, data: Some([ReadResponse { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"", block_contexts: [BlockContext { hash: 17241709254077376921, encryption_context: None }] }]), read_response_hashes: [Some(17241709254077376921)], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
5989 test test::up_test::work_read_two_ok_one_bad ... ok
59902023-09-22T23:08:11.777ZINFOcrucible: Crucible stats registered with UUID: 388dc96b-446c-4f84-aa2e-4a349a17bdab
59912023-09-22T23:08:11.777ZINFOcrucible: Crucible 388dc96b-446c-4f84-aa2e-4a349a17bdab has session id: 455f5f55-8830-4dab-8a60-cb7a980aa3c8
59922023-09-22T23:08:11.777ZINFOcrucible: 388dc96b-446c-4f84-aa2e-4a349a17bdab is now active with session: 1ca4a2c2-3555-4d83-acfc-2381bc5f10ae
59932023-09-22T23:08:11.777ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: WriteUnwritten { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59942023-09-22T23:08:11.777ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: WriteUnwritten { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59952023-09-22T23:08:11.777ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: WriteUnwritten { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
59962023-09-22T23:08:11.777ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: WriteUnwritten { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
5997 test test::up_test::work_write_errors_are_counted ... ok
59982023-09-22T23:08:11.778ZINFOcrucible: Crucible stats registered with UUID: 584d6086-2bd6-4a47-9661-162273a4cbd8
59992023-09-22T23:08:11.778ZINFOcrucible: Crucible 584d6086-2bd6-4a47-9661-162273a4cbd8 has session id: f720aa8e-a1f0-4c1e-8107-2d9e04c358ed
60002023-09-22T23:08:11.778ZINFOcrucible: 584d6086-2bd6-4a47-9661-162273a4cbd8 is now active with session: daeed3aa-3720-4ae9-8698-570801ddc87d
60012023-09-22T23:08:11.778ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60022023-09-22T23:08:11.778ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60032023-09-22T23:08:11.778ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60042023-09-22T23:08:11.778ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
6005 test test::up_test::work_write_unwritten_errors_are_counted ... ok
60062023-09-22T23:08:11.779ZINFOcrucible: Crucible stats registered with UUID: c3324075-9a8b-48e2-b877-1a956a371051
60072023-09-22T23:08:11.779ZINFOcrucible: Crucible c3324075-9a8b-48e2-b877-1a956a371051 has session id: 2ec2225d-4d74-4b4d-ab98-ec5e43d1bb0d
60082023-09-22T23:08:11.779ZINFOcrucible: [0] c3324075-9a8b-48e2-b877-1a956a371051 (ea5b09c3-02ec-4513-8a6d-def06c87433b) New New New ds_transition to WaitActive
60092023-09-22T23:08:11.779ZINFOcrucible: [0] Transition from New to WaitActive
60102023-09-22T23:08:11.779ZINFOcrucible: [0] c3324075-9a8b-48e2-b877-1a956a371051 (ea5b09c3-02ec-4513-8a6d-def06c87433b) WaitActive New New ds_transition to WaitQuorum
60112023-09-22T23:08:11.779ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
60122023-09-22T23:08:11.779ZINFOcrucible: [0] c3324075-9a8b-48e2-b877-1a956a371051 (ea5b09c3-02ec-4513-8a6d-def06c87433b) WaitQuorum New New ds_transition to Active
60132023-09-22T23:08:11.779ZINFOcrucible: [0] Transition from WaitQuorum to Active
60142023-09-22T23:08:11.779ZINFOcrucible: [1] c3324075-9a8b-48e2-b877-1a956a371051 (ea5b09c3-02ec-4513-8a6d-def06c87433b) Active New New ds_transition to WaitActive
60152023-09-22T23:08:11.779ZINFOcrucible: [1] Transition from New to WaitActive
60162023-09-22T23:08:11.779ZINFOcrucible: [1] c3324075-9a8b-48e2-b877-1a956a371051 (ea5b09c3-02ec-4513-8a6d-def06c87433b) Active WaitActive New ds_transition to WaitQuorum
60172023-09-22T23:08:11.779ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
60182023-09-22T23:08:11.779ZINFOcrucible: [1] c3324075-9a8b-48e2-b877-1a956a371051 (ea5b09c3-02ec-4513-8a6d-def06c87433b) Active WaitQuorum New ds_transition to Active
60192023-09-22T23:08:11.779ZINFOcrucible: [1] Transition from WaitQuorum to Active
60202023-09-22T23:08:11.779ZINFOcrucible: [2] c3324075-9a8b-48e2-b877-1a956a371051 (ea5b09c3-02ec-4513-8a6d-def06c87433b) Active Active New ds_transition to WaitActive
60212023-09-22T23:08:11.779ZINFOcrucible: [2] Transition from New to WaitActive
60222023-09-22T23:08:11.779ZINFOcrucible: [2] c3324075-9a8b-48e2-b877-1a956a371051 (ea5b09c3-02ec-4513-8a6d-def06c87433b) Active Active WaitActive ds_transition to WaitQuorum
60232023-09-22T23:08:11.779ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
60242023-09-22T23:08:11.779ZINFOcrucible: [2] c3324075-9a8b-48e2-b877-1a956a371051 (ea5b09c3-02ec-4513-8a6d-def06c87433b) Active Active WaitQuorum ds_transition to Active
60252023-09-22T23:08:11.779ZINFOcrucible: [2] Transition from WaitQuorum to Active
60262023-09-22T23:08:11.779ZINFOcrucible: c3324075-9a8b-48e2-b877-1a956a371051 is now active with session: ea5b09c3-02ec-4513-8a6d-def06c87433b
60272023-09-22T23:08:11.779ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60282023-09-22T23:08:11.779ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60292023-09-22T23:08:11.779ZINFOcrucible: [0] client skip 1 in process jobs because fault = downstairs
60302023-09-22T23:08:11.779ZINFOcrucible: [0] changed 0 jobs to fault skipped = downstairs
60312023-09-22T23:08:11.779ZINFOcrucible: [0] c3324075-9a8b-48e2-b877-1a956a371051 (ea5b09c3-02ec-4513-8a6d-def06c87433b) Active Active Active ds_transition to Faulted
60322023-09-22T23:08:11.779ZINFOcrucible: [0] Transition from Active to Faulted
60332023-09-22T23:08:11.779ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60342023-09-22T23:08:11.779ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60352023-09-22T23:08:11.779ZINFOcrucible: [1] client skip 1 in process jobs because fault = downstairs
60362023-09-22T23:08:11.779ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
60372023-09-22T23:08:11.779ZINFOcrucible: [1] c3324075-9a8b-48e2-b877-1a956a371051 (ea5b09c3-02ec-4513-8a6d-def06c87433b) Faulted Active Active ds_transition to Faulted
60382023-09-22T23:08:11.779ZINFOcrucible: [1] Transition from Active to Faulted
60392023-09-22T23:08:11.779ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60402023-09-22T23:08:11.779ZERROcrucible: [2] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Error(GenericError("bad")), Error(GenericError("bad")), InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60412023-09-22T23:08:11.779ZINFOcrucible: [2] client skip 1 in process jobs because fault = downstairs
60422023-09-22T23:08:11.779ZINFOcrucible: [2] changed 0 jobs to fault skipped = downstairs
60432023-09-22T23:08:11.779ZINFOcrucible: [2] c3324075-9a8b-48e2-b877-1a956a371051 (ea5b09c3-02ec-4513-8a6d-def06c87433b) Faulted Faulted Active ds_transition to Faulted
60442023-09-22T23:08:11.779ZINFOcrucible: [2] Transition from Active to Faulted
6045 test test::up_test::work_writes_bad ... ok
60462023-09-22T23:08:11.780ZINFOcrucible: Crucible stats registered with UUID: c804db91-8cb0-47a8-ae69-7e9036592932
60472023-09-22T23:08:11.780ZINFOcrucible: Crucible c804db91-8cb0-47a8-ae69-7e9036592932 has session id: 062d0348-601a-40fd-8559-b52b9541cd0b
60482023-09-22T23:08:11.780ZINFOcrucible: [0] c804db91-8cb0-47a8-ae69-7e9036592932 (15a45cb2-9167-4006-9ad8-cbdd2abb55c1) New New New ds_transition to WaitActive
60492023-09-22T23:08:11.780ZINFOcrucible: [0] Transition from New to WaitActive
60502023-09-22T23:08:11.780ZINFOcrucible: [0] c804db91-8cb0-47a8-ae69-7e9036592932 (15a45cb2-9167-4006-9ad8-cbdd2abb55c1) WaitActive New New ds_transition to WaitQuorum
60512023-09-22T23:08:11.780ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
60522023-09-22T23:08:11.780ZINFOcrucible: [0] c804db91-8cb0-47a8-ae69-7e9036592932 (15a45cb2-9167-4006-9ad8-cbdd2abb55c1) WaitQuorum New New ds_transition to Active
60532023-09-22T23:08:11.780ZINFOcrucible: [0] Transition from WaitQuorum to Active
60542023-09-22T23:08:11.780ZINFOcrucible: [1] c804db91-8cb0-47a8-ae69-7e9036592932 (15a45cb2-9167-4006-9ad8-cbdd2abb55c1) Active New New ds_transition to WaitActive
60552023-09-22T23:08:11.780ZINFOcrucible: [1] Transition from New to WaitActive
60562023-09-22T23:08:11.780ZINFOcrucible: [1] c804db91-8cb0-47a8-ae69-7e9036592932 (15a45cb2-9167-4006-9ad8-cbdd2abb55c1) Active WaitActive New ds_transition to WaitQuorum
60572023-09-22T23:08:11.780ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
60582023-09-22T23:08:11.780ZINFOcrucible: [1] c804db91-8cb0-47a8-ae69-7e9036592932 (15a45cb2-9167-4006-9ad8-cbdd2abb55c1) Active WaitQuorum New ds_transition to Active
60592023-09-22T23:08:11.780ZINFOcrucible: [1] Transition from WaitQuorum to Active
60602023-09-22T23:08:11.780ZINFOcrucible: [2] c804db91-8cb0-47a8-ae69-7e9036592932 (15a45cb2-9167-4006-9ad8-cbdd2abb55c1) Active Active New ds_transition to WaitActive
60612023-09-22T23:08:11.780ZINFOcrucible: [2] Transition from New to WaitActive
60622023-09-22T23:08:11.780ZINFOcrucible: [2] c804db91-8cb0-47a8-ae69-7e9036592932 (15a45cb2-9167-4006-9ad8-cbdd2abb55c1) Active Active WaitActive ds_transition to WaitQuorum
60632023-09-22T23:08:11.780ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
60642023-09-22T23:08:11.780ZINFOcrucible: [2] c804db91-8cb0-47a8-ae69-7e9036592932 (15a45cb2-9167-4006-9ad8-cbdd2abb55c1) Active Active WaitQuorum ds_transition to Active
60652023-09-22T23:08:11.780ZINFOcrucible: [2] Transition from WaitQuorum to Active
60662023-09-22T23:08:11.780ZINFOcrucible: c804db91-8cb0-47a8-ae69-7e9036592932 is now active with session: 15a45cb2-9167-4006-9ad8-cbdd2abb55c1
60672023-09-22T23:08:11.780ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60682023-09-22T23:08:11.781ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
60692023-09-22T23:08:11.781ZINFOcrucible: [1] client skip 1 in process jobs because fault = downstairs
60702023-09-22T23:08:11.781ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
60712023-09-22T23:08:11.781ZINFOcrucible: [1] c804db91-8cb0-47a8-ae69-7e9036592932 (15a45cb2-9167-4006-9ad8-cbdd2abb55c1) Active Active Active ds_transition to Faulted
60722023-09-22T23:08:11.781ZINFOcrucible: [1] Transition from Active to Faulted
6073 test test::up_test::write_after_write_fail_is_alright ... ok
60742023-09-22T23:08:11.781ZINFOcrucible: Crucible stats registered with UUID: 1d0fc563-24c1-4002-a210-f2cb52557866
60752023-09-22T23:08:11.781ZINFOcrucible: Crucible 1d0fc563-24c1-4002-a210-f2cb52557866 has session id: 400507b3-3433-46f1-97f9-6f69f8272a87
60762023-09-22T23:08:11.781ZINFOcrucible: 1d0fc563-24c1-4002-a210-f2cb52557866 is now active with session: 35fd1c32-1e91-431a-be5b-1e2e51f05fc7
60772023-09-22T23:08:11.781ZINFOcrucible: [0] 1d0fc563-24c1-4002-a210-f2cb52557866 (35fd1c32-1e91-431a-be5b-1e2e51f05fc7) New New New ds_transition to WaitActive
60782023-09-22T23:08:11.781ZINFOcrucible: [0] Transition from New to WaitActive
60792023-09-22T23:08:11.781ZINFOcrucible: [0] 1d0fc563-24c1-4002-a210-f2cb52557866 (35fd1c32-1e91-431a-be5b-1e2e51f05fc7) WaitActive New New ds_transition to WaitQuorum
60802023-09-22T23:08:11.781ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
60812023-09-22T23:08:11.781ZINFOcrucible: [0] 1d0fc563-24c1-4002-a210-f2cb52557866 (35fd1c32-1e91-431a-be5b-1e2e51f05fc7) WaitQuorum New New ds_transition to Active
60822023-09-22T23:08:11.781ZINFOcrucible: [0] Transition from WaitQuorum to Active
60832023-09-22T23:08:11.781ZINFOcrucible: [1] 1d0fc563-24c1-4002-a210-f2cb52557866 (35fd1c32-1e91-431a-be5b-1e2e51f05fc7) Active New New ds_transition to WaitActive
60842023-09-22T23:08:11.781ZINFOcrucible: [1] Transition from New to WaitActive
60852023-09-22T23:08:11.781ZINFOcrucible: [1] 1d0fc563-24c1-4002-a210-f2cb52557866 (35fd1c32-1e91-431a-be5b-1e2e51f05fc7) Active WaitActive New ds_transition to WaitQuorum
60862023-09-22T23:08:11.781ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
60872023-09-22T23:08:11.781ZINFOcrucible: [1] 1d0fc563-24c1-4002-a210-f2cb52557866 (35fd1c32-1e91-431a-be5b-1e2e51f05fc7) Active WaitQuorum New ds_transition to Active
60882023-09-22T23:08:11.781ZINFOcrucible: [1] Transition from WaitQuorum to Active
60892023-09-22T23:08:11.781ZINFOcrucible: [2] 1d0fc563-24c1-4002-a210-f2cb52557866 (35fd1c32-1e91-431a-be5b-1e2e51f05fc7) Active Active New ds_transition to WaitActive
60902023-09-22T23:08:11.782ZINFOcrucible: [2] Transition from New to WaitActive
60912023-09-22T23:08:11.782ZINFOcrucible: [2] 1d0fc563-24c1-4002-a210-f2cb52557866 (35fd1c32-1e91-431a-be5b-1e2e51f05fc7) Active Active WaitActive ds_transition to WaitQuorum
60922023-09-22T23:08:11.782ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
60932023-09-22T23:08:11.782ZINFOcrucible: [2] 1d0fc563-24c1-4002-a210-f2cb52557866 (35fd1c32-1e91-431a-be5b-1e2e51f05fc7) Active Active WaitQuorum ds_transition to Active
60942023-09-22T23:08:11.782ZINFOcrucible: [2] Transition from WaitQuorum to Active
60952023-09-22T23:08:11.782ZINFOcrucible: [1] 1d0fc563-24c1-4002-a210-f2cb52557866 (35fd1c32-1e91-431a-be5b-1e2e51f05fc7) Active Active Active ds_transition to Faulted
60962023-09-22T23:08:11.782ZINFOcrucible: [1] Transition from Active to Faulted
60972023-09-22T23:08:11.782ZINFOcrucible: [2] 1d0fc563-24c1-4002-a210-f2cb52557866 (35fd1c32-1e91-431a-be5b-1e2e51f05fc7) Active Faulted Active ds_transition to Faulted
60982023-09-22T23:08:11.782ZINFOcrucible: [2] Transition from Active to Faulted
6099 test test::up_test::write_double_skip ... ok
61002023-09-22T23:08:11.782ZINFOcrucible: Crucible stats registered with UUID: 951798d4-06eb-46b9-b898-b1bc7cdbac03
61012023-09-22T23:08:11.782ZINFOcrucible: Crucible 951798d4-06eb-46b9-b898-b1bc7cdbac03 has session id: 20f02494-f73d-4af2-a509-6d1b03bd10d1
61022023-09-22T23:08:11.782ZINFOcrucible: 951798d4-06eb-46b9-b898-b1bc7cdbac03 is now active with session: ccabf884-3bc2-43ec-876d-1738744a59bb
61032023-09-22T23:08:11.782ZINFOcrucible: [0] 951798d4-06eb-46b9-b898-b1bc7cdbac03 (ccabf884-3bc2-43ec-876d-1738744a59bb) New New New ds_transition to WaitActive
61042023-09-22T23:08:11.782ZINFOcrucible: [0] Transition from New to WaitActive
61052023-09-22T23:08:11.782ZINFOcrucible: [0] 951798d4-06eb-46b9-b898-b1bc7cdbac03 (ccabf884-3bc2-43ec-876d-1738744a59bb) WaitActive New New ds_transition to WaitQuorum
61062023-09-22T23:08:11.782ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
61072023-09-22T23:08:11.782ZINFOcrucible: [0] 951798d4-06eb-46b9-b898-b1bc7cdbac03 (ccabf884-3bc2-43ec-876d-1738744a59bb) WaitQuorum New New ds_transition to Active
61082023-09-22T23:08:11.782ZINFOcrucible: [0] Transition from WaitQuorum to Active
61092023-09-22T23:08:11.782ZINFOcrucible: [1] 951798d4-06eb-46b9-b898-b1bc7cdbac03 (ccabf884-3bc2-43ec-876d-1738744a59bb) Active New New ds_transition to WaitActive
61102023-09-22T23:08:11.782ZINFOcrucible: [1] Transition from New to WaitActive
61112023-09-22T23:08:11.782ZINFOcrucible: [1] 951798d4-06eb-46b9-b898-b1bc7cdbac03 (ccabf884-3bc2-43ec-876d-1738744a59bb) Active WaitActive New ds_transition to WaitQuorum
61122023-09-22T23:08:11.782ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
61132023-09-22T23:08:11.782ZINFOcrucible: [1] 951798d4-06eb-46b9-b898-b1bc7cdbac03 (ccabf884-3bc2-43ec-876d-1738744a59bb) Active WaitQuorum New ds_transition to Active
61142023-09-22T23:08:11.783ZINFOcrucible: [1] Transition from WaitQuorum to Active
61152023-09-22T23:08:11.783ZINFOcrucible: [2] 951798d4-06eb-46b9-b898-b1bc7cdbac03 (ccabf884-3bc2-43ec-876d-1738744a59bb) Active Active New ds_transition to WaitActive
61162023-09-22T23:08:11.783ZINFOcrucible: [2] Transition from New to WaitActive
61172023-09-22T23:08:11.783ZINFOcrucible: [2] 951798d4-06eb-46b9-b898-b1bc7cdbac03 (ccabf884-3bc2-43ec-876d-1738744a59bb) Active Active WaitActive ds_transition to WaitQuorum
61182023-09-22T23:08:11.783ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
61192023-09-22T23:08:11.783ZINFOcrucible: [2] 951798d4-06eb-46b9-b898-b1bc7cdbac03 (ccabf884-3bc2-43ec-876d-1738744a59bb) Active Active WaitQuorum ds_transition to Active
61202023-09-22T23:08:11.783ZINFOcrucible: [2] Transition from WaitQuorum to Active
61212023-09-22T23:08:11.783ZINFOcrucible: [2] 951798d4-06eb-46b9-b898-b1bc7cdbac03 (ccabf884-3bc2-43ec-876d-1738744a59bb) Active Active Active ds_transition to Faulted
61222023-09-22T23:08:11.783ZINFOcrucible: [2] Transition from Active to Faulted
61232023-09-22T23:08:11.783ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1010, DownstairsIO { ds_id: JobId(1010), guest_id: 19, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, Skipped]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61242023-09-22T23:08:11.783ZERROcrucible: [1] Reports error GenericError("bad") on job 1010, DownstairsIO { ds_id: JobId(1010), guest_id: 19, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, Skipped]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61252023-09-22T23:08:11.783ZINFOcrucible: All done
6126 test test::up_test::write_fail_and_skip ... ok
61272023-09-22T23:08:11.783ZINFOcrucible: Crucible stats registered with UUID: 50316693-20c8-443c-8411-a56850f10150
61282023-09-22T23:08:11.783ZINFOcrucible: Crucible 50316693-20c8-443c-8411-a56850f10150 has session id: 96bec26f-2277-4c5c-93d7-ba39f7d53456
61292023-09-22T23:08:11.783ZINFOcrucible: [0] 50316693-20c8-443c-8411-a56850f10150 (df9b5363-3c6a-4de3-b7ee-261fe7d436a0) New New New ds_transition to WaitActive
61302023-09-22T23:08:11.783ZINFOcrucible: [0] Transition from New to WaitActive
61312023-09-22T23:08:11.783ZINFOcrucible: [0] 50316693-20c8-443c-8411-a56850f10150 (df9b5363-3c6a-4de3-b7ee-261fe7d436a0) WaitActive New New ds_transition to WaitQuorum
61322023-09-22T23:08:11.783ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
61332023-09-22T23:08:11.783ZINFOcrucible: [0] 50316693-20c8-443c-8411-a56850f10150 (df9b5363-3c6a-4de3-b7ee-261fe7d436a0) WaitQuorum New New ds_transition to Active
61342023-09-22T23:08:11.783ZINFOcrucible: [0] Transition from WaitQuorum to Active
61352023-09-22T23:08:11.783ZINFOcrucible: [1] 50316693-20c8-443c-8411-a56850f10150 (df9b5363-3c6a-4de3-b7ee-261fe7d436a0) Active New New ds_transition to WaitActive
61362023-09-22T23:08:11.784ZINFOcrucible: [1] Transition from New to WaitActive
61372023-09-22T23:08:11.784ZINFOcrucible: [1] 50316693-20c8-443c-8411-a56850f10150 (df9b5363-3c6a-4de3-b7ee-261fe7d436a0) Active WaitActive New ds_transition to WaitQuorum
61382023-09-22T23:08:11.784ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
61392023-09-22T23:08:11.784ZINFOcrucible: [1] 50316693-20c8-443c-8411-a56850f10150 (df9b5363-3c6a-4de3-b7ee-261fe7d436a0) Active WaitQuorum New ds_transition to Active
61402023-09-22T23:08:11.784ZINFOcrucible: [1] Transition from WaitQuorum to Active
61412023-09-22T23:08:11.784ZINFOcrucible: [2] 50316693-20c8-443c-8411-a56850f10150 (df9b5363-3c6a-4de3-b7ee-261fe7d436a0) Active Active New ds_transition to WaitActive
61422023-09-22T23:08:11.784ZINFOcrucible: [2] Transition from New to WaitActive
61432023-09-22T23:08:11.784ZINFOcrucible: [2] 50316693-20c8-443c-8411-a56850f10150 (df9b5363-3c6a-4de3-b7ee-261fe7d436a0) Active Active WaitActive ds_transition to WaitQuorum
61442023-09-22T23:08:11.784ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
61452023-09-22T23:08:11.784ZINFOcrucible: [2] 50316693-20c8-443c-8411-a56850f10150 (df9b5363-3c6a-4de3-b7ee-261fe7d436a0) Active Active WaitQuorum ds_transition to Active
61462023-09-22T23:08:11.784ZINFOcrucible: [2] Transition from WaitQuorum to Active
61472023-09-22T23:08:11.784ZINFOcrucible: 50316693-20c8-443c-8411-a56850f10150 is now active with session: df9b5363-3c6a-4de3-b7ee-261fe7d436a0
61482023-09-22T23:08:11.784ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, Done, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61492023-09-22T23:08:11.784ZERROcrucible: [2] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, Done, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61502023-09-22T23:08:11.784ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
61512023-09-22T23:08:11.784ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
61522023-09-22T23:08:11.784ZINFOcrucible: [2] 50316693-20c8-443c-8411-a56850f10150 (df9b5363-3c6a-4de3-b7ee-261fe7d436a0) Active Active Active ds_transition to Faulted
61532023-09-22T23:08:11.784ZINFOcrucible: [2] Transition from Active to Faulted
6154 test test::up_test::write_fail_past_present_future ... ok
61552023-09-22T23:08:11.784ZINFOcrucible: Crucible stats registered with UUID: f5dc7f31-30ff-4dc8-8ace-68972e04e382
61562023-09-22T23:08:11.784ZINFOcrucible: Crucible f5dc7f31-30ff-4dc8-8ace-68972e04e382 has session id: e07a7c91-8c68-41d9-938e-06b8a87c4132
61572023-09-22T23:08:11.784ZINFOcrucible: [0] f5dc7f31-30ff-4dc8-8ace-68972e04e382 (0140ae24-3bbf-4340-90b6-64ff5c34b9d1) New New New ds_transition to WaitActive
61582023-09-22T23:08:11.785ZINFOcrucible: [0] Transition from New to WaitActive
61592023-09-22T23:08:11.785ZINFOcrucible: [0] f5dc7f31-30ff-4dc8-8ace-68972e04e382 (0140ae24-3bbf-4340-90b6-64ff5c34b9d1) WaitActive New New ds_transition to WaitQuorum
61602023-09-22T23:08:11.785ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
61612023-09-22T23:08:11.785ZINFOcrucible: [0] f5dc7f31-30ff-4dc8-8ace-68972e04e382 (0140ae24-3bbf-4340-90b6-64ff5c34b9d1) WaitQuorum New New ds_transition to Active
61622023-09-22T23:08:11.785ZINFOcrucible: [0] Transition from WaitQuorum to Active
61632023-09-22T23:08:11.785ZINFOcrucible: [1] f5dc7f31-30ff-4dc8-8ace-68972e04e382 (0140ae24-3bbf-4340-90b6-64ff5c34b9d1) Active New New ds_transition to WaitActive
61642023-09-22T23:08:11.785ZINFOcrucible: [1] Transition from New to WaitActive
61652023-09-22T23:08:11.785ZINFOcrucible: [1] f5dc7f31-30ff-4dc8-8ace-68972e04e382 (0140ae24-3bbf-4340-90b6-64ff5c34b9d1) Active WaitActive New ds_transition to WaitQuorum
61662023-09-22T23:08:11.785ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
61672023-09-22T23:08:11.785ZINFOcrucible: [1] f5dc7f31-30ff-4dc8-8ace-68972e04e382 (0140ae24-3bbf-4340-90b6-64ff5c34b9d1) Active WaitQuorum New ds_transition to Active
61682023-09-22T23:08:11.785ZINFOcrucible: [1] Transition from WaitQuorum to Active
61692023-09-22T23:08:11.785ZINFOcrucible: [2] f5dc7f31-30ff-4dc8-8ace-68972e04e382 (0140ae24-3bbf-4340-90b6-64ff5c34b9d1) Active Active New ds_transition to WaitActive
61702023-09-22T23:08:11.785ZINFOcrucible: [2] Transition from New to WaitActive
61712023-09-22T23:08:11.785ZINFOcrucible: [2] f5dc7f31-30ff-4dc8-8ace-68972e04e382 (0140ae24-3bbf-4340-90b6-64ff5c34b9d1) Active Active WaitActive ds_transition to WaitQuorum
61722023-09-22T23:08:11.785ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
61732023-09-22T23:08:11.785ZINFOcrucible: [2] f5dc7f31-30ff-4dc8-8ace-68972e04e382 (0140ae24-3bbf-4340-90b6-64ff5c34b9d1) Active Active WaitQuorum ds_transition to Active
61742023-09-22T23:08:11.785ZINFOcrucible: [2] Transition from WaitQuorum to Active
61752023-09-22T23:08:11.785ZINFOcrucible: f5dc7f31-30ff-4dc8-8ace-68972e04e382 is now active with session: 0140ae24-3bbf-4340-90b6-64ff5c34b9d1
61762023-09-22T23:08:11.785ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61772023-09-22T23:08:11.785ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
61782023-09-22T23:08:11.785ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
61792023-09-22T23:08:11.785ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
61802023-09-22T23:08:11.785ZINFOcrucible: [1] f5dc7f31-30ff-4dc8-8ace-68972e04e382 (0140ae24-3bbf-4340-90b6-64ff5c34b9d1) Active Active Active ds_transition to Faulted
61812023-09-22T23:08:11.785ZINFOcrucible: [1] Transition from Active to Faulted
6182 test test::up_test::write_fail_skips_inprogress_jobs ... ok
61832023-09-22T23:08:11.786ZINFOcrucible: Crucible stats registered with UUID: 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4
61842023-09-22T23:08:11.786ZINFOcrucible: Crucible 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4 has session id: f42bc518-963d-42fc-9e5c-737d117844c1
61852023-09-22T23:08:11.786ZINFOcrucible: [0] 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4 (7b290629-99c5-4421-825c-072ef12b9607) New New New ds_transition to WaitActive
61862023-09-22T23:08:11.786ZINFOcrucible: [0] Transition from New to WaitActive
61872023-09-22T23:08:11.786ZINFOcrucible: [0] 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4 (7b290629-99c5-4421-825c-072ef12b9607) WaitActive New New ds_transition to WaitQuorum
61882023-09-22T23:08:11.786ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
61892023-09-22T23:08:11.786ZINFOcrucible: [0] 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4 (7b290629-99c5-4421-825c-072ef12b9607) WaitQuorum New New ds_transition to Active
61902023-09-22T23:08:11.786ZINFOcrucible: [0] Transition from WaitQuorum to Active
61912023-09-22T23:08:11.786ZINFOcrucible: [1] 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4 (7b290629-99c5-4421-825c-072ef12b9607) Active New New ds_transition to WaitActive
61922023-09-22T23:08:11.786ZINFOcrucible: [1] Transition from New to WaitActive
61932023-09-22T23:08:11.786ZINFOcrucible: [1] 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4 (7b290629-99c5-4421-825c-072ef12b9607) Active WaitActive New ds_transition to WaitQuorum
61942023-09-22T23:08:11.786ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
61952023-09-22T23:08:11.786ZINFOcrucible: [1] 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4 (7b290629-99c5-4421-825c-072ef12b9607) Active WaitQuorum New ds_transition to Active
61962023-09-22T23:08:11.786ZINFOcrucible: [1] Transition from WaitQuorum to Active
61972023-09-22T23:08:11.786ZINFOcrucible: [2] 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4 (7b290629-99c5-4421-825c-072ef12b9607) Active Active New ds_transition to WaitActive
61982023-09-22T23:08:11.786ZINFOcrucible: [2] Transition from New to WaitActive
61992023-09-22T23:08:11.786ZINFOcrucible: [2] 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4 (7b290629-99c5-4421-825c-072ef12b9607) Active Active WaitActive ds_transition to WaitQuorum
62002023-09-22T23:08:11.786ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
62012023-09-22T23:08:11.786ZINFOcrucible: [2] 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4 (7b290629-99c5-4421-825c-072ef12b9607) Active Active WaitQuorum ds_transition to Active
62022023-09-22T23:08:11.786ZINFOcrucible: [2] Transition from WaitQuorum to Active
62032023-09-22T23:08:11.786ZINFOcrucible: 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4 is now active with session: 7b290629-99c5-4421-825c-072ef12b9607
62042023-09-22T23:08:11.786ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, Done, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
62052023-09-22T23:08:11.786ZERROcrucible: [2] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, Done, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
62062023-09-22T23:08:11.786ZINFOcrucible: [2] client skip 3 in process jobs because fault = downstairs
62072023-09-22T23:08:11.786ZINFOcrucible: [2] changed 0 jobs to fault skipped = downstairs
62082023-09-22T23:08:11.786ZINFOcrucible: [2] 96ad6640-83d8-4bf4-ad16-c20b9e10e8b4 (7b290629-99c5-4421-825c-072ef12b9607) Active Active Active ds_transition to Faulted
62092023-09-22T23:08:11.786ZINFOcrucible: [2] Transition from Active to Faulted
6210 test test::up_test::write_fail_skips_many_jobs ... ok
62112023-09-22T23:08:11.787ZINFOcrucible: Crucible stats registered with UUID: 79752912-ce8a-4d2f-a2a8-9a24046a746d
62122023-09-22T23:08:11.787ZINFOcrucible: Crucible 79752912-ce8a-4d2f-a2a8-9a24046a746d has session id: 0012ea56-2d1b-46fd-a597-6f481536df0a
62132023-09-22T23:08:11.787ZINFOcrucible: [0] 79752912-ce8a-4d2f-a2a8-9a24046a746d (c027a9d2-7afa-471b-8cc7-9f51eefe6c60) New New New ds_transition to WaitActive
62142023-09-22T23:08:11.787ZINFOcrucible: [0] Transition from New to WaitActive
62152023-09-22T23:08:11.787ZINFOcrucible: [0] 79752912-ce8a-4d2f-a2a8-9a24046a746d (c027a9d2-7afa-471b-8cc7-9f51eefe6c60) WaitActive New New ds_transition to WaitQuorum
62162023-09-22T23:08:11.787ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
62172023-09-22T23:08:11.787ZINFOcrucible: [0] 79752912-ce8a-4d2f-a2a8-9a24046a746d (c027a9d2-7afa-471b-8cc7-9f51eefe6c60) WaitQuorum New New ds_transition to Active
62182023-09-22T23:08:11.787ZINFOcrucible: [0] Transition from WaitQuorum to Active
62192023-09-22T23:08:11.787ZINFOcrucible: [1] 79752912-ce8a-4d2f-a2a8-9a24046a746d (c027a9d2-7afa-471b-8cc7-9f51eefe6c60) Active New New ds_transition to WaitActive
62202023-09-22T23:08:11.787ZINFOcrucible: [1] Transition from New to WaitActive
62212023-09-22T23:08:11.787ZINFOcrucible: [1] 79752912-ce8a-4d2f-a2a8-9a24046a746d (c027a9d2-7afa-471b-8cc7-9f51eefe6c60) Active WaitActive New ds_transition to WaitQuorum
62222023-09-22T23:08:11.787ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
62232023-09-22T23:08:11.787ZINFOcrucible: [1] 79752912-ce8a-4d2f-a2a8-9a24046a746d (c027a9d2-7afa-471b-8cc7-9f51eefe6c60) Active WaitQuorum New ds_transition to Active
62242023-09-22T23:08:11.787ZINFOcrucible: [1] Transition from WaitQuorum to Active
62252023-09-22T23:08:11.787ZINFOcrucible: [2] 79752912-ce8a-4d2f-a2a8-9a24046a746d (c027a9d2-7afa-471b-8cc7-9f51eefe6c60) Active Active New ds_transition to WaitActive
62262023-09-22T23:08:11.787ZINFOcrucible: [2] Transition from New to WaitActive
62272023-09-22T23:08:11.787ZINFOcrucible: [2] 79752912-ce8a-4d2f-a2a8-9a24046a746d (c027a9d2-7afa-471b-8cc7-9f51eefe6c60) Active Active WaitActive ds_transition to WaitQuorum
62282023-09-22T23:08:11.787ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
62292023-09-22T23:08:11.787ZINFOcrucible: [2] 79752912-ce8a-4d2f-a2a8-9a24046a746d (c027a9d2-7afa-471b-8cc7-9f51eefe6c60) Active Active WaitQuorum ds_transition to Active
62302023-09-22T23:08:11.787ZINFOcrucible: [2] Transition from WaitQuorum to Active
62312023-09-22T23:08:11.787ZINFOcrucible: 79752912-ce8a-4d2f-a2a8-9a24046a746d is now active with session: c027a9d2-7afa-471b-8cc7-9f51eefe6c60
62322023-09-22T23:08:11.787ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
62332023-09-22T23:08:11.787ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 10, work: Write { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, InProgress]), ack_status: AckReady, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
62342023-09-22T23:08:11.787ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
62352023-09-22T23:08:11.787ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
62362023-09-22T23:08:11.787ZINFOcrucible: [1] 79752912-ce8a-4d2f-a2a8-9a24046a746d (c027a9d2-7afa-471b-8cc7-9f51eefe6c60) Active Active Active ds_transition to Faulted
62372023-09-22T23:08:11.787ZINFOcrucible: [1] Transition from Active to Faulted
6238 test test::up_test::write_fail_skips_new_jobs ... ok
62392023-09-22T23:08:11.788ZINFOcrucible: Crucible stats registered with UUID: 63029047-35d1-4ade-9268-107c50cf3afa
62402023-09-22T23:08:11.788ZINFOcrucible: Crucible 63029047-35d1-4ade-9268-107c50cf3afa has session id: ed76b75f-9c74-481b-9727-a52c6e51f95f
62412023-09-22T23:08:11.788ZINFOcrucible: 63029047-35d1-4ade-9268-107c50cf3afa is now active with session: 477d83cc-37f0-408d-bb6d-d256eecf22be
62422023-09-22T23:08:11.788ZINFOcrucible: [0] 63029047-35d1-4ade-9268-107c50cf3afa (477d83cc-37f0-408d-bb6d-d256eecf22be) New New New ds_transition to WaitActive
62432023-09-22T23:08:11.788ZINFOcrucible: [0] Transition from New to WaitActive
62442023-09-22T23:08:11.788ZINFOcrucible: [0] 63029047-35d1-4ade-9268-107c50cf3afa (477d83cc-37f0-408d-bb6d-d256eecf22be) WaitActive New New ds_transition to WaitQuorum
62452023-09-22T23:08:11.788ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
62462023-09-22T23:08:11.788ZINFOcrucible: [0] 63029047-35d1-4ade-9268-107c50cf3afa (477d83cc-37f0-408d-bb6d-d256eecf22be) WaitQuorum New New ds_transition to Active
62472023-09-22T23:08:11.788ZINFOcrucible: [0] Transition from WaitQuorum to Active
62482023-09-22T23:08:11.788ZINFOcrucible: [1] 63029047-35d1-4ade-9268-107c50cf3afa (477d83cc-37f0-408d-bb6d-d256eecf22be) Active New New ds_transition to WaitActive
62492023-09-22T23:08:11.788ZINFOcrucible: [1] Transition from New to WaitActive
62502023-09-22T23:08:11.788ZINFOcrucible: [1] 63029047-35d1-4ade-9268-107c50cf3afa (477d83cc-37f0-408d-bb6d-d256eecf22be) Active WaitActive New ds_transition to WaitQuorum
62512023-09-22T23:08:11.788ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
62522023-09-22T23:08:11.788ZINFOcrucible: [1] 63029047-35d1-4ade-9268-107c50cf3afa (477d83cc-37f0-408d-bb6d-d256eecf22be) Active WaitQuorum New ds_transition to Active
62532023-09-22T23:08:11.788ZINFOcrucible: [1] Transition from WaitQuorum to Active
62542023-09-22T23:08:11.788ZINFOcrucible: [2] 63029047-35d1-4ade-9268-107c50cf3afa (477d83cc-37f0-408d-bb6d-d256eecf22be) Active Active New ds_transition to WaitActive
62552023-09-22T23:08:11.788ZINFOcrucible: [2] Transition from New to WaitActive
62562023-09-22T23:08:11.788ZINFOcrucible: [2] 63029047-35d1-4ade-9268-107c50cf3afa (477d83cc-37f0-408d-bb6d-d256eecf22be) Active Active WaitActive ds_transition to WaitQuorum
62572023-09-22T23:08:11.788ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
62582023-09-22T23:08:11.788ZINFOcrucible: [2] 63029047-35d1-4ade-9268-107c50cf3afa (477d83cc-37f0-408d-bb6d-d256eecf22be) Active Active WaitQuorum ds_transition to Active
62592023-09-22T23:08:11.788ZINFOcrucible: [2] Transition from WaitQuorum to Active
62602023-09-22T23:08:11.788ZINFOcrucible: [1] 63029047-35d1-4ade-9268-107c50cf3afa (477d83cc-37f0-408d-bb6d-d256eecf22be) Active Active Active ds_transition to Faulted
62612023-09-22T23:08:11.788ZINFOcrucible: [1] Transition from Active to Faulted
6262 test test::up_test::write_single_skip ... ok
62632023-09-22T23:08:11.789ZINFOcrucible: Crucible stats registered with UUID: 66e232ce-a5ce-4efc-a04a-375d43621361
62642023-09-22T23:08:11.789ZINFOcrucible: Crucible 66e232ce-a5ce-4efc-a04a-375d43621361 has session id: 5eb51674-bb08-490c-ae56-70af044f1f67
62652023-09-22T23:08:11.789ZINFOcrucible: 66e232ce-a5ce-4efc-a04a-375d43621361 is now active with session: 28e2db0e-563f-4b11-a465-a811b0c7f927
62662023-09-22T23:08:11.789ZINFOcrucible: [0] 66e232ce-a5ce-4efc-a04a-375d43621361 (28e2db0e-563f-4b11-a465-a811b0c7f927) New New New ds_transition to WaitActive
62672023-09-22T23:08:11.789ZINFOcrucible: [0] Transition from New to WaitActive
62682023-09-22T23:08:11.789ZINFOcrucible: [0] 66e232ce-a5ce-4efc-a04a-375d43621361 (28e2db0e-563f-4b11-a465-a811b0c7f927) WaitActive New New ds_transition to WaitQuorum
62692023-09-22T23:08:11.789ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
62702023-09-22T23:08:11.789ZINFOcrucible: [0] 66e232ce-a5ce-4efc-a04a-375d43621361 (28e2db0e-563f-4b11-a465-a811b0c7f927) WaitQuorum New New ds_transition to Active
62712023-09-22T23:08:11.789ZINFOcrucible: [0] Transition from WaitQuorum to Active
62722023-09-22T23:08:11.789ZINFOcrucible: [1] 66e232ce-a5ce-4efc-a04a-375d43621361 (28e2db0e-563f-4b11-a465-a811b0c7f927) Active New New ds_transition to WaitActive
62732023-09-22T23:08:11.789ZINFOcrucible: [1] Transition from New to WaitActive
62742023-09-22T23:08:11.789ZINFOcrucible: [1] 66e232ce-a5ce-4efc-a04a-375d43621361 (28e2db0e-563f-4b11-a465-a811b0c7f927) Active WaitActive New ds_transition to WaitQuorum
62752023-09-22T23:08:11.789ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
62762023-09-22T23:08:11.789ZINFOcrucible: [1] 66e232ce-a5ce-4efc-a04a-375d43621361 (28e2db0e-563f-4b11-a465-a811b0c7f927) Active WaitQuorum New ds_transition to Active
62772023-09-22T23:08:11.789ZINFOcrucible: [1] Transition from WaitQuorum to Active
62782023-09-22T23:08:11.789ZINFOcrucible: [2] 66e232ce-a5ce-4efc-a04a-375d43621361 (28e2db0e-563f-4b11-a465-a811b0c7f927) Active Active New ds_transition to WaitActive
62792023-09-22T23:08:11.789ZINFOcrucible: [2] Transition from New to WaitActive
62802023-09-22T23:08:11.789ZINFOcrucible: [2] 66e232ce-a5ce-4efc-a04a-375d43621361 (28e2db0e-563f-4b11-a465-a811b0c7f927) Active Active WaitActive ds_transition to WaitQuorum
62812023-09-22T23:08:11.789ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
62822023-09-22T23:08:11.789ZINFOcrucible: [2] 66e232ce-a5ce-4efc-a04a-375d43621361 (28e2db0e-563f-4b11-a465-a811b0c7f927) Active Active WaitQuorum ds_transition to Active
62832023-09-22T23:08:11.789ZINFOcrucible: [2] Transition from WaitQuorum to Active
62842023-09-22T23:08:11.789ZINFOcrucible: [1] 66e232ce-a5ce-4efc-a04a-375d43621361 (28e2db0e-563f-4b11-a465-a811b0c7f927) Active Active Active ds_transition to Faulted
62852023-09-22T23:08:11.789ZINFOcrucible: [1] Transition from Active to Faulted
62862023-09-22T23:08:11.789ZINFOcrucible: [2] 66e232ce-a5ce-4efc-a04a-375d43621361 (28e2db0e-563f-4b11-a465-a811b0c7f927) Active Faulted Active ds_transition to Faulted
62872023-09-22T23:08:11.789ZINFOcrucible: [2] Transition from Active to Faulted
6288 test test::up_test::write_unwritten_double_skip ... ok
62892023-09-22T23:08:11.790ZINFOcrucible: Crucible stats registered with UUID: ff99b38c-4c8d-474d-956d-3205797ca532
62902023-09-22T23:08:11.790ZINFOcrucible: Crucible ff99b38c-4c8d-474d-956d-3205797ca532 has session id: 7e2791a2-3f3e-4452-97d1-5eaf0c1a357d
62912023-09-22T23:08:11.790ZINFOcrucible: ff99b38c-4c8d-474d-956d-3205797ca532 is now active with session: d1637da7-c607-4a1e-a7e6-5b2b3ea1094f
62922023-09-22T23:08:11.790ZINFOcrucible: [0] ff99b38c-4c8d-474d-956d-3205797ca532 (d1637da7-c607-4a1e-a7e6-5b2b3ea1094f) New New New ds_transition to WaitActive
62932023-09-22T23:08:11.790ZINFOcrucible: [0] Transition from New to WaitActive
62942023-09-22T23:08:11.790ZINFOcrucible: [0] ff99b38c-4c8d-474d-956d-3205797ca532 (d1637da7-c607-4a1e-a7e6-5b2b3ea1094f) WaitActive New New ds_transition to WaitQuorum
62952023-09-22T23:08:11.790ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
62962023-09-22T23:08:11.790ZINFOcrucible: [0] ff99b38c-4c8d-474d-956d-3205797ca532 (d1637da7-c607-4a1e-a7e6-5b2b3ea1094f) WaitQuorum New New ds_transition to Active
62972023-09-22T23:08:11.790ZINFOcrucible: [0] Transition from WaitQuorum to Active
62982023-09-22T23:08:11.790ZINFOcrucible: [1] ff99b38c-4c8d-474d-956d-3205797ca532 (d1637da7-c607-4a1e-a7e6-5b2b3ea1094f) Active New New ds_transition to WaitActive
62992023-09-22T23:08:11.790ZINFOcrucible: [1] Transition from New to WaitActive
63002023-09-22T23:08:11.790ZINFOcrucible: [1] ff99b38c-4c8d-474d-956d-3205797ca532 (d1637da7-c607-4a1e-a7e6-5b2b3ea1094f) Active WaitActive New ds_transition to WaitQuorum
63012023-09-22T23:08:11.790ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
63022023-09-22T23:08:11.790ZINFOcrucible: [1] ff99b38c-4c8d-474d-956d-3205797ca532 (d1637da7-c607-4a1e-a7e6-5b2b3ea1094f) Active WaitQuorum New ds_transition to Active
63032023-09-22T23:08:11.790ZINFOcrucible: [1] Transition from WaitQuorum to Active
63042023-09-22T23:08:11.790ZINFOcrucible: [2] ff99b38c-4c8d-474d-956d-3205797ca532 (d1637da7-c607-4a1e-a7e6-5b2b3ea1094f) Active Active New ds_transition to WaitActive
63052023-09-22T23:08:11.790ZINFOcrucible: [2] Transition from New to WaitActive
63062023-09-22T23:08:11.790ZINFOcrucible: [2] ff99b38c-4c8d-474d-956d-3205797ca532 (d1637da7-c607-4a1e-a7e6-5b2b3ea1094f) Active Active WaitActive ds_transition to WaitQuorum
63072023-09-22T23:08:11.790ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
63082023-09-22T23:08:11.790ZINFOcrucible: [2] ff99b38c-4c8d-474d-956d-3205797ca532 (d1637da7-c607-4a1e-a7e6-5b2b3ea1094f) Active Active WaitQuorum ds_transition to Active
63092023-09-22T23:08:11.790ZINFOcrucible: [2] Transition from WaitQuorum to Active
63102023-09-22T23:08:11.790ZINFOcrucible: [2] ff99b38c-4c8d-474d-956d-3205797ca532 (d1637da7-c607-4a1e-a7e6-5b2b3ea1094f) Active Active Active ds_transition to Faulted
63112023-09-22T23:08:11.790ZINFOcrucible: [2] Transition from Active to Faulted
63122023-09-22T23:08:11.790ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1010, DownstairsIO { ds_id: JobId(1010), guest_id: 19, work: WriteUnwritten { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, Skipped]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
63132023-09-22T23:08:11.790ZERROcrucible: [1] Reports error GenericError("bad") on job 1010, DownstairsIO { ds_id: JobId(1010), guest_id: 19, work: WriteUnwritten { dependencies: [], writes: [Write { eid: 0, offset: Block { value: 7, shift: 9 }, data: b"\x01", block_context: BlockContext { hash: 0, encryption_context: None } }] }, state: ClientData([Done, InProgress, Skipped]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 7 }, ImpactedAddr { extent_id: 0, block: 7 }) } = downstairs
63142023-09-22T23:08:11.790ZINFOcrucible: All done
6315 test test::up_test::write_unwritten_fail_and_skip ... ok
63162023-09-22T23:08:11.791ZINFOcrucible: Crucible stats registered with UUID: 4cb1df6d-b8a4-4fed-a080-b7c433de4b52
63172023-09-22T23:08:11.791ZINFOcrucible: Crucible 4cb1df6d-b8a4-4fed-a080-b7c433de4b52 has session id: a8d6868e-530f-486d-a724-3af44cf7c6ac
63182023-09-22T23:08:11.791ZINFOcrucible: 4cb1df6d-b8a4-4fed-a080-b7c433de4b52 is now active with session: f2c4566a-ab5e-4396-96d2-c94e90ee81ee
63192023-09-22T23:08:11.791ZINFOcrucible: [0] 4cb1df6d-b8a4-4fed-a080-b7c433de4b52 (f2c4566a-ab5e-4396-96d2-c94e90ee81ee) New New New ds_transition to WaitActive
63202023-09-22T23:08:11.791ZINFOcrucible: [0] Transition from New to WaitActive
63212023-09-22T23:08:11.791ZINFOcrucible: [0] 4cb1df6d-b8a4-4fed-a080-b7c433de4b52 (f2c4566a-ab5e-4396-96d2-c94e90ee81ee) WaitActive New New ds_transition to WaitQuorum
63222023-09-22T23:08:11.791ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
63232023-09-22T23:08:11.791ZINFOcrucible: [0] 4cb1df6d-b8a4-4fed-a080-b7c433de4b52 (f2c4566a-ab5e-4396-96d2-c94e90ee81ee) WaitQuorum New New ds_transition to Active
63242023-09-22T23:08:11.791ZINFOcrucible: [0] Transition from WaitQuorum to Active
63252023-09-22T23:08:11.791ZINFOcrucible: [1] 4cb1df6d-b8a4-4fed-a080-b7c433de4b52 (f2c4566a-ab5e-4396-96d2-c94e90ee81ee) Active New New ds_transition to WaitActive
63262023-09-22T23:08:11.791ZINFOcrucible: [1] Transition from New to WaitActive
63272023-09-22T23:08:11.791ZINFOcrucible: [1] 4cb1df6d-b8a4-4fed-a080-b7c433de4b52 (f2c4566a-ab5e-4396-96d2-c94e90ee81ee) Active WaitActive New ds_transition to WaitQuorum
63282023-09-22T23:08:11.791ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
63292023-09-22T23:08:11.791ZINFOcrucible: [1] 4cb1df6d-b8a4-4fed-a080-b7c433de4b52 (f2c4566a-ab5e-4396-96d2-c94e90ee81ee) Active WaitQuorum New ds_transition to Active
63302023-09-22T23:08:11.791ZINFOcrucible: [1] Transition from WaitQuorum to Active
63312023-09-22T23:08:11.791ZINFOcrucible: [2] 4cb1df6d-b8a4-4fed-a080-b7c433de4b52 (f2c4566a-ab5e-4396-96d2-c94e90ee81ee) Active Active New ds_transition to WaitActive
63322023-09-22T23:08:11.791ZINFOcrucible: [2] Transition from New to WaitActive
63332023-09-22T23:08:11.791ZINFOcrucible: [2] 4cb1df6d-b8a4-4fed-a080-b7c433de4b52 (f2c4566a-ab5e-4396-96d2-c94e90ee81ee) Active Active WaitActive ds_transition to WaitQuorum
63342023-09-22T23:08:11.791ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
63352023-09-22T23:08:11.791ZINFOcrucible: [2] 4cb1df6d-b8a4-4fed-a080-b7c433de4b52 (f2c4566a-ab5e-4396-96d2-c94e90ee81ee) Active Active WaitQuorum ds_transition to Active
63362023-09-22T23:08:11.791ZINFOcrucible: [2] Transition from WaitQuorum to Active
63372023-09-22T23:08:11.791ZINFOcrucible: [1] 4cb1df6d-b8a4-4fed-a080-b7c433de4b52 (f2c4566a-ab5e-4396-96d2-c94e90ee81ee) Active Active Active ds_transition to Faulted
63382023-09-22T23:08:11.791ZINFOcrucible: [1] Transition from Active to Faulted
6339 test test::up_test::write_unwritten_single_skip ... ok
6340 test test_buffer_len ... ok
6341 test test_buffer_len_after_clone ... ok
6342 test test_buffer_len_index_overflow - should panic ... ok
6343 test test_buffer_len_over_block_size ... ok
6344 test test_return_iops ... ok
6345 test volume::test::construct_file_block_io ... ok
6346 test volume::test::test_affected_subvolumes ... ok
6347 test volume::test::test_correct_blocks_returned_multiple_subvolumes_1 ... ok
6348 test volume::test::test_correct_blocks_returned_multiple_subvolumes_2 ... ok
6349 test volume::test::test_correct_blocks_returned_multiple_subvolumes_3 ... ok
6350 test volume::test::test_correct_blocks_returned_one_subvolume ... ok
6351 test volume::test::test_correct_blocks_returned_three_subvolumes ... ok
6352 test volume::test::test_drop_then_recreate_test ... ok
6353 test volume::test::test_in_memory_block_io ... ok
6354 test volume::test::test_no_read_only_parent_for_lba_range ... ok
6355 test volume::test::test_out_of_bounds ... ok
63562023-09-22T23:08:12.425ZINFOcrucible: responded to ping downstairs = 1
63572023-09-22T23:08:12.426ZINFOcrucible: responded to ping downstairs = 1
63582023-09-22T23:08:12.442ZINFOcrucible: Waiting for 4 jobs (currently 3)
63592023-09-22T23:08:12.442ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
63602023-09-22T23:08:12.569ZINFOcrucible: Finally, move the ReOpen job forward
63612023-09-22T23:08:12.569ZINFOcrucible: Now ACK the reopen job
63622023-09-22T23:08:12.569ZWARNcrucible: RE:0 Bailing with error
63632023-09-22T23:08:12.570ZINFOcrucible: err:1 or:0
63642023-09-22T23:08:12.570ZINFOcrucible: Crucible stats registered with UUID: ea9c2828-4faa-4745-9992-cc45fe6bf51b
63652023-09-22T23:08:12.570ZINFOcrucible: Crucible ea9c2828-4faa-4745-9992-cc45fe6bf51b has session id: 2f505f29-5b73-4e36-b406-71bbf06e30ad
63662023-09-22T23:08:12.570ZINFOcrucible: [0] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) New New New ds_transition to WaitActive
63672023-09-22T23:08:12.570ZINFOcrucible: [0] Transition from New to WaitActive
63682023-09-22T23:08:12.570ZINFOcrucible: [0] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) WaitActive New New ds_transition to WaitQuorum
6369 {"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30{"msg":","time":"2023-09-22T23:08:12.570701841Z[0] DS Reports error Err(GenericError("\",bad\""hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
6370 {"msg":"[0] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) WaitQuorum New New ds_transition to Active","v":0)) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(2), repair_downstairs: [ClientId(1)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) },""name":",crucible"","v"level"::300,"name":"crucible","level":50,"time":"2023-09-22T23:08:12.570744763Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
6371 {"msg":","time"[0] Transition from WaitQuorum to Active:"","v":02023-09-22T23:08:12.570750532Z","name",:""crucible"hostname",:""level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs"}
6372 ,"time":"2023-09-22T23:08:12.570783378Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","[0] Reports error GenericError(\"pid":bad4291\"}
6373 {"msg":"[1] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) Active New New ds_transition to WaitActive","v":0,"name":"crucible","level":30) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(2), repair_downstairs: [ClientId(1)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }","v":0,"name":"crucible","level":50,"time":"2023-09-22T23:08:12.57082591Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",",pid"":time"4291:"}
6374 2023-09-22T23:08:12.570833444Z","{hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":[1] Transition from New to WaitActive4291",,""":v"":0downstairs,""}name"
6375 :"crucible","{level":30"msg":"[0] client skip 2 in process jobs because fault","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:12.570871833Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"time":}"
6376 2023-09-22T23:08:12.57088064Z","{hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs"}
6377 [1] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) Active WaitActive New ds_transition to WaitQuorum","v{":0,""msg"name"::""crucible","[0] changed 1 jobs to fault skipped"level",:"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:12.570918465Z",,""hostname"time"::""2023-09-22T23:08:12.570922871Z"ip-10-150-1-74.us-west-2.compute.internal",,""pid"hostname"::4291"}
6378 ip-10-150-1-74.us-west-2.compute.internal","pid":{4291,""":"msg":"downstairs"}
6379 [1] Transition from WaitActive to WaitQuorum","{v":0,""name"msg"::""crucible","level":30[0] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) Active LiveRepair Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:12.570965018Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,4291"time":"}
6380 2023-09-22T23:08:12.570974091Z","hostname{":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
6381 [1] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) Active WaitQuorum New ds_transition to Active","v":0,{"name":"crucible""msg",":"level":30[0] Transition from Active to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:12.571014615Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,}"
6382 time":"{2023-09-22T23:08:12.571021778Z",""msg":hostname"":"[1] Transition from WaitQuorum to Active"ip-10-150-1-74.us-west-2.compute.internal",,""v"pid"::04291,"}name
6383 ":"crucible"{,"level"":msg30":"Now ACK the close job","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:12.5710622Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","pid":2023-09-22T23:08:12.571069067Z"4291,"}hostname"
6384 :"{ip-10-150-1-74.us-west-2.compute.internal",""msg"pid"::4291"}
6385 {"[2] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) Active Active New ds_transition to WaitActive"msg",:""v":0,"Waiting for 3 jobs (currently 2)"name",:""v":crucible"0,","level"name"::30"crucible","level":30,"time":"2023-09-22T23:08:12.571112777Z",",hostname"":time"":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:12.571115454Z,"",pid"":4291hostname":"}
6386 ip-10-150-1-74.us-west-2.compute.internal","pid"{:4291"}
6387 msg":"{[2] Transition from New to WaitActive","v"":msg"0:,""name":"crucible"Extent 0 close id:1000 Failed: Error: bad",","level"v"::300,"name":"crucible","level":50,"time":"2023-09-22T23:08:12.571161046Z",","time":hostname"":"2023-09-22T23:08:12.571164917Z",ip-10-150-1-74.us-west-2.compute.internal"","hostnamepid""::"4291}
6388 ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}
6389 "msg":"{"msg":"[1] client skip 2 in process jobs because fault","v":0,[2] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) Active Active WaitActive ds_transition to WaitQuorum"","namev""::"0,crucible"","name":level"":30crucible","level":30,"time":","2023-09-22T23:08:12.571210519Z"time",:""hostname":"2023-09-22T23:08:12.5712126Z","ip-10-150-1-74.us-west-2.compute.internal"hostname,"":pid"":4291,"":ip-10-150-1-74.us-west-2.compute.internal"","downstairs"pid"}:
6390 4291}
6391 {"{msg":""msg":"[1] changed 1 jobs to fault skipped","v":0,"[2] Transition from WaitActive to WaitQuorum"name":,""vcrucible"",:"0level,"":name30":"crucible","level":30,"time":"2023-09-22T23:08:12.571260186Z",",hostname"":"time":"ip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:08:12.571263196Z"",,""hostname"pid"::"4291,"":"ip-10-150-1-74.us-west-2.compute.internal"downstairs",}"
6392 pid":4291{}
6393 "msg":"{"msg":"[1] bb7a424d-9b30-4a13-96a7-a41c0f6628b1 (5a93f758-ea9a-4a78-ad67-6f82f309f96e) Faulted LiveRepair Active ds_transition to Faulted","v":0,"name"[2] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) Active Active WaitQuorum ds_transition to Active":","crucible"v",:"0level",:"30name":"crucible","level":30,"time":"2023-09-22T23:08:12.571312175Z","hostname",:""time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:12.571315449Z",","pid":hostname"4291:"}
6394 ip-10-150-1-74.us-west-2.compute.internal","pid{":4291"msg"}:
6395 "{[1] Transition from LiveRepair to Faulted"","msg"v"::"0,"name":"[2] Transition from WaitQuorum to Active"crucible",,""v":level0":,"30name":"crucible","level":30,"time":"2023-09-22T23:08:12.571359089Z","hostname",:""time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:12.571362359Z",","pid":hostname"4291:"}
6396 ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}"
6397 msg":"{RE:0 Wait for result from repair command 1001:2""msg",:""v":0,"name":"crucible","level":30ea9c2828-4faa-4745-9992-cc45fe6bf51b is now active with session: 62b807af-88b8-4100-a47a-2cd55774bf63","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:12.571401213Z",",time":""hostname":"2023-09-22T23:08:12.571406838Z","ip-10-150-1-74.us-west-2.compute.internal"hostname,"":"pid":4291ip-10-150-1-74.us-west-2.compute.internal}"
6398 ,"pid":4291}
63992023-09-22T23:08:12.571ZINFOcrucible: [0] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) Active Active Active ds_transition to Faulted
64002023-09-22T23:08:12.571ZINFOcrucible: [0] Transition from Active to Faulted
64012023-09-22T23:08:12.571ZINFOcrucible: [0] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) Faulted Active Active ds_transition to LiveRepairReady
64022023-09-22T23:08:12.571ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady
64032023-09-22T23:08:12.571ZINFOcrucible: [0] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) LiveRepairReady Active Active ds_transition to LiveRepair
64042023-09-22T23:08:12.571ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair
64052023-09-22T23:08:12.571ZINFOcrucible: Waiting for Close + ReOpen jobs
64062023-09-22T23:08:12.571ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
64072023-09-22T23:08:12.571ZINFOcrucible: RE:0 close id:1000 queued, notify DS
64082023-09-22T23:08:12.571ZINFOcrucible: RE:0 Wait for result from close command 1000:1
64092023-09-22T23:08:12.574ZINFOcrucible: Waiting for 3 jobs (currently 2)
64102023-09-22T23:08:12.574ZINFOcrucible: No repair needed for extent 0 = downstairs
64112023-09-22T23:08:12.574ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
64122023-09-22T23:08:12.687ZINFOcrucible: Now ACK the close job
64132023-09-22T23:08:12.688ZINFOcrucible: Waiting for 3 jobs (currently 2)
64142023-09-22T23:08:12.688ZINFOcrucible: No repair needed for extent 0 = downstairs
64152023-09-22T23:08:12.688ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
6416 test volume::test::test_parent_initialized_read_only_region_one_subvolume ... ok
64172023-09-22T23:08:13.572ZINFOcrucible: Waiting for 3 jobs (currently 2)
64182023-09-22T23:08:13.572ZINFOcrucible: No repair needed for extent 0 = downstairs
64192023-09-22T23:08:13.572ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
64202023-09-22T23:08:13.572ZINFOcrucible: Waiting for 4 jobs (currently 3)
64212023-09-22T23:08:13.572ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
64222023-09-22T23:08:13.572ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
64232023-09-22T23:08:13.576ZINFOcrucible: Waiting for 4 jobs (currently 3)
64242023-09-22T23:08:13.576ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
64252023-09-22T23:08:13.689ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
64262023-09-22T23:08:13.689ZERROcrucible: [2] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
64272023-09-22T23:08:13.689ZINFOcrucible: [2] client skip 3 in process jobs because fault = downstairs
64282023-09-22T23:08:13.689ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
64292023-09-22T23:08:13.689ZINFOcrucible: [2] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) LiveRepair Active Active ds_transition to Faulted
64302023-09-22T23:08:13.689ZINFOcrucible: [2] Transition from Active to Faulted
64312023-09-22T23:08:13.689ZINFOcrucible: Waiting for 4 jobs (currently 3)
64322023-09-22T23:08:13.689ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
64332023-09-22T23:08:13.689ZINFOcrucible: [0] client skip 3 in process jobs because fault = downstairs
64342023-09-22T23:08:13.689ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
64352023-09-22T23:08:13.689ZINFOcrucible: [0] c2759bf2-aaf5-4d2a-82e7-2795ddf1ec4e (c90c1fcd-3603-46fd-b4f7-87867e35920a) LiveRepair Active Faulted ds_transition to Faulted
64362023-09-22T23:08:13.689ZINFOcrucible: [0] Transition from LiveRepair to Faulted
64372023-09-22T23:08:13.689ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
64382023-09-22T23:08:14.572ZINFOcrucible: Now move the NoOp job forward
64392023-09-22T23:08:14.572ZINFOcrucible: Now ACK the NoOp job
64402023-09-22T23:08:14.572ZINFOcrucible: Finally, move the ReOpen job forward
64412023-09-22T23:08:14.572ZINFOcrucible: Now ACK the Reopen job
64422023-09-22T23:08:14.572ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
64432023-09-22T23:08:14.572ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
64442023-09-22T23:08:14.572ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
64452023-09-22T23:08:14.572ZWARNcrucible: RE:0 Bailing with error
6446 ----------------------------------------------------------------
6447 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
6448 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
6449 1 Acked 1000 FClose 0 Err Done Done false
6450 2 Acked 1001 NoOp 0 Skip Skip Done false
6451 3 Acked 1002 NoOp 0 Skip Skip Done false
6452 4 Acked 1003 Reopen 0 Skip Skip Done false
6453 STATES DS:0 DS:1 DS:2 TOTAL
6454 New 0 0 0 0
6455 Sent 0 0 0 0
6456 Done 0 1 4 5
6457 Skipped 3 3 0 6
6458 Error 1 0 0 1
6459 Last Flush: 0 0 0
6460 Downstairs last five completed:
6461 Upstairs last five completed: 4 3 2 1
64622023-09-22T23:08:14.573ZINFOcrucible: Waiting for 4 jobs (currently 3)
64632023-09-22T23:08:14.573ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
64642023-09-22T23:08:14.573ZINFOcrucible: Crucible stats registered with UUID: 4bf3db3d-50a4-4818-86ec-4e619b46b97c
64652023-09-22T23:08:14.573ZINFOcrucible: Crucible 4bf3db3d-50a4-4818-86ec-4e619b46b97c has session id: 62650f10-827e-49f2-8652-65f6b8972ff8
64662023-09-22T23:08:14.573ZINFOcrucible: [0] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) New New New ds_transition to WaitActive
64672023-09-22T23:08:14.573ZINFOcrucible: [0] Transition from New to WaitActive
64682023-09-22T23:08:14.573ZINFOcrucible: [0] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) WaitActive New New ds_transition to WaitQuorum
64692023-09-22T23:08:14.574ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
64702023-09-22T23:08:14.574ZINFOcrucible: [0] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) WaitQuorum New New ds_transition to Active
64712023-09-22T23:08:14.574ZINFOcrucible: [0] Transition from WaitQuorum to Active
64722023-09-22T23:08:14.574ZINFOcrucible: [1] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) Active New New ds_transition to WaitActive
64732023-09-22T23:08:14.574ZINFOcrucible: [1] Transition from New to WaitActive
64742023-09-22T23:08:14.574ZINFOcrucible: [1] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) Active WaitActive New ds_transition to WaitQuorum
64752023-09-22T23:08:14.574ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
64762023-09-22T23:08:14.574ZINFOcrucible: [1] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) Active WaitQuorum New ds_transition to Active
64772023-09-22T23:08:14.574ZINFOcrucible: [1] Transition from WaitQuorum to Active
64782023-09-22T23:08:14.574ZINFOcrucible: [2] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) Active Active New ds_transition to WaitActive
64792023-09-22T23:08:14.574ZINFOcrucible: [2] Transition from New to WaitActive
64802023-09-22T23:08:14.574ZINFOcrucible: [2] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) Active Active WaitActive ds_transition to WaitQuorum
64812023-09-22T23:08:14.574ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
64822023-09-22T23:08:14.574ZINFOcrucible: [2] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) Active Active WaitQuorum ds_transition to Active
64832023-09-22T23:08:14.574ZINFOcrucible: [2] Transition from WaitQuorum to Active
64842023-09-22T23:08:14.574ZINFOcrucible: 4bf3db3d-50a4-4818-86ec-4e619b46b97c is now active with session: bf820409-3b29-4b81-8028-3c3affe54370
64852023-09-22T23:08:14.574ZINFOcrucible: [1] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) Active Active Active ds_transition to Faulted
64862023-09-22T23:08:14.574ZINFOcrucible: [1] Transition from Active to Faulted
64872023-09-22T23:08:14.574ZINFOcrucible: [1] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) Active Faulted Active ds_transition to LiveRepairReady
64882023-09-22T23:08:14.574ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
64892023-09-22T23:08:14.574ZINFOcrucible: [1] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) Active LiveRepairReady Active ds_transition to LiveRepair
64902023-09-22T23:08:14.574ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
64912023-09-22T23:08:14.574ZINFOcrucible: Waiting for Close + ReOpen jobs
64922023-09-22T23:08:14.574ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
64932023-09-22T23:08:14.574ZINFOcrucible: RE:0 close id:1000 queued, notify DS
64942023-09-22T23:08:14.574ZINFOcrucible: RE:0 Wait for result from close command 1000:1
64952023-09-22T23:08:14.577ZINFOcrucible: Now move the NoOp job forward
64962023-09-22T23:08:14.577ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
64972023-09-22T23:08:14.577ZERROcrucible: [0] Reports error GenericError("bad") on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
64982023-09-22T23:08:14.577ZINFOcrucible: [0] client skip 4 in process jobs because fault = downstairs
64992023-09-22T23:08:14.577ZINFOcrucible: [0] changed 0 jobs to fault skipped = downstairs
65002023-09-22T23:08:14.577ZINFOcrucible: [0] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) Active LiveRepair Active ds_transition to Faulted
65012023-09-22T23:08:14.577ZINFOcrucible: [0] Transition from Active to Faulted
65022023-09-22T23:08:14.577ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
65032023-09-22T23:08:14.577ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
65042023-09-22T23:08:14.577ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
65052023-09-22T23:08:14.577ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
65062023-09-22T23:08:14.577ZINFOcrucible: [1] e5daa207-6492-4466-92d0-cc614d6c56ce (8a6b2194-d5ad-4cea-9c66-5ded3e31a721) Faulted LiveRepair Active ds_transition to Faulted
65072023-09-22T23:08:14.577ZINFOcrucible: [1] Transition from LiveRepair to Faulted
65082023-09-22T23:08:14.577ZWARNcrucible: RE:0 Bailing with error
65092023-09-22T23:08:14.578ZINFOcrucible: Crucible stats registered with UUID: d53fef88-17c8-441f-9cbe-e9f976239c5e
65102023-09-22T23:08:14.578ZINFOcrucible: Crucible d53fef88-17c8-441f-9cbe-e9f976239c5e has session id: 1717818e-3bcd-47d2-8b65-284c19fd759f
65112023-09-22T23:08:14.578ZINFOcrucible: [0] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) New New New ds_transition to WaitActive
65122023-09-22T23:08:14.578ZINFOcrucible: [0] Transition from New to WaitActive
65132023-09-22T23:08:14.578ZINFOcrucible: [0] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) WaitActive New New ds_transition to WaitQuorum
65142023-09-22T23:08:14.578ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
65152023-09-22T23:08:14.578ZINFOcrucible: [0] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) WaitQuorum New New ds_transition to Active
65162023-09-22T23:08:14.578ZINFOcrucible: [0] Transition from WaitQuorum to Active
65172023-09-22T23:08:14.578ZINFOcrucible: [1] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) Active New New ds_transition to WaitActive
65182023-09-22T23:08:14.578ZINFOcrucible: [1] Transition from New to WaitActive
65192023-09-22T23:08:14.578ZINFOcrucible: [1] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) Active WaitActive New ds_transition to WaitQuorum
65202023-09-22T23:08:14.578ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
65212023-09-22T23:08:14.578ZINFOcrucible: [1] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) Active WaitQuorum New ds_transition to Active
65222023-09-22T23:08:14.578ZINFOcrucible: [1] Transition from WaitQuorum to Active
65232023-09-22T23:08:14.578ZINFOcrucible: [2] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) Active Active New ds_transition to WaitActive
65242023-09-22T23:08:14.578ZINFOcrucible: [2] Transition from New to WaitActive
65252023-09-22T23:08:14.578ZINFOcrucible: [2] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) Active Active WaitActive ds_transition to WaitQuorum
65262023-09-22T23:08:14.578ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
65272023-09-22T23:08:14.578ZINFOcrucible: [2] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) Active Active WaitQuorum ds_transition to Active
65282023-09-22T23:08:14.578ZINFOcrucible: [2] Transition from WaitQuorum to Active
65292023-09-22T23:08:14.578ZINFOcrucible: d53fef88-17c8-441f-9cbe-e9f976239c5e is now active with session: 36f4bb72-2a32-45e5-94a6-6cee1f536fe0
65302023-09-22T23:08:14.578ZINFOcrucible: [1] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) Active Active Active ds_transition to Faulted
65312023-09-22T23:08:14.578ZINFOcrucible: [1] Transition from Active to Faulted
65322023-09-22T23:08:14.578ZINFOcrucible: [1] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) Active Faulted Active ds_transition to LiveRepairReady
65332023-09-22T23:08:14.578ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
65342023-09-22T23:08:14.578ZINFOcrucible: [1] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) Active LiveRepairReady Active ds_transition to LiveRepair
65352023-09-22T23:08:14.578ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
65362023-09-22T23:08:14.579ZINFOcrucible: Waiting for Close + ReOpen jobs
65372023-09-22T23:08:14.579ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
65382023-09-22T23:08:14.579ZINFOcrucible: RE:0 close id:1000 queued, notify DS
65392023-09-22T23:08:14.579ZINFOcrucible: RE:0 Wait for result from close command 1000:1
65402023-09-22T23:08:14.689ZINFOcrucible: Now move the NoOp job forward
65412023-09-22T23:08:14.689ZINFOcrucible: Now ACK the NoOp job
65422023-09-22T23:08:14.689ZINFOcrucible: Finally, move the ReOpen job forward
65432023-09-22T23:08:14.689ZINFOcrucible: Now ACK the Reopen job
65442023-09-22T23:08:14.690ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
65452023-09-22T23:08:14.690ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
65462023-09-22T23:08:14.690ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
65472023-09-22T23:08:14.690ZWARNcrucible: RE:0 Bailing with error
65482023-09-22T23:08:14.690ZINFOcrucible: Crucible stats registered with UUID: 678cfcb8-a5e0-4920-b486-0e81743672c3
65492023-09-22T23:08:14.690ZINFOcrucible: Crucible 678cfcb8-a5e0-4920-b486-0e81743672c3 has session id: 033e8bcc-deed-4cb7-8711-a570c88db5ff
65502023-09-22T23:08:14.690ZINFOcrucible: [0] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) New New New ds_transition to WaitActive
65512023-09-22T23:08:14.690ZINFOcrucible: [0] Transition from New to WaitActive
65522023-09-22T23:08:14.690ZINFOcrucible: [0] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) WaitActive New New ds_transition to WaitQuorum
65532023-09-22T23:08:14.690ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
65542023-09-22T23:08:14.690ZINFOcrucible: [0] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) WaitQuorum New New ds_transition to Active
65552023-09-22T23:08:14.690ZINFOcrucible: [0] Transition from WaitQuorum to Active
65562023-09-22T23:08:14.690ZINFOcrucible: [1] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) Active New New ds_transition to WaitActive
65572023-09-22T23:08:14.690ZINFOcrucible: [1] Transition from New to WaitActive
65582023-09-22T23:08:14.690ZINFOcrucible: [1] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) Active WaitActive New ds_transition to WaitQuorum
65592023-09-22T23:08:14.690ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
65602023-09-22T23:08:14.690ZINFOcrucible: [1] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) Active WaitQuorum New ds_transition to Active
65612023-09-22T23:08:14.690ZINFOcrucible: [1] Transition from WaitQuorum to Active
65622023-09-22T23:08:14.691ZINFOcrucible: [2] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) Active Active New ds_transition to WaitActive
65632023-09-22T23:08:14.691ZINFOcrucible: [2] Transition from New to WaitActive
65642023-09-22T23:08:14.691ZINFOcrucible: [2] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) Active Active WaitActive ds_transition to WaitQuorum
65652023-09-22T23:08:14.691ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
65662023-09-22T23:08:14.691ZINFOcrucible: [2] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) Active Active WaitQuorum ds_transition to Active
65672023-09-22T23:08:14.691ZINFOcrucible: [2] Transition from WaitQuorum to Active
65682023-09-22T23:08:14.691ZINFOcrucible: 678cfcb8-a5e0-4920-b486-0e81743672c3 is now active with session: d9d086f6-80b5-4805-9193-71fbf1abd250
65692023-09-22T23:08:14.691ZINFOcrucible: [1] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) Active Active Active ds_transition to Faulted
65702023-09-22T23:08:14.691ZINFOcrucible: [1] Transition from Active to Faulted
65712023-09-22T23:08:14.691ZINFOcrucible: [1] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) Active Faulted Active ds_transition to LiveRepairReady
65722023-09-22T23:08:14.691ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
65732023-09-22T23:08:14.691ZINFOcrucible: [1] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) Active LiveRepairReady Active ds_transition to LiveRepair
65742023-09-22T23:08:14.691ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
65752023-09-22T23:08:14.691ZINFOcrucible: Waiting for Close + ReOpen jobs
65762023-09-22T23:08:14.691ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
65772023-09-22T23:08:14.691ZINFOcrucible: RE:0 close id:1000 queued, notify DS
65782023-09-22T23:08:14.691ZINFOcrucible: RE:0 Wait for result from close command 1000:1
6579 test volume::test::test_parent_initialized_read_only_region_with_multiple_sub_volumes_1 ... ok
6580 ----------------------------------------------------------------
6581 Crucible gen:0 GIO:true work queues: Upstairs:2 downstairs:4
6582 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
6583 1 Acked 1000 FClose 0 Done Done Done false
6584 2 Acked 1001 NoOp 0 Done Done Done false
6585 3 NotAcked 1002 NoOp 0 New New New false
6586 4 NotAcked 1003 Reopen 0 New New New false
6587 STATES DS:0 DS:1 DS:2 TOTAL
6588 New 2 2 2 6
6589 Sent 0 0 0 0
6590 Done 2 2 2 6
6591 Skipped 0 0 0 0
6592 Error 0 0 0 0
6593 Last Flush: 0 0 0
6594 Downstairs last five completed:
6595 Upstairs last five completed: 2 1
65962023-09-22T23:08:15.444ZINFOcrucible: Now move the NoOp job forward
65972023-09-22T23:08:15.444ZINFOcrucible: Finally, move the ReOpen job forward
65982023-09-22T23:08:15.444ZINFOcrucible: Now ACK the reopen job
65992023-09-22T23:08:15.444ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
66002023-09-22T23:08:15.444ZINFOcrucible: jobs are: 4
66012023-09-22T23:08:15.444ZINFOcrucible: Crucible stats registered with UUID: 65a3de01-2768-4988-b51a-36173a07e08d
66022023-09-22T23:08:15.444ZINFOcrucible: Crucible 65a3de01-2768-4988-b51a-36173a07e08d has session id: dbce7671-3ed0-4ad9-a0ef-2beabf4b973c
66032023-09-22T23:08:15.445ZINFOcrucible: [0] 65a3de01-2768-4988-b51a-36173a07e08d (178fe1c3-e441-4bd3-8a70-92b9df375450) New New New ds_transition to WaitActive
66042023-09-22T23:08:15.445ZINFOcrucible: [0] Transition from New to WaitActive
66052023-09-22T23:08:15.445ZINFOcrucible: [0] 65a3de01-2768-4988-b51a-36173a07e08d (178fe1c3-e441-4bd3-8a70-92b9df375450) WaitActive New New ds_transition to WaitQuorum
66062023-09-22T23:08:15.445ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
66072023-09-22T23:08:15.445ZINFOcrucible: [0] 65a3de01-2768-4988-b51a-36173a07e08d (178fe1c3-e441-4bd3-8a70-92b9df375450) WaitQuorum New New ds_transition to Active
66082023-09-22T23:08:15.445ZINFOcrucible: [0] Transition from WaitQuorum to Active
66092023-09-22T23:08:15.445ZINFOcrucible: [1] 65a3de01-2768-4988-b51a-36173a07e08d (178fe1c3-e441-4bd3-8a70-92b9df375450) Active New New ds_transition to WaitActive
66102023-09-22T23:08:15.445ZINFOcrucible: [1] Transition from New to WaitActive
66112023-09-22T23:08:15.445ZINFOcrucible: [1] 65a3de01-2768-4988-b51a-36173a07e08d (178fe1c3-e441-4bd3-8a70-92b9df375450) Active WaitActive New ds_transition to WaitQuorum
66122023-09-22T23:08:15.445ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
66132023-09-22T23:08:15.445ZINFOcrucible: [1] 65a3de01-2768-4988-b51a-36173a07e08d (178fe1c3-e441-4bd3-8a70-92b9df375450) Active WaitQuorum New ds_transition to Active
66142023-09-22T23:08:15.445ZINFOcrucible: [1] Transition from WaitQuorum to Active
66152023-09-22T23:08:15.445ZINFOcrucible: [2] 65a3de01-2768-4988-b51a-36173a07e08d (178fe1c3-e441-4bd3-8a70-92b9df375450) Active Active New ds_transition to WaitActive
66162023-09-22T23:08:15.445ZINFOcrucible: [2] Transition from New to WaitActive
66172023-09-22T23:08:15.445ZINFOcrucible: [2] 65a3de01-2768-4988-b51a-36173a07e08d (178fe1c3-e441-4bd3-8a70-92b9df375450) Active Active WaitActive ds_transition to WaitQuorum
66182023-09-22T23:08:15.445ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
66192023-09-22T23:08:15.445ZINFOcrucible: [2] 65a3de01-2768-4988-b51a-36173a07e08d (178fe1c3-e441-4bd3-8a70-92b9df375450) Active Active WaitQuorum ds_transition to Active
66202023-09-22T23:08:15.445ZINFOcrucible: [2] Transition from WaitQuorum to Active
66212023-09-22T23:08:15.445ZINFOcrucible: 65a3de01-2768-4988-b51a-36173a07e08d is now active with session: 178fe1c3-e441-4bd3-8a70-92b9df375450
66222023-09-22T23:08:15.445ZINFOcrucible: [2] 65a3de01-2768-4988-b51a-36173a07e08d (178fe1c3-e441-4bd3-8a70-92b9df375450) Active Active Active ds_transition to Faulted
66232023-09-22T23:08:15.445ZINFOcrucible: [2] Transition from Active to Faulted
66242023-09-22T23:08:15.445ZINFOcrucible: [2] 65a3de01-2768-4988-b51a-36173a07e08d (178fe1c3-e441-4bd3-8a70-92b9df375450) Active Active Faulted ds_transition to LiveRepairReady
66252023-09-22T23:08:15.445ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
66262023-09-22T23:08:15.445ZINFOcrucible: [2] 65a3de01-2768-4988-b51a-36173a07e08d (178fe1c3-e441-4bd3-8a70-92b9df375450) Active Active LiveRepairReady ds_transition to LiveRepair
66272023-09-22T23:08:15.446ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
66282023-09-22T23:08:15.446ZINFOcrucible: Waiting for Close + ReOpen jobs
66292023-09-22T23:08:15.446ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
66302023-09-22T23:08:15.446ZINFOcrucible: RE:0 close id:1000 queued, notify DS
66312023-09-22T23:08:15.446ZINFOcrucible: RE:0 Wait for result from close command 1000:1
66322023-09-22T23:08:15.573ZINFOcrucible: Now move the NoOp job forward
66332023-09-22T23:08:15.573ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
66342023-09-22T23:08:15.573ZERROcrucible: [2] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
66352023-09-22T23:08:15.574ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
66362023-09-22T23:08:15.574ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
66372023-09-22T23:08:15.574ZINFOcrucible: [2] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) LiveRepair Active Active ds_transition to Faulted
66382023-09-22T23:08:15.574ZINFOcrucible: [2] Transition from Active to Faulted
66392023-09-22T23:08:15.574ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
66402023-09-22T23:08:15.574ZINFOcrucible: [0] client skip 4 in process jobs because fault = downstairs
66412023-09-22T23:08:15.574ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
66422023-09-22T23:08:15.574ZINFOcrucible: [0] ea9c2828-4faa-4745-9992-cc45fe6bf51b (62b807af-88b8-4100-a47a-2cd55774bf63) LiveRepair Active Faulted ds_transition to Faulted
66432023-09-22T23:08:15.574ZINFOcrucible: [0] Transition from LiveRepair to Faulted
66442023-09-22T23:08:15.574ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
66452023-09-22T23:08:15.574ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(2), repair_downstairs: [ClientId(1)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
66462023-09-22T23:08:15.574ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(2), repair_downstairs: [ClientId(1)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
66472023-09-22T23:08:15.574ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
66482023-09-22T23:08:15.574ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
66492023-09-22T23:08:15.574ZINFOcrucible: [1] 4bf3db3d-50a4-4818-86ec-4e619b46b97c (bf820409-3b29-4b81-8028-3c3affe54370) Active LiveRepair Active ds_transition to Faulted
66502023-09-22T23:08:15.574ZINFOcrucible: [1] Transition from LiveRepair to Faulted
66512023-09-22T23:08:15.574ZINFOcrucible: Now ACK the close job
66522023-09-22T23:08:15.574ZINFOcrucible: Waiting for 3 jobs (currently 2)
66532023-09-22T23:08:15.574ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
66542023-09-22T23:08:15.574ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
66552023-09-22T23:08:15.578ZINFOcrucible: Waiting for 3 jobs (currently 2)
66562023-09-22T23:08:15.579ZINFOcrucible: No repair needed for extent 0 = downstairs
66572023-09-22T23:08:15.579ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
66582023-09-22T23:08:15.691ZINFOcrucible: Now ACK the close job
66592023-09-22T23:08:15.692ZINFOcrucible: Waiting for 3 jobs (currently 2)
66602023-09-22T23:08:15.692ZINFOcrucible: No repair needed for extent 0 = downstairs
66612023-09-22T23:08:15.692ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
6662 test volume::test::test_parent_initialized_read_only_region_with_multiple_sub_volumes_2 ... ok
6663 test volume::test::test_parent_uninitialized_read_only_region_one_subvolume ... ok
6664 test volume::test::test_parent_uninitialized_read_only_region_with_multiple_sub_volumes_1 ... ok
6665 test volume::test::test_parent_uninitialized_read_only_region_with_multiple_sub_volumes_2 ... ok
6666 test volume::test::test_read_only_parent_for_lba_range ... ok
6667 block 0 < scrub_point 0. Check with your parent
6668 block 1 < scrub_point 0. Check with your parent
6669 block 2 < scrub_point 0. Check with your parent
6670 block 3 < scrub_point 0. Check with your parent
6671 block 4 < scrub_point 0. Check with your parent
6672 block 5 < scrub_point 0. Check with your parent
6673 block 6 < scrub_point 0. Check with your parent
6674 block 7 < scrub_point 0. Check with your parent
6675 block 8 < scrub_point 0. Check with your parent
6676 block 9 < scrub_point 0. Check with your parent
6677 block 0+1 <= scrub_point 1. No parent check
6678 block 1 < scrub_point 1. Check with your parent
6679 block 2 < scrub_point 1. Check with your parent
6680 block 3 < scrub_point 1. Check with your parent
6681 block 4 < scrub_point 1. Check with your parent
6682 block 5 < scrub_point 1. Check with your parent
6683 block 6 < scrub_point 1. Check with your parent
6684 block 7 < scrub_point 1. Check with your parent
6685 block 8 < scrub_point 1. Check with your parent
6686 block 9 < scrub_point 1. Check with your parent
6687 block 0+1 <= scrub_point 2. No parent check
6688 block 1+1 <= scrub_point 2. No parent check
6689 block 2 < scrub_point 2. Check with your parent
6690 block 3 < scrub_point 2. Check with your parent
6691 block 4 < scrub_point 2. Check with your parent
6692 block 5 < scrub_point 2. Check with your parent
6693 block 6 < scrub_point 2. Check with your parent
6694 block 7 < scrub_point 2. Check with your parent
6695 block 8 < scrub_point 2. Check with your parent
6696 block 9 < scrub_point 2. Check with your parent
6697 block 0+1 <= scrub_point 3. No parent check
6698 block 1+1 <= scrub_point 3. No parent check
6699 block 2+1 <= scrub_point 3. No parent check
6700 block 3 < scrub_point 3. Check with your parent
6701 block 4 < scrub_point 3. Check with your parent
6702 block 5 < scrub_point 3. Check with your parent
6703 block 6 < scrub_point 3. Check with your parent
6704 block 7 < scrub_point 3. Check with your parent
6705 block 8 < scrub_point 3. Check with your parent
6706 block 9 < scrub_point 3. Check with your parent
6707 block 0+1 <= scrub_point 4. No parent check
6708 block 1+1 <= scrub_point 4. No parent check
6709 block 2+1 <= scrub_point 4. No parent check
6710 block 3+1 <= scrub_point 4. No parent check
6711 block 4 < scrub_point 4. Check with your parent
6712 block 5 < scrub_point 4. Check with your parent
6713 block 6 < scrub_point 4. Check with your parent
6714 block 7 < scrub_point 4. Check with your parent
6715 block 8 < scrub_point 4. Check with your parent
6716 block 9 < scrub_point 4. Check with your parent
6717 block 0+1 <= scrub_point 5. No parent check
6718 block 1+1 <= scrub_point 5. No parent check
6719 block 2+1 <= scrub_point 5. No parent check
6720 block 3+1 <= scrub_point 5. No parent check
6721 block 4+1 <= scrub_point 5. No parent check
6722 block 5 < scrub_point 5. Check with your parent
6723 block 6 < scrub_point 5. Check with your parent
6724 block 7 < scrub_point 5. Check with your parent
6725 block 8 < scrub_point 5. Check with your parent
6726 block 9 < scrub_point 5. Check with your parent
6727 block 0+1 <= scrub_point 6. No parent check
6728 block 1+1 <= scrub_point 6. No parent check
6729 block 2+1 <= scrub_point 6. No parent check
6730 block 3+1 <= scrub_point 6. No parent check
6731 block 4+1 <= scrub_point 6. No parent check
6732 block 5+1 <= scrub_point 6. No parent check
6733 block 6 < scrub_point 6. Check with your parent
6734 block 7 < scrub_point 6. Check with your parent
6735 block 8 < scrub_point 6. Check with your parent
6736 block 9 < scrub_point 6. Check with your parent
6737 block 0+1 <= scrub_point 7. No parent check
6738 block 1+1 <= scrub_point 7. No parent check
6739 block 2+1 <= scrub_point 7. No parent check
6740 block 3+1 <= scrub_point 7. No parent check
6741 block 4+1 <= scrub_point 7. No parent check
6742 block 5+1 <= scrub_point 7. No parent check
6743 block 6+1 <= scrub_point 7. No parent check
6744 block 7 < scrub_point 7. Check with your parent
6745 block 8 < scrub_point 7. Check with your parent
6746 block 9 < scrub_point 7. Check with your parent
6747 block 0+1 <= scrub_point 8. No parent check
6748 block 1+1 <= scrub_point 8. No parent check
6749 block 2+1 <= scrub_point 8. No parent check
6750 block 3+1 <= scrub_point 8. No parent check
6751 block 4+1 <= scrub_point 8. No parent check
6752 block 5+1 <= scrub_point 8. No parent check
6753 block 6+1 <= scrub_point 8. No parent check
6754 block 7+1 <= scrub_point 8. No parent check
6755 block 8 < scrub_point 8. Check with your parent
6756 block 9 < scrub_point 8. Check with your parent
6757 block 0+1 <= scrub_point 9. No parent check
6758 block 1+1 <= scrub_point 9. No parent check
6759 block 2+1 <= scrub_point 9. No parent check
6760 block 3+1 <= scrub_point 9. No parent check
6761 block 4+1 <= scrub_point 9. No parent check
6762 block 5+1 <= scrub_point 9. No parent check
6763 block 6+1 <= scrub_point 9. No parent check
6764 block 7+1 <= scrub_point 9. No parent check
6765 block 8+1 <= scrub_point 9. No parent check
6766 block 9 < scrub_point 9. Check with your parent
6767 block 0+1 <= scrub_point 10. No parent check
6768 block 1+1 <= scrub_point 10. No parent check
6769 block 2+1 <= scrub_point 10. No parent check
6770 block 3+1 <= scrub_point 10. No parent check
6771 block 4+1 <= scrub_point 10. No parent check
6772 block 5+1 <= scrub_point 10. No parent check
6773 block 6+1 <= scrub_point 10. No parent check
6774 block 7+1 <= scrub_point 10. No parent check
6775 block 8+1 <= scrub_point 10. No parent check
6776 block 9+1 <= scrub_point 10. No parent check
6777 block 0 < scrub_point 0. Check with your parent
6778 block 1 < scrub_point 0. Check with your parent
6779 block 2 < scrub_point 0. Check with your parent
6780 block 3 < scrub_point 0. Check with your parent
6781 block 4 < scrub_point 0. Check with your parent
6782 block 5 < scrub_point 0. Check with your parent
6783 block 6 < scrub_point 0. Check with your parent
6784 block 7 < scrub_point 0. Check with your parent
6785 block 8 < scrub_point 0. Check with your parent
6786 block 0 < scrub_point 1. Check with your parent
6787 block 1 < scrub_point 1. Check with your parent
6788 block 2 < scrub_point 1. Check with your parent
6789 block 3 < scrub_point 1. Check with your parent
6790 block 4 < scrub_point 1. Check with your parent
6791 block 5 < scrub_point 1. Check with your parent
6792 block 6 < scrub_point 1. Check with your parent
6793 block 7 < scrub_point 1. Check with your parent
6794 block 8 < scrub_point 1. Check with your parent
6795 block 0+2 <= scrub_point 2. No parent check
6796 block 1 < scrub_point 2. Check with your parent
6797 block 2 < scrub_point 2. Check with your parent
6798 block 3 < scrub_point 2. Check with your parent
6799 block 4 < scrub_point 2. Check with your parent
6800 block 5 < scrub_point 2. Check with your parent
6801 block 6 < scrub_point 2. Check with your parent
6802 block 7 < scrub_point 2. Check with your parent
6803 block 8 < scrub_point 2. Check with your parent
6804 block 0+2 <= scrub_point 3. No parent check
6805 block 1+2 <= scrub_point 3. No parent check
6806 block 2 < scrub_point 3. Check with your parent
6807 block 3 < scrub_point 3. Check with your parent
6808 block 4 < scrub_point 3. Check with your parent
6809 block 5 < scrub_point 3. Check with your parent
6810 block 6 < scrub_point 3. Check with your parent
6811 block 7 < scrub_point 3. Check with your parent
6812 block 8 < scrub_point 3. Check with your parent
6813 block 0+2 <= scrub_point 4. No parent check
6814 block 1+2 <= scrub_point 4. No parent check
6815 block 2+2 <= scrub_point 4. No parent check
6816 block 3 < scrub_point 4. Check with your parent
6817 block 4 < scrub_point 4. Check with your parent
6818 block 5 < scrub_point 4. Check with your parent
6819 block 6 < scrub_point 4. Check with your parent
6820 block 7 < scrub_point 4. Check with your parent
6821 block 8 < scrub_point 4. Check with your parent
6822 block 0+2 <= scrub_point 5. No parent check
6823 block 1+2 <= scrub_point 5. No parent check
6824 block 2+2 <= scrub_point 5. No parent check
6825 block 3+2 <= scrub_point 5. No parent check
6826 block 4 < scrub_point 5. Check with your parent
6827 block 5 < scrub_point 5. Check with your parent
6828 block 6 < scrub_point 5. Check with your parent
6829 block 7 < scrub_point 5. Check with your parent
6830 block 8 < scrub_point 5. Check with your parent
6831 block 0+2 <= scrub_point 6. No parent check
6832 block 1+2 <= scrub_point 6. No parent check
6833 block 2+2 <= scrub_point 6. No parent check
6834 block 3+2 <= scrub_point 6. No parent check
6835 block 4+2 <= scrub_point 6. No parent check
6836 block 5 < scrub_point 6. Check with your parent
6837 block 6 < scrub_point 6. Check with your parent
6838 block 7 < scrub_point 6. Check with your parent
6839 block 8 < scrub_point 6. Check with your parent
6840 block 0+2 <= scrub_point 7. No parent check
6841 block 1+2 <= scrub_point 7. No parent check
6842 block 2+2 <= scrub_point 7. No parent check
6843 block 3+2 <= scrub_point 7. No parent check
6844 block 4+2 <= scrub_point 7. No parent check
6845 block 5+2 <= scrub_point 7. No parent check
6846 block 6 < scrub_point 7. Check with your parent
6847 block 7 < scrub_point 7. Check with your parent
6848 block 8 < scrub_point 7. Check with your parent
6849 block 0+2 <= scrub_point 8. No parent check
6850 block 1+2 <= scrub_point 8. No parent check
6851 block 2+2 <= scrub_point 8. No parent check
6852 block 3+2 <= scrub_point 8. No parent check
6853 block 4+2 <= scrub_point 8. No parent check
6854 block 5+2 <= scrub_point 8. No parent check
6855 block 6+2 <= scrub_point 8. No parent check
6856 block 7 < scrub_point 8. Check with your parent
6857 block 8 < scrub_point 8. Check with your parent
6858 block 0+2 <= scrub_point 9. No parent check
6859 block 1+2 <= scrub_point 9. No parent check
6860 block 2+2 <= scrub_point 9. No parent check
6861 block 3+2 <= scrub_point 9. No parent check
6862 block 4+2 <= scrub_point 9. No parent check
6863 block 5+2 <= scrub_point 9. No parent check
6864 block 6+2 <= scrub_point 9. No parent check
6865 block 7+2 <= scrub_point 9. No parent check
6866 block 8 < scrub_point 9. Check with your parent
6867 block 0+2 <= scrub_point 10. No parent check
6868 block 1+2 <= scrub_point 10. No parent check
6869 block 2+2 <= scrub_point 10. No parent check
6870 block 3+2 <= scrub_point 10. No parent check
6871 block 4+2 <= scrub_point 10. No parent check
6872 block 5+2 <= scrub_point 10. No parent check
6873 block 6+2 <= scrub_point 10. No parent check
6874 block 7+2 <= scrub_point 10. No parent check
6875 block 8+2 <= scrub_point 10. No parent check
6876 block 0 < scrub_point 0. Check with your parent
6877 block 1 < scrub_point 0. Check with your parent
6878 block 2 < scrub_point 0. Check with your parent
6879 block 3 < scrub_point 0. Check with your parent
6880 block 4 < scrub_point 0. Check with your parent
6881 block 5 < scrub_point 0. Check with your parent
6882 block 6 < scrub_point 0. Check with your parent
6883 block 7 < scrub_point 0. Check with your parent
6884 block 0 < scrub_point 1. Check with your parent
6885 block 1 < scrub_point 1. Check with your parent
6886 block 2 < scrub_point 1. Check with your parent
6887 block 3 < scrub_point 1. Check with your parent
6888 block 4 < scrub_point 1. Check with your parent
6889 block 5 < scrub_point 1. Check with your parent
6890 block 6 < scrub_point 1. Check with your parent
6891 block 7 < scrub_point 1. Check with your parent
6892 block 0 < scrub_point 2. Check with your parent
6893 block 1 < scrub_point 2. Check with your parent
6894 block 2 < scrub_point 2. Check with your parent
6895 block 3 < scrub_point 2. Check with your parent
6896 block 4 < scrub_point 2. Check with your parent
6897 block 5 < scrub_point 2. Check with your parent
6898 block 6 < scrub_point 2. Check with your parent
6899 block 7 < scrub_point 2. Check with your parent
6900 block 0+3 <= scrub_point 3. No parent check
6901 block 1 < scrub_point 3. Check with your parent
6902 block 2 < scrub_point 3. Check with your parent
6903 block 3 < scrub_point 3. Check with your parent
6904 block 4 < scrub_point 3. Check with your parent
6905 block 5 < scrub_point 3. Check with your parent
6906 block 6 < scrub_point 3. Check with your parent
6907 block 7 < scrub_point 3. Check with your parent
6908 block 0+3 <= scrub_point 4. No parent check
6909 block 1+3 <= scrub_point 4. No parent check
6910 block 2 < scrub_point 4. Check with your parent
6911 block 3 < scrub_point 4. Check with your parent
6912 block 4 < scrub_point 4. Check with your parent
6913 block 5 < scrub_point 4. Check with your parent
6914 block 6 < scrub_point 4. Check with your parent
6915 block 7 < scrub_point 4. Check with your parent
6916 block 0+3 <= scrub_point 5. No parent check
6917 block 1+3 <= scrub_point 5. No parent check
6918 block 2+3 <= scrub_point 5. No parent check
6919 block 3 < scrub_point 5. Check with your parent
6920 block 4 < scrub_point 5. Check with your parent
6921 block 5 < scrub_point 5. Check with your parent
6922 block 6 < scrub_point 5. Check with your parent
6923 block 7 < scrub_point 5. Check with your parent
6924 block 0+3 <= scrub_point 6. No parent check
6925 block 1+3 <= scrub_point 6. No parent check
6926 block 2+3 <= scrub_point 6. No parent check
6927 block 3+3 <= scrub_point 6. No parent check
6928 block 4 < scrub_point 6. Check with your parent
6929 block 5 < scrub_point 6. Check with your parent
6930 block 6 < scrub_point 6. Check with your parent
6931 block 7 < scrub_point 6. Check with your parent
6932 block 0+3 <= scrub_point 7. No parent check
6933 block 1+3 <= scrub_point 7. No parent check
6934 block 2+3 <= scrub_point 7. No parent check
6935 block 3+3 <= scrub_point 7. No parent check
6936 block 4+3 <= scrub_point 7. No parent check
6937 block 5 < scrub_point 7. Check with your parent
6938 block 6 < scrub_point 7. Check with your parent
6939 block 7 < scrub_point 7. Check with your parent
6940 block 0+3 <= scrub_point 8. No parent check
6941 block 1+3 <= scrub_point 8. No parent check
6942 block 2+3 <= scrub_point 8. No parent check
6943 block 3+3 <= scrub_point 8. No parent check
6944 block 4+3 <= scrub_point 8. No parent check
6945 block 5+3 <= scrub_point 8. No parent check
6946 block 6 < scrub_point 8. Check with your parent
6947 block 7 < scrub_point 8. Check with your parent
6948 block 0+3 <= scrub_point 9. No parent check
6949 block 1+3 <= scrub_point 9. No parent check
6950 block 2+3 <= scrub_point 9. No parent check
6951 block 3+3 <= scrub_point 9. No parent check
6952 block 4+3 <= scrub_point 9. No parent check
6953 block 5+3 <= scrub_point 9. No parent check
6954 block 6+3 <= scrub_point 9. No parent check
6955 block 7 < scrub_point 9. Check with your parent
6956 block 0+3 <= scrub_point 10. No parent check
6957 block 1+3 <= scrub_point 10. No parent check
6958 block 2+3 <= scrub_point 10. No parent check
6959 block 3+3 <= scrub_point 10. No parent check
6960 block 4+3 <= scrub_point 10. No parent check
6961 block 5+3 <= scrub_point 10. No parent check
6962 block 6+3 <= scrub_point 10. No parent check
6963 block 7+3 <= scrub_point 10. No parent check
6964 block 0 < scrub_point 0. Check with your parent
6965 block 1 < scrub_point 0. Check with your parent
6966 block 2 < scrub_point 0. Check with your parent
6967 block 3 < scrub_point 0. Check with your parent
6968 block 4 < scrub_point 0. Check with your parent
6969 block 5 < scrub_point 0. Check with your parent
6970 block 6 < scrub_point 0. Check with your parent
6971 block 0 < scrub_point 1. Check with your parent
6972 block 1 < scrub_point 1. Check with your parent
6973 block 2 < scrub_point 1. Check with your parent
6974 block 3 < scrub_point 1. Check with your parent
6975 block 4 < scrub_point 1. Check with your parent
6976 block 5 < scrub_point 1. Check with your parent
6977 block 6 < scrub_point 1. Check with your parent
6978 block 0 < scrub_point 2. Check with your parent
6979 block 1 < scrub_point 2. Check with your parent
6980 block 2 < scrub_point 2. Check with your parent
6981 block 3 < scrub_point 2. Check with your parent
6982 block 4 < scrub_point 2. Check with your parent
6983 block 5 < scrub_point 2. Check with your parent
6984 block 6 < scrub_point 2. Check with your parent
6985 block 0 < scrub_point 3. Check with your parent
6986 block 1 < scrub_point 3. Check with your parent
6987 block 2 < scrub_point 3. Check with your parent
6988 block 3 < scrub_point 3. Check with your parent
6989 block 4 < scrub_point 3. Check with your parent
6990 block 5 < scrub_point 3. Check with your parent
6991 block 6 < scrub_point 3. Check with your parent
6992 block 0+4 <= scrub_point 4. No parent check
6993 block 1 < scrub_point 4. Check with your parent
6994 block 2 < scrub_point 4. Check with your parent
6995 block 3 < scrub_point 4. Check with your parent
6996 block 4 < scrub_point 4. Check with your parent
6997 block 5 < scrub_point 4. Check with your parent
6998 block 6 < scrub_point 4. Check with your parent
6999 block 0+4 <= scrub_point 5. No parent check
7000 block 1+4 <= scrub_point 5. No parent check
7001 block 2 < scrub_point 5. Check with your parent
7002 block 3 < scrub_point 5. Check with your parent
7003 block 4 < scrub_point 5. Check with your parent
7004 block 5 < scrub_point 5. Check with your parent
7005 block 6 < scrub_point 5. Check with your parent
7006 block 0+4 <= scrub_point 6. No parent check
7007 block 1+4 <= scrub_point 6. No parent check
7008 block 2+4 <= scrub_point 6. No parent check
7009 block 3 < scrub_point 6. Check with your parent
7010 block 4 < scrub_point 6. Check with your parent
7011 block 5 < scrub_point 6. Check with your parent
7012 block 6 < scrub_point 6. Check with your parent
7013 block 0+4 <= scrub_point 7. No parent check
7014 block 1+4 <= scrub_point 7. No parent check
7015 block 2+4 <= scrub_point 7. No parent check
7016 block 3+4 <= scrub_point 7. No parent check
7017 block 4 < scrub_point 7. Check with your parent
7018 block 5 < scrub_point 7. Check with your parent
7019 block 6 < scrub_point 7. Check with your parent
7020 block 0+4 <= scrub_point 8. No parent check
7021 block 1+4 <= scrub_point 8. No parent check
7022 block 2+4 <= scrub_point 8. No parent check
7023 block 3+4 <= scrub_point 8. No parent check
7024 block 4+4 <= scrub_point 8. No parent check
7025 block 5 < scrub_point 8. Check with your parent
7026 block 6 < scrub_point 8. Check with your parent
7027 block 0+4 <= scrub_point 9. No parent check
7028 block 1+4 <= scrub_point 9. No parent check
7029 block 2+4 <= scrub_point 9. No parent check
7030 block 3+4 <= scrub_point 9. No parent check
7031 block 4+4 <= scrub_point 9. No parent check
7032 block 5+4 <= scrub_point 9. No parent check
7033 block 6 < scrub_point 9. Check with your parent
7034 block 0+4 <= scrub_point 10. No parent check
7035 block 1+4 <= scrub_point 10. No parent check
7036 block 2+4 <= scrub_point 10. No parent check
7037 block 3+4 <= scrub_point 10. No parent check
7038 block 4+4 <= scrub_point 10. No parent check
7039 block 5+4 <= scrub_point 10. No parent check
7040 block 6+4 <= scrub_point 10. No parent check
7041 block 0 < scrub_point 0. Check with your parent
7042 block 1 < scrub_point 0. Check with your parent
7043 block 2 < scrub_point 0. Check with your parent
7044 block 3 < scrub_point 0. Check with your parent
7045 block 4 < scrub_point 0. Check with your parent
7046 block 5 < scrub_point 0. Check with your parent
7047 block 0 < scrub_point 1. Check with your parent
7048 block 1 < scrub_point 1. Check with your parent
7049 block 2 < scrub_point 1. Check with your parent
7050 block 3 < scrub_point 1. Check with your parent
7051 block 4 < scrub_point 1. Check with your parent
7052 block 5 < scrub_point 1. Check with your parent
7053 block 0 < scrub_point 2. Check with your parent
7054 block 1 < scrub_point 2. Check with your parent
7055 block 2 < scrub_point 2. Check with your parent
7056 block 3 < scrub_point 2. Check with your parent
7057 block 4 < scrub_point 2. Check with your parent
7058 block 5 < scrub_point 2. Check with your parent
7059 block 0 < scrub_point 3. Check with your parent
7060 block 1 < scrub_point 3. Check with your parent
7061 block 2 < scrub_point 3. Check with your parent
7062 block 3 < scrub_point 3. Check with your parent
7063 block 4 < scrub_point 3. Check with your parent
7064 block 5 < scrub_point 3. Check with your parent
7065 block 0 < scrub_point 4. Check with your parent
7066 block 1 < scrub_point 4. Check with your parent
7067 block 2 < scrub_point 4. Check with your parent
7068 block 3 < scrub_point 4. Check with your parent
7069 block 4 < scrub_point 4. Check with your parent
7070 block 5 < scrub_point 4. Check with your parent
7071 block 0+5 <= scrub_point 5. No parent check
7072 block 1 < scrub_point 5. Check with your parent
7073 block 2 < scrub_point 5. Check with your parent
7074 block 3 < scrub_point 5. Check with your parent
7075 block 4 < scrub_point 5. Check with your parent
7076 block 5 < scrub_point 5. Check with your parent
7077 block 0+5 <= scrub_point 6. No parent check
7078 block 1+5 <= scrub_point 6. No parent check
7079 block 2 < scrub_point 6. Check with your parent
7080 block 3 < scrub_point 6. Check with your parent
7081 block 4 < scrub_point 6. Check with your parent
7082 block 5 < scrub_point 6. Check with your parent
7083 block 0+5 <= scrub_point 7. No parent check
7084 block 1+5 <= scrub_point 7. No parent check
7085 block 2+5 <= scrub_point 7. No parent check
7086 block 3 < scrub_point 7. Check with your parent
7087 block 4 < scrub_point 7. Check with your parent
7088 block 5 < scrub_point 7. Check with your parent
7089 block 0+5 <= scrub_point 8. No parent check
7090 block 1+5 <= scrub_point 8. No parent check
7091 block 2+5 <= scrub_point 8. No parent check
7092 block 3+5 <= scrub_point 8. No parent check
7093 block 4 < scrub_point 8. Check with your parent
7094 block 5 < scrub_point 8. Check with your parent
7095 block 0+5 <= scrub_point 9. No parent check
7096 block 1+5 <= scrub_point 9. No parent check
7097 block 2+5 <= scrub_point 9. No parent check
7098 block 3+5 <= scrub_point 9. No parent check
7099 block 4+5 <= scrub_point 9. No parent check
7100 block 5 < scrub_point 9. Check with your parent
7101 block 0+5 <= scrub_point 10. No parent check
7102 block 1+5 <= scrub_point 10. No parent check
7103 block 2+5 <= scrub_point 10. No parent check
7104 block 3+5 <= scrub_point 10. No parent check
7105 block 4+5 <= scrub_point 10. No parent check
7106 block 5+5 <= scrub_point 10. No parent check
7107 block 0 < scrub_point 0. Check with your parent
7108 block 1 < scrub_point 0. Check with your parent
7109 block 2 < scrub_point 0. Check with your parent
7110 block 3 < scrub_point 0. Check with your parent
7111 block 4 < scrub_point 0. Check with your parent
7112 block 0 < scrub_point 1. Check with your parent
7113 block 1 < scrub_point 1. Check with your parent
7114 block 2 < scrub_point 1. Check with your parent
7115 block 3 < scrub_point 1. Check with your parent
7116 block 4 < scrub_point 1. Check with your parent
7117 block 0 < scrub_point 2. Check with your parent
7118 block 1 < scrub_point 2. Check with your parent
7119 block 2 < scrub_point 2. Check with your parent
7120 block 3 < scrub_point 2. Check with your parent
7121 block 4 < scrub_point 2. Check with your parent
7122 block 0 < scrub_point 3. Check with your parent
7123 block 1 < scrub_point 3. Check with your parent
7124 block 2 < scrub_point 3. Check with your parent
7125 block 3 < scrub_point 3. Check with your parent
7126 block 4 < scrub_point 3. Check with your parent
7127 block 0 < scrub_point 4. Check with your parent
7128 block 1 < scrub_point 4. Check with your parent
7129 block 2 < scrub_point 4. Check with your parent
7130 block 3 < scrub_point 4. Check with your parent
7131 block 4 < scrub_point 4. Check with your parent
7132 block 0 < scrub_point 5. Check with your parent
7133 block 1 < scrub_point 5. Check with your parent
7134 block 2 < scrub_point 5. Check with your parent
7135 block 3 < scrub_point 5. Check with your parent
7136 block 4 < scrub_point 5. Check with your parent
7137 block 0+6 <= scrub_point 6. No parent check
7138 block 1 < scrub_point 6. Check with your parent
7139 block 2 < scrub_point 6. Check with your parent
7140 block 3 < scrub_point 6. Check with your parent
7141 block 4 < scrub_point 6. Check with your parent
7142 block 0+6 <= scrub_point 7. No parent check
7143 block 1+6 <= scrub_point 7. No parent check
7144 block 2 < scrub_point 7. Check with your parent
7145 block 3 < scrub_point 7. Check with your parent
7146 block 4 < scrub_point 7. Check with your parent
7147 block 0+6 <= scrub_point 8. No parent check
7148 block 1+6 <= scrub_point 8. No parent check
7149 block 2+6 <= scrub_point 8. No parent check
7150 block 3 < scrub_point 8. Check with your parent
7151 block 4 < scrub_point 8. Check with your parent
7152 block 0+6 <= scrub_point 9. No parent check
7153 block 1+6 <= scrub_point 9. No parent check
7154 block 2+6 <= scrub_point 9. No parent check
7155 block 3+6 <= scrub_point 9. No parent check
7156 block 4 < scrub_point 9. Check with your parent
7157 block 0+6 <= scrub_point 10. No parent check
7158 block 1+6 <= scrub_point 10. No parent check
7159 block 2+6 <= scrub_point 10. No parent check
7160 block 3+6 <= scrub_point 10. No parent check
7161 block 4+6 <= scrub_point 10. No parent check
7162 block 0 < scrub_point 0. Check with your parent
7163 block 1 < scrub_point 0. Check with your parent
7164 block 2 < scrub_point 0. Check with your parent
7165 block 3 < scrub_point 0. Check with your parent
7166 block 0 < scrub_point 1. Check with your parent
7167 block 1 < scrub_point 1. Check with your parent
7168 block 2 < scrub_point 1. Check with your parent
7169 block 3 < scrub_point 1. Check with your parent
7170 block 0 < scrub_point 2. Check with your parent
7171 block 1 < scrub_point 2. Check with your parent
7172 block 2 < scrub_point 2. Check with your parent
7173 block 3 < scrub_point 2. Check with your parent
7174 block 0 < scrub_point 3. Check with your parent
7175 block 1 < scrub_point 3. Check with your parent
7176 block 2 < scrub_point 3. Check with your parent
7177 block 3 < scrub_point 3. Check with your parent
7178 block 0 < scrub_point 4. Check with your parent
7179 block 1 < scrub_point 4. Check with your parent
7180 block 2 < scrub_point 4. Check with your parent
7181 block 3 < scrub_point 4. Check with your parent
7182 block 0 < scrub_point 5. Check with your parent
7183 block 1 < scrub_point 5. Check with your parent
7184 block 2 < scrub_point 5. Check with your parent
7185 block 3 < scrub_point 5. Check with your parent
7186 block 0 < scrub_point 6. Check with your parent
7187 block 1 < scrub_point 6. Check with your parent
7188 block 2 < scrub_point 6. Check with your parent
7189 block 3 < scrub_point 6. Check with your parent
7190 block 0+7 <= scrub_point 7. No parent check
7191 block 1 < scrub_point 7. Check with your parent
7192 block 2 < scrub_point 7. Check with your parent
7193 block 3 < scrub_point 7. Check with your parent
7194 block 0+7 <= scrub_point 8. No parent check
7195 block 1+7 <= scrub_point 8. No parent check
7196 block 2 < scrub_point 8. Check with your parent
7197 block 3 < scrub_point 8. Check with your parent
7198 block 0+7 <= scrub_point 9. No parent check
7199 block 1+7 <= scrub_point 9. No parent check
7200 block 2+7 <= scrub_point 9. No parent check
7201 block 3 < scrub_point 9. Check with your parent
7202 block 0+7 <= scrub_point 10. No parent check
7203 block 1+7 <= scrub_point 10. No parent check
7204 block 2+7 <= scrub_point 10. No parent check
7205 block 3+7 <= scrub_point 10. No parent check
7206 block 0 < scrub_point 0. Check with your parent
7207 block 1 < scrub_point 0. Check with your parent
7208 block 2 < scrub_point 0. Check with your parent
7209 block 0 < scrub_point 1. Check with your parent
7210 block 1 < scrub_point 1. Check with your parent
7211 block 2 < scrub_point 1. Check with your parent
7212 block 0 < scrub_point 2. Check with your parent
7213 block 1 < scrub_point 2. Check with your parent
7214 block 2 < scrub_point 2. Check with your parent
7215 block 0 < scrub_point 3. Check with your parent
7216 block 1 < scrub_point 3. Check with your parent
7217 block 2 < scrub_point 3. Check with your parent
7218 block 0 < scrub_point 4. Check with your parent
7219 block 1 < scrub_point 4. Check with your parent
7220 block 2 < scrub_point 4. Check with your parent
7221 block 0 < scrub_point 5. Check with your parent
7222 block 1 < scrub_point 5. Check with your parent
7223 block 2 < scrub_point 5. Check with your parent
7224 block 0 < scrub_point 6. Check with your parent
7225 block 1 < scrub_point 6. Check with your parent
7226 block 2 < scrub_point 6. Check with your parent
7227 block 0 < scrub_point 7. Check with your parent
7228 block 1 < scrub_point 7. Check with your parent
7229 block 2 < scrub_point 7. Check with your parent
7230 block 0+8 <= scrub_point 8. No parent check
7231 block 1 < scrub_point 8. Check with your parent
7232 block 2 < scrub_point 8. Check with your parent
7233 block 0+8 <= scrub_point 9. No parent check
7234 block 1+8 <= scrub_point 9. No parent check
7235 block 2 < scrub_point 9. Check with your parent
7236 block 0+8 <= scrub_point 10. No parent check
7237 block 1+8 <= scrub_point 10. No parent check
7238 block 2+8 <= scrub_point 10. No parent check
7239 block 0 < scrub_point 0. Check with your parent
7240 block 1 < scrub_point 0. Check with your parent
7241 block 0 < scrub_point 1. Check with your parent
7242 block 1 < scrub_point 1. Check with your parent
7243 block 0 < scrub_point 2. Check with your parent
7244 block 1 < scrub_point 2. Check with your parent
7245 block 0 < scrub_point 3. Check with your parent
7246 block 1 < scrub_point 3. Check with your parent
7247 block 0 < scrub_point 4. Check with your parent
7248 block 1 < scrub_point 4. Check with your parent
7249 block 0 < scrub_point 5. Check with your parent
7250 block 1 < scrub_point 5. Check with your parent
7251 block 0 < scrub_point 6. Check with your parent
7252 block 1 < scrub_point 6. Check with your parent
7253 block 0 < scrub_point 7. Check with your parent
7254 block 1 < scrub_point 7. Check with your parent
7255 block 0 < scrub_point 8. Check with your parent
7256 block 1 < scrub_point 8. Check with your parent
7257 block 0+9 <= scrub_point 9. No parent check
7258 block 1 < scrub_point 9. Check with your parent
7259 block 0+9 <= scrub_point 10. No parent check
7260 block 1+9 <= scrub_point 10. No parent check
7261 test volume::test::test_scrub_point_subvolume_equal ... ok
7262 block 0 < scrub_point 0. Check with your parent
7263 block 1 < scrub_point 0. Check with your parent
7264 block 2 < scrub_point 0. Check with your parent
7265 block 3 < scrub_point 0. Check with your parent
7266 block 4 < scrub_point 0. Check with your parent
7267 block 5 > parent 5. Go to SubVolume
7268 block 6 > parent 5. Go to SubVolume
7269 block 7 > parent 5. Go to SubVolume
7270 block 8 > parent 5. Go to SubVolume
7271 block 9 > parent 5. Go to SubVolume
7272 block 0+1 <= scrub_point 1. No parent check
7273 block 1 < scrub_point 1. Check with your parent
7274 block 2 < scrub_point 1. Check with your parent
7275 block 3 < scrub_point 1. Check with your parent
7276 block 4 < scrub_point 1. Check with your parent
7277 block 5 > parent 5. Go to SubVolume
7278 block 6 > parent 5. Go to SubVolume
7279 block 7 > parent 5. Go to SubVolume
7280 block 8 > parent 5. Go to SubVolume
7281 block 9 > parent 5. Go to SubVolume
7282 block 0+1 <= scrub_point 2. No parent check
7283 block 1+1 <= scrub_point 2. No parent check
7284 block 2 < scrub_point 2. Check with your parent
7285 block 3 < scrub_point 2. Check with your parent
7286 block 4 < scrub_point 2. Check with your parent
7287 block 5 > parent 5. Go to SubVolume
7288 block 6 > parent 5. Go to SubVolume
7289 block 7 > parent 5. Go to SubVolume
7290 block 8 > parent 5. Go to SubVolume
7291 block 9 > parent 5. Go to SubVolume
7292 block 0+1 <= scrub_point 3. No parent check
7293 block 1+1 <= scrub_point 3. No parent check
7294 block 2+1 <= scrub_point 3. No parent check
7295 block 3 < scrub_point 3. Check with your parent
7296 block 4 < scrub_point 3. Check with your parent
7297 block 5 > parent 5. Go to SubVolume
7298 block 6 > parent 5. Go to SubVolume
7299 block 7 > parent 5. Go to SubVolume
7300 block 8 > parent 5. Go to SubVolume
7301 block 9 > parent 5. Go to SubVolume
7302 block 0+1 <= scrub_point 4. No parent check
7303 block 1+1 <= scrub_point 4. No parent check
7304 block 2+1 <= scrub_point 4. No parent check
7305 block 3+1 <= scrub_point 4. No parent check
7306 block 4 < scrub_point 4. Check with your parent
7307 block 5 > parent 5. Go to SubVolume
7308 block 6 > parent 5. Go to SubVolume
7309 block 7 > parent 5. Go to SubVolume
7310 block 8 > parent 5. Go to SubVolume
7311 block 9 > parent 5. Go to SubVolume
7312 block 0+1 <= scrub_point 5. No parent check
7313 block 1+1 <= scrub_point 5. No parent check
7314 block 2+1 <= scrub_point 5. No parent check
7315 block 3+1 <= scrub_point 5. No parent check
7316 block 4+1 <= scrub_point 5. No parent check
7317 block 5 > parent 5. Go to SubVolume
7318 block 6 > parent 5. Go to SubVolume
7319 block 7 > parent 5. Go to SubVolume
7320 block 8 > parent 5. Go to SubVolume
7321 block 9 > parent 5. Go to SubVolume
7322 block 0 < scrub_point 0. Check with your parent
7323 block 1 < scrub_point 0. Check with your parent
7324 block 2 < scrub_point 0. Check with your parent
7325 block 3 < scrub_point 0. Check with your parent
7326 block 4 < scrub_point 0. Check with your parent
7327 block 5 > parent 5. Go to SubVolume
7328 block 6 > parent 5. Go to SubVolume
7329 block 7 > parent 5. Go to SubVolume
7330 block 8 > parent 5. Go to SubVolume
7331 block 0 < scrub_point 1. Check with your parent
7332 block 1 < scrub_point 1. Check with your parent
7333 block 2 < scrub_point 1. Check with your parent
7334 block 3 < scrub_point 1. Check with your parent
7335 block 4 < scrub_point 1. Check with your parent
7336 block 5 > parent 5. Go to SubVolume
7337 block 6 > parent 5. Go to SubVolume
7338 block 7 > parent 5. Go to SubVolume
7339 block 8 > parent 5. Go to SubVolume
7340 block 0+2 <= scrub_point 2. No parent check
7341 block 1 < scrub_point 2. Check with your parent
7342 block 2 < scrub_point 2. Check with your parent
7343 block 3 < scrub_point 2. Check with your parent
7344 block 4 < scrub_point 2. Check with your parent
7345 block 5 > parent 5. Go to SubVolume
7346 block 6 > parent 5. Go to SubVolume
7347 block 7 > parent 5. Go to SubVolume
7348 block 8 > parent 5. Go to SubVolume
7349 block 0+2 <= scrub_point 3. No parent check
7350 block 1+2 <= scrub_point 3. No parent check
7351 block 2 < scrub_point 3. Check with your parent
7352 block 3 < scrub_point 3. Check with your parent
7353 block 4 < scrub_point 3. Check with your parent
7354 block 5 > parent 5. Go to SubVolume
7355 block 6 > parent 5. Go to SubVolume
7356 block 7 > parent 5. Go to SubVolume
7357 block 8 > parent 5. Go to SubVolume
7358 block 0+2 <= scrub_point 4. No parent check
7359 block 1+2 <= scrub_point 4. No parent check
7360 block 2+2 <= scrub_point 4. No parent check
7361 block 3 < scrub_point 4. Check with your parent
7362 block 4 < scrub_point 4. Check with your parent
7363 block 5 > parent 5. Go to SubVolume
7364 block 6 > parent 5. Go to SubVolume
7365 block 7 > parent 5. Go to SubVolume
7366 block 8 > parent 5. Go to SubVolume
7367 block 0+2 <= scrub_point 5. No parent check
7368 block 1+2 <= scrub_point 5. No parent check
7369 block 2+2 <= scrub_point 5. No parent check
7370 block 3+2 <= scrub_point 5. No parent check
7371 block 4 < scrub_point 5. Check with your parent
7372 block 5 > parent 5. Go to SubVolume
7373 block 6 > parent 5. Go to SubVolume
7374 block 7 > parent 5. Go to SubVolume
7375 block 8 > parent 5. Go to SubVolume
7376 block 0 < scrub_point 0. Check with your parent
7377 block 1 < scrub_point 0. Check with your parent
7378 block 2 < scrub_point 0. Check with your parent
7379 block 3 < scrub_point 0. Check with your parent
7380 block 4 < scrub_point 0. Check with your parent
7381 block 5 > parent 5. Go to SubVolume
7382 block 6 > parent 5. Go to SubVolume
7383 block 7 > parent 5. Go to SubVolume
7384 block 0 < scrub_point 1. Check with your parent
7385 block 1 < scrub_point 1. Check with your parent
7386 block 2 < scrub_point 1. Check with your parent
7387 block 3 < scrub_point 1. Check with your parent
7388 block 4 < scrub_point 1. Check with your parent
7389 block 5 > parent 5. Go to SubVolume
7390 block 6 > parent 5. Go to SubVolume
7391 block 7 > parent 5. Go to SubVolume
7392 block 0 < scrub_point 2. Check with your parent
7393 block 1 < scrub_point 2. Check with your parent
7394 block 2 < scrub_point 2. Check with your parent
7395 block 3 < scrub_point 2. Check with your parent
7396 block 4 < scrub_point 2. Check with your parent
7397 block 5 > parent 5. Go to SubVolume
7398 block 6 > parent 5. Go to SubVolume
7399 block 7 > parent 5. Go to SubVolume
7400 block 0+3 <= scrub_point 3. No parent check
7401 block 1 < scrub_point 3. Check with your parent
7402 block 2 < scrub_point 3. Check with your parent
7403 block 3 < scrub_point 3. Check with your parent
7404 block 4 < scrub_point 3. Check with your parent
7405 block 5 > parent 5. Go to SubVolume
7406 block 6 > parent 5. Go to SubVolume
7407 block 7 > parent 5. Go to SubVolume
7408 block 0+3 <= scrub_point 4. No parent check
7409 block 1+3 <= scrub_point 4. No parent check
7410 block 2 < scrub_point 4. Check with your parent
7411 block 3 < scrub_point 4. Check with your parent
7412 block 4 < scrub_point 4. Check with your parent
7413 block 5 > parent 5. Go to SubVolume
7414 block 6 > parent 5. Go to SubVolume
7415 block 7 > parent 5. Go to SubVolume
7416 block 0+3 <= scrub_point 5. No parent check
7417 block 1+3 <= scrub_point 5. No parent check
7418 block 2+3 <= scrub_point 5. No parent check
7419 block 3 < scrub_point 5. Check with your parent
7420 block 4 < scrub_point 5. Check with your parent
7421 block 5 > parent 5. Go to SubVolume
7422 block 6 > parent 5. Go to SubVolume
7423 block 7 > parent 5. Go to SubVolume
7424 block 0 < scrub_point 0. Check with your parent
7425 block 1 < scrub_point 0. Check with your parent
7426 block 2 < scrub_point 0. Check with your parent
7427 block 3 < scrub_point 0. Check with your parent
7428 block 4 < scrub_point 0. Check with your parent
7429 block 5 > parent 5. Go to SubVolume
7430 block 6 > parent 5. Go to SubVolume
7431 block 0 < scrub_point 1. Check with your parent
7432 block 1 < scrub_point 1. Check with your parent
7433 block 2 < scrub_point 1. Check with your parent
7434 block 3 < scrub_point 1. Check with your parent
7435 block 4 < scrub_point 1. Check with your parent
7436 block 5 > parent 5. Go to SubVolume
7437 block 6 > parent 5. Go to SubVolume
7438 block 0 < scrub_point 2. Check with your parent
7439 block 1 < scrub_point 2. Check with your parent
7440 block 2 < scrub_point 2. Check with your parent
7441 block 3 < scrub_point 2. Check with your parent
7442 block 4 < scrub_point 2. Check with your parent
7443 block 5 > parent 5. Go to SubVolume
7444 block 6 > parent 5. Go to SubVolume
7445 block 0 < scrub_point 3. Check with your parent
7446 block 1 < scrub_point 3. Check with your parent
7447 block 2 < scrub_point 3. Check with your parent
7448 block 3 < scrub_point 3. Check with your parent
7449 block 4 < scrub_point 3. Check with your parent
7450 block 5 > parent 5. Go to SubVolume
7451 block 6 > parent 5. Go to SubVolume
7452 block 0+4 <= scrub_point 4. No parent check
7453 block 1 < scrub_point 4. Check with your parent
7454 block 2 < scrub_point 4. Check with your parent
7455 block 3 < scrub_point 4. Check with your parent
7456 block 4 < scrub_point 4. Check with your parent
7457 block 5 > parent 5. Go to SubVolume
7458 block 6 > parent 5. Go to SubVolume
7459 block 0+4 <= scrub_point 5. No parent check
7460 block 1+4 <= scrub_point 5. No parent check
7461 block 2 < scrub_point 5. Check with your parent
7462 block 3 < scrub_point 5. Check with your parent
7463 block 4 < scrub_point 5. Check with your parent
7464 block 5 > parent 5. Go to SubVolume
7465 block 6 > parent 5. Go to SubVolume
7466 block 0 < scrub_point 0. Check with your parent
7467 block 1 < scrub_point 0. Check with your parent
7468 block 2 < scrub_point 0. Check with your parent
7469 block 3 < scrub_point 0. Check with your parent
7470 block 4 < scrub_point 0. Check with your parent
7471 block 5 > parent 5. Go to SubVolume
7472 block 0 < scrub_point 1. Check with your parent
7473 block 1 < scrub_point 1. Check with your parent
7474 block 2 < scrub_point 1. Check with your parent
7475 block 3 < scrub_point 1. Check with your parent
7476 block 4 < scrub_point 1. Check with your parent
7477 block 5 > parent 5. Go to SubVolume
7478 block 0 < scrub_point 2. Check with your parent
7479 block 1 < scrub_point 2. Check with your parent
7480 block 2 < scrub_point 2. Check with your parent
7481 block 3 < scrub_point 2. Check with your parent
7482 block 4 < scrub_point 2. Check with your parent
7483 block 5 > parent 5. Go to SubVolume
7484 block 0 < scrub_point 3. Check with your parent
7485 block 1 < scrub_point 3. Check with your parent
7486 block 2 < scrub_point 3. Check with your parent
7487 block 3 < scrub_point 3. Check with your parent
7488 block 4 < scrub_point 3. Check with your parent
7489 block 5 > parent 5. Go to SubVolume
7490 block 0 < scrub_point 4. Check with your parent
7491 block 1 < scrub_point 4. Check with your parent
7492 block 2 < scrub_point 4. Check with your parent
7493 block 3 < scrub_point 4. Check with your parent
7494 block 4 < scrub_point 4. Check with your parent
7495 block 5 > parent 5. Go to SubVolume
7496 block 0+5 <= scrub_point 5. No parent check
7497 block 1 < scrub_point 5. Check with your parent
7498 block 2 < scrub_point 5. Check with your parent
7499 block 3 < scrub_point 5. Check with your parent
7500 block 4 < scrub_point 5. Check with your parent
7501 block 5 > parent 5. Go to SubVolume
7502 block 0 < scrub_point 0. Check with your parent
7503 block 1 < scrub_point 0. Check with your parent
7504 block 2 < scrub_point 0. Check with your parent
7505 block 3 < scrub_point 0. Check with your parent
7506 block 4 < scrub_point 0. Check with your parent
7507 block 0 < scrub_point 1. Check with your parent
7508 block 1 < scrub_point 1. Check with your parent
7509 block 2 < scrub_point 1. Check with your parent
7510 block 3 < scrub_point 1. Check with your parent
7511 block 4 < scrub_point 1. Check with your parent
7512 block 0 < scrub_point 2. Check with your parent
7513 block 1 < scrub_point 2. Check with your parent
7514 block 2 < scrub_point 2. Check with your parent
7515 block 3 < scrub_point 2. Check with your parent
7516 block 4 < scrub_point 2. Check with your parent
7517 block 0 < scrub_point 3. Check with your parent
7518 block 1 < scrub_point 3. Check with your parent
7519 block 2 < scrub_point 3. Check with your parent
7520 block 3 < scrub_point 3. Check with your parent
7521 block 4 < scrub_point 3. Check with your parent
7522 block 0 < scrub_point 4. Check with your parent
7523 block 1 < scrub_point 4. Check with your parent
7524 block 2 < scrub_point 4. Check with your parent
7525 block 3 < scrub_point 4. Check with your parent
7526 block 4 < scrub_point 4. Check with your parent
7527 block 0 < scrub_point 5. Check with your parent
7528 block 1 < scrub_point 5. Check with your parent
7529 block 2 < scrub_point 5. Check with your parent
7530 block 3 < scrub_point 5. Check with your parent
7531 block 4 < scrub_point 5. Check with your parent
7532 block 0 < scrub_point 0. Check with your parent
7533 block 1 < scrub_point 0. Check with your parent
7534 block 2 < scrub_point 0. Check with your parent
7535 block 3 < scrub_point 0. Check with your parent
7536 block 0 < scrub_point 1. Check with your parent
7537 block 1 < scrub_point 1. Check with your parent
7538 block 2 < scrub_point 1. Check with your parent
7539 block 3 < scrub_point 1. Check with your parent
7540 block 0 < scrub_point 2. Check with your parent
7541 block 1 < scrub_point 2. Check with your parent
7542 block 2 < scrub_point 2. Check with your parent
7543 block 3 < scrub_point 2. Check with your parent
7544 block 0 < scrub_point 3. Check with your parent
7545 block 1 < scrub_point 3. Check with your parent
7546 block 2 < scrub_point 3. Check with your parent
7547 block 3 < scrub_point 3. Check with your parent
7548 block 0 < scrub_point 4. Check with your parent
7549 block 1 < scrub_point 4. Check with your parent
7550 block 2 < scrub_point 4. Check with your parent
7551 block 3 < scrub_point 4. Check with your parent
7552 block 0 < scrub_point 5. Check with your parent
7553 block 1 < scrub_point 5. Check with your parent
7554 block 2 < scrub_point 5. Check with your parent
7555 block 3 < scrub_point 5. Check with your parent
7556 block 0 < scrub_point 0. Check with your parent
7557 block 1 < scrub_point 0. Check with your parent
7558 block 2 < scrub_point 0. Check with your parent
7559 block 0 < scrub_point 1. Check with your parent
7560 block 1 < scrub_point 1. Check with your parent
7561 block 2 < scrub_point 1. Check with your parent
7562 block 0 < scrub_point 2. Check with your parent
7563 block 1 < scrub_point 2. Check with your parent
7564 block 2 < scrub_point 2. Check with your parent
7565 block 0 < scrub_point 3. Check with your parent
7566 block 1 < scrub_point 3. Check with your parent
7567 block 2 < scrub_point 3. Check with your parent
7568 block 0 < scrub_point 4. Check with your parent
7569 block 1 < scrub_point 4. Check with your parent
7570 block 2 < scrub_point 4. Check with your parent
7571 block 0 < scrub_point 5. Check with your parent
7572 block 1 < scrub_point 5. Check with your parent
7573 block 2 < scrub_point 5. Check with your parent
7574 block 0 < scrub_point 0. Check with your parent
7575 block 1 < scrub_point 0. Check with your parent
7576 block 0 < scrub_point 1. Check with your parent
7577 block 1 < scrub_point 1. Check with your parent
7578 block 0 < scrub_point 2. Check with your parent
7579 block 1 < scrub_point 2. Check with your parent
7580 block 0 < scrub_point 3. Check with your parent
7581 block 1 < scrub_point 3. Check with your parent
7582 block 0 < scrub_point 4. Check with your parent
7583 block 1 < scrub_point 4. Check with your parent
7584 block 0 < scrub_point 5. Check with your parent
7585 block 1 < scrub_point 5. Check with your parent
7586 test volume::test::test_scrub_point_subvolume_smaller ... ok
7587 block 0 < scrub_point 0. Check with your parent
7588 block 1 < scrub_point 0. Check with your parent
7589 block 2 < scrub_point 0. Check with your parent
7590 block 3 < scrub_point 0. Check with your parent
7591 block 4 < scrub_point 0. Check with your parent
7592 block 5 < scrub_point 0. Check with your parent
7593 block 6 < scrub_point 0. Check with your parent
7594 block 7 < scrub_point 0. Check with your parent
7595 block 8 > parent 8. Go to SubVolume
7596 block 9 > parent 8. Go to SubVolume
7597 block 0+1 <= scrub_point 1. No parent check
7598 block 1 < scrub_point 1. Check with your parent
7599 block 2 < scrub_point 1. Check with your parent
7600 block 3 < scrub_point 1. Check with your parent
7601 block 4 < scrub_point 1. Check with your parent
7602 block 5 < scrub_point 1. Check with your parent
7603 block 6 < scrub_point 1. Check with your parent
7604 block 7 < scrub_point 1. Check with your parent
7605 block 8 > parent 8. Go to SubVolume
7606 block 9 > parent 8. Go to SubVolume
7607 block 0+1 <= scrub_point 2. No parent check
7608 block 1+1 <= scrub_point 2. No parent check
7609 block 2 < scrub_point 2. Check with your parent
7610 block 3 < scrub_point 2. Check with your parent
7611 block 4 < scrub_point 2. Check with your parent
7612 block 5 < scrub_point 2. Check with your parent
7613 block 6 < scrub_point 2. Check with your parent
7614 block 7 < scrub_point 2. Check with your parent
7615 block 8 > parent 8. Go to SubVolume
7616 block 9 > parent 8. Go to SubVolume
7617 block 0+1 <= scrub_point 3. No parent check
7618 block 1+1 <= scrub_point 3. No parent check
7619 block 2+1 <= scrub_point 3. No parent check
7620 block 3 < scrub_point 3. Check with your parent
7621 block 4 < scrub_point 3. Check with your parent
7622 block 5 < scrub_point 3. Check with your parent
7623 block 6 < scrub_point 3. Check with your parent
7624 block 7 < scrub_point 3. Check with your parent
7625 block 8 > parent 8. Go to SubVolume
7626 block 9 > parent 8. Go to SubVolume
7627 block 0+1 <= scrub_point 4. No parent check
7628 block 1+1 <= scrub_point 4. No parent check
7629 block 2+1 <= scrub_point 4. No parent check
7630 block 3+1 <= scrub_point 4. No parent check
7631 block 4 < scrub_point 4. Check with your parent
7632 block 5 < scrub_point 4. Check with your parent
7633 block 6 < scrub_point 4. Check with your parent
7634 block 7 < scrub_point 4. Check with your parent
7635 block 8 > parent 8. Go to SubVolume
7636 block 9 > parent 8. Go to SubVolume
7637 block 0+1 <= scrub_point 5. No parent check
7638 block 1+1 <= scrub_point 5. No parent check
7639 block 2+1 <= scrub_point 5. No parent check
7640 block 3+1 <= scrub_point 5. No parent check
7641 block 4+1 <= scrub_point 5. No parent check
7642 block 5 < scrub_point 5. Check with your parent
7643 block 6 < scrub_point 5. Check with your parent
7644 block 7 < scrub_point 5. Check with your parent
7645 block 8 > parent 8. Go to SubVolume
7646 block 9 > parent 8. Go to SubVolume
7647 block 0+1 <= scrub_point 6. No parent check
7648 block 1+1 <= scrub_point 6. No parent check
7649 block 2+1 <= scrub_point 6. No parent check
7650 block 3+1 <= scrub_point 6. No parent check
7651 block 4+1 <= scrub_point 6. No parent check
7652 block 5+1 <= scrub_point 6. No parent check
7653 block 6 < scrub_point 6. Check with your parent
7654 block 7 < scrub_point 6. Check with your parent
7655 block 8 > parent 8. Go to SubVolume
7656 block 9 > parent 8. Go to SubVolume
7657 block 0+1 <= scrub_point 7. No parent check
7658 block 1+1 <= scrub_point 7. No parent check
7659 block 2+1 <= scrub_point 7. No parent check
7660 block 3+1 <= scrub_point 7. No parent check
7661 block 4+1 <= scrub_point 7. No parent check
7662 block 5+1 <= scrub_point 7. No parent check
7663 block 6+1 <= scrub_point 7. No parent check
7664 block 7 < scrub_point 7. Check with your parent
7665 block 8 > parent 8. Go to SubVolume
7666 block 9 > parent 8. Go to SubVolume
7667 block 0+1 <= scrub_point 8. No parent check
7668 block 1+1 <= scrub_point 8. No parent check
7669 block 2+1 <= scrub_point 8. No parent check
7670 block 3+1 <= scrub_point 8. No parent check
7671 block 4+1 <= scrub_point 8. No parent check
7672 block 5+1 <= scrub_point 8. No parent check
7673 block 6+1 <= scrub_point 8. No parent check
7674 block 7+1 <= scrub_point 8. No parent check
7675 block 8 > parent 8. Go to SubVolume
7676 block 9 > parent 8. Go to SubVolume
7677 block 0 < scrub_point 0. Check with your parent
7678 block 1 < scrub_point 0. Check with your parent
7679 block 2 < scrub_point 0. Check with your parent
7680 block 3 < scrub_point 0. Check with your parent
7681 block 4 < scrub_point 0. Check with your parent
7682 block 5 < scrub_point 0. Check with your parent
7683 block 6 < scrub_point 0. Check with your parent
7684 block 7 < scrub_point 0. Check with your parent
7685 block 8 > parent 8. Go to SubVolume
7686 block 0 < scrub_point 1. Check with your parent
7687 block 1 < scrub_point 1. Check with your parent
7688 block 2 < scrub_point 1. Check with your parent
7689 block 3 < scrub_point 1. Check with your parent
7690 block 4 < scrub_point 1. Check with your parent
7691 block 5 < scrub_point 1. Check with your parent
7692 block 6 < scrub_point 1. Check with your parent
7693 block 7 < scrub_point 1. Check with your parent
7694 block 8 > parent 8. Go to SubVolume
7695 block 0+2 <= scrub_point 2. No parent check
7696 block 1 < scrub_point 2. Check with your parent
7697 block 2 < scrub_point 2. Check with your parent
7698 block 3 < scrub_point 2. Check with your parent
7699 block 4 < scrub_point 2. Check with your parent
7700 block 5 < scrub_point 2. Check with your parent
7701 block 6 < scrub_point 2. Check with your parent
7702 block 7 < scrub_point 2. Check with your parent
7703 block 8 > parent 8. Go to SubVolume
7704 block 0+2 <= scrub_point 3. No parent check
7705 block 1+2 <= scrub_point 3. No parent check
7706 block 2 < scrub_point 3. Check with your parent
7707 block 3 < scrub_point 3. Check with your parent
7708 block 4 < scrub_point 3. Check with your parent
7709 block 5 < scrub_point 3. Check with your parent
7710 block 6 < scrub_point 3. Check with your parent
7711 block 7 < scrub_point 3. Check with your parent
7712 block 8 > parent 8. Go to SubVolume
7713 block 0+2 <= scrub_point 4. No parent check
7714 block 1+2 <= scrub_point 4. No parent check
7715 block 2+2 <= scrub_point 4. No parent check
7716 block 3 < scrub_point 4. Check with your parent
7717 block 4 < scrub_point 4. Check with your parent
7718 block 5 < scrub_point 4. Check with your parent
7719 block 6 < scrub_point 4. Check with your parent
7720 block 7 < scrub_point 4. Check with your parent
7721 block 8 > parent 8. Go to SubVolume
7722 block 0+2 <= scrub_point 5. No parent check
7723 block 1+2 <= scrub_point 5. No parent check
7724 block 2+2 <= scrub_point 5. No parent check
7725 block 3+2 <= scrub_point 5. No parent check
7726 block 4 < scrub_point 5. Check with your parent
7727 block 5 < scrub_point 5. Check with your parent
7728 block 6 < scrub_point 5. Check with your parent
7729 block 7 < scrub_point 5. Check with your parent
7730 block 8 > parent 8. Go to SubVolume
7731 block 0+2 <= scrub_point 6. No parent check
7732 block 1+2 <= scrub_point 6. No parent check
7733 block 2+2 <= scrub_point 6. No parent check
7734 block 3+2 <= scrub_point 6. No parent check
7735 block 4+2 <= scrub_point 6. No parent check
7736 block 5 < scrub_point 6. Check with your parent
7737 block 6 < scrub_point 6. Check with your parent
7738 block 7 < scrub_point 6. Check with your parent
7739 block 8 > parent 8. Go to SubVolume
7740 block 0+2 <= scrub_point 7. No parent check
7741 block 1+2 <= scrub_point 7. No parent check
7742 block 2+2 <= scrub_point 7. No parent check
7743 block 3+2 <= scrub_point 7. No parent check
7744 block 4+2 <= scrub_point 7. No parent check
7745 block 5+2 <= scrub_point 7. No parent check
7746 block 6 < scrub_point 7. Check with your parent
7747 block 7 < scrub_point 7. Check with your parent
7748 block 8 > parent 8. Go to SubVolume
7749 block 0+2 <= scrub_point 8. No parent check
7750 block 1+2 <= scrub_point 8. No parent check
7751 block 2+2 <= scrub_point 8. No parent check
7752 block 3+2 <= scrub_point 8. No parent check
7753 block 4+2 <= scrub_point 8. No parent check
7754 block 5+2 <= scrub_point 8. No parent check
7755 block 6+2 <= scrub_point 8. No parent check
7756 block 7 < scrub_point 8. Check with your parent
7757 block 8 > parent 8. Go to SubVolume
7758 block 0 < scrub_point 0. Check with your parent
7759 block 1 < scrub_point 0. Check with your parent
7760 block 2 < scrub_point 0. Check with your parent
7761 block 3 < scrub_point 0. Check with your parent
7762 block 4 < scrub_point 0. Check with your parent
7763 block 5 < scrub_point 0. Check with your parent
7764 block 6 < scrub_point 0. Check with your parent
7765 block 7 < scrub_point 0. Check with your parent
7766 block 0 < scrub_point 1. Check with your parent
7767 block 1 < scrub_point 1. Check with your parent
7768 block 2 < scrub_point 1. Check with your parent
7769 block 3 < scrub_point 1. Check with your parent
7770 block 4 < scrub_point 1. Check with your parent
7771 block 5 < scrub_point 1. Check with your parent
7772 block 6 < scrub_point 1. Check with your parent
7773 block 7 < scrub_point 1. Check with your parent
7774 block 0 < scrub_point 2. Check with your parent
7775 block 1 < scrub_point 2. Check with your parent
7776 block 2 < scrub_point 2. Check with your parent
7777 block 3 < scrub_point 2. Check with your parent
7778 block 4 < scrub_point 2. Check with your parent
7779 block 5 < scrub_point 2. Check with your parent
7780 block 6 < scrub_point 2. Check with your parent
7781 block 7 < scrub_point 2. Check with your parent
7782 block 0+3 <= scrub_point 3. No parent check
7783 block 1 < scrub_point 3. Check with your parent
7784 block 2 < scrub_point 3. Check with your parent
7785 block 3 < scrub_point 3. Check with your parent
7786 block 4 < scrub_point 3. Check with your parent
7787 block 5 < scrub_point 3. Check with your parent
7788 block 6 < scrub_point 3. Check with your parent
7789 block 7 < scrub_point 3. Check with your parent
7790 block 0+3 <= scrub_point 4. No parent check
7791 block 1+3 <= scrub_point 4. No parent check
7792 block 2 < scrub_point 4. Check with your parent
7793 block 3 < scrub_point 4. Check with your parent
7794 block 4 < scrub_point 4. Check with your parent
7795 block 5 < scrub_point 4. Check with your parent
7796 block 6 < scrub_point 4. Check with your parent
7797 block 7 < scrub_point 4. Check with your parent
7798 block 0+3 <= scrub_point 5. No parent check
7799 block 1+3 <= scrub_point 5. No parent check
7800 block 2+3 <= scrub_point 5. No parent check
7801 block 3 < scrub_point 5. Check with your parent
7802 block 4 < scrub_point 5. Check with your parent
7803 block 5 < scrub_point 5. Check with your parent
7804 block 6 < scrub_point 5. Check with your parent
7805 block 7 < scrub_point 5. Check with your parent
7806 block 0+3 <= scrub_point 6. No parent check
7807 block 1+3 <= scrub_point 6. No parent check
7808 block 2+3 <= scrub_point 6. No parent check
7809 block 3+3 <= scrub_point 6. No parent check
7810 block 4 < scrub_point 6. Check with your parent
7811 block 5 < scrub_point 6. Check with your parent
7812 block 6 < scrub_point 6. Check with your parent
7813 block 7 < scrub_point 6. Check with your parent
7814 block 0+3 <= scrub_point 7. No parent check
7815 block 1+3 <= scrub_point 7. No parent check
7816 block 2+3 <= scrub_point 7. No parent check
7817 block 3+3 <= scrub_point 7. No parent check
7818 block 4+3 <= scrub_point 7. No parent check
7819 block 5 < scrub_point 7. Check with your parent
7820 block 6 < scrub_point 7. Check with your parent
7821 block 7 < scrub_point 7. Check with your parent
7822 block 0+3 <= scrub_point 8. No parent check
7823 block 1+3 <= scrub_point 8. No parent check
7824 block 2+3 <= scrub_point 8. No parent check
7825 block 3+3 <= scrub_point 8. No parent check
7826 block 4+3 <= scrub_point 8. No parent check
7827 block 5+3 <= scrub_point 8. No parent check
7828 block 6 < scrub_point 8. Check with your parent
7829 block 7 < scrub_point 8. Check with your parent
7830 block 0 < scrub_point 0. Check with your parent
7831 block 1 < scrub_point 0. Check with your parent
7832 block 2 < scrub_point 0. Check with your parent
7833 block 3 < scrub_point 0. Check with your parent
7834 block 4 < scrub_point 0. Check with your parent
7835 block 5 < scrub_point 0. Check with your parent
7836 block 6 < scrub_point 0. Check with your parent
7837 block 0 < scrub_point 1. Check with your parent
7838 block 1 < scrub_point 1. Check with your parent
7839 block 2 < scrub_point 1. Check with your parent
7840 block 3 < scrub_point 1. Check with your parent
7841 block 4 < scrub_point 1. Check with your parent
7842 block 5 < scrub_point 1. Check with your parent
7843 block 6 < scrub_point 1. Check with your parent
7844 block 0 < scrub_point 2. Check with your parent
7845 block 1 < scrub_point 2. Check with your parent
7846 block 2 < scrub_point 2. Check with your parent
7847 block 3 < scrub_point 2. Check with your parent
7848 block 4 < scrub_point 2. Check with your parent
7849 block 5 < scrub_point 2. Check with your parent
7850 block 6 < scrub_point 2. Check with your parent
7851 block 0 < scrub_point 3. Check with your parent
7852 block 1 < scrub_point 3. Check with your parent
7853 block 2 < scrub_point 3. Check with your parent
7854 block 3 < scrub_point 3. Check with your parent
7855 block 4 < scrub_point 3. Check with your parent
7856 block 5 < scrub_point 3. Check with your parent
7857 block 6 < scrub_point 3. Check with your parent
7858 block 0+4 <= scrub_point 4. No parent check
7859 block 1 < scrub_point 4. Check with your parent
7860 block 2 < scrub_point 4. Check with your parent
7861 block 3 < scrub_point 4. Check with your parent
7862 block 4 < scrub_point 4. Check with your parent
7863 block 5 < scrub_point 4. Check with your parent
7864 block 6 < scrub_point 4. Check with your parent
7865 block 0+4 <= scrub_point 5. No parent check
7866 block 1+4 <= scrub_point 5. No parent check
7867 block 2 < scrub_point 5. Check with your parent
7868 block 3 < scrub_point 5. Check with your parent
7869 block 4 < scrub_point 5. Check with your parent
7870 block 5 < scrub_point 5. Check with your parent
7871 block 6 < scrub_point 5. Check with your parent
7872 block 0+4 <= scrub_point 6. No parent check
7873 block 1+4 <= scrub_point 6. No parent check
7874 block 2+4 <= scrub_point 6. No parent check
7875 block 3 < scrub_point 6. Check with your parent
7876 block 4 < scrub_point 6. Check with your parent
7877 block 5 < scrub_point 6. Check with your parent
7878 block 6 < scrub_point 6. Check with your parent
7879 block 0+4 <= scrub_point 7. No parent check
7880 block 1+4 <= scrub_point 7. No parent check
7881 block 2+4 <= scrub_point 7. No parent check
7882 block 3+4 <= scrub_point 7. No parent check
7883 block 4 < scrub_point 7. Check with your parent
7884 block 5 < scrub_point 7. Check with your parent
7885 block 6 < scrub_point 7. Check with your parent
7886 block 0+4 <= scrub_point 8. No parent check
7887 block 1+4 <= scrub_point 8. No parent check
7888 block 2+4 <= scrub_point 8. No parent check
7889 block 3+4 <= scrub_point 8. No parent check
7890 block 4+4 <= scrub_point 8. No parent check
7891 block 5 < scrub_point 8. Check with your parent
7892 block 6 < scrub_point 8. Check with your parent
7893 block 0 < scrub_point 0. Check with your parent
7894 block 1 < scrub_point 0. Check with your parent
7895 block 2 < scrub_point 0. Check with your parent
7896 block 3 < scrub_point 0. Check with your parent
7897 block 4 < scrub_point 0. Check with your parent
7898 block 5 < scrub_point 0. Check with your parent
7899 block 0 < scrub_point 1. Check with your parent
7900 block 1 < scrub_point 1. Check with your parent
7901 block 2 < scrub_point 1. Check with your parent
7902 block 3 < scrub_point 1. Check with your parent
7903 block 4 < scrub_point 1. Check with your parent
7904 block 5 < scrub_point 1. Check with your parent
7905 block 0 < scrub_point 2. Check with your parent
7906 block 1 < scrub_point 2. Check with your parent
7907 block 2 < scrub_point 2. Check with your parent
7908 block 3 < scrub_point 2. Check with your parent
7909 block 4 < scrub_point 2. Check with your parent
7910 block 5 < scrub_point 2. Check with your parent
7911 block 0 < scrub_point 3. Check with your parent
7912 block 1 < scrub_point 3. Check with your parent
7913 block 2 < scrub_point 3. Check with your parent
7914 block 3 < scrub_point 3. Check with your parent
7915 block 4 < scrub_point 3. Check with your parent
7916 block 5 < scrub_point 3. Check with your parent
7917 block 0 < scrub_point 4. Check with your parent
7918 block 1 < scrub_point 4. Check with your parent
7919 block 2 < scrub_point 4. Check with your parent
7920 block 3 < scrub_point 4. Check with your parent
7921 block 4 < scrub_point 4. Check with your parent
7922 block 5 < scrub_point 4. Check with your parent
7923 block 0+5 <= scrub_point 5. No parent check
7924 block 1 < scrub_point 5. Check with your parent
7925 block 2 < scrub_point 5. Check with your parent
7926 block 3 < scrub_point 5. Check with your parent
7927 block 4 < scrub_point 5. Check with your parent
7928 block 5 < scrub_point 5. Check with your parent
7929 block 0+5 <= scrub_point 6. No parent check
7930 block 1+5 <= scrub_point 6. No parent check
7931 block 2 < scrub_point 6. Check with your parent
7932 block 3 < scrub_point 6. Check with your parent
7933 block 4 < scrub_point 6. Check with your parent
7934 block 5 < scrub_point 6. Check with your parent
7935 block 0+5 <= scrub_point 7. No parent check
7936 block 1+5 <= scrub_point 7. No parent check
7937 block 2+5 <= scrub_point 7. No parent check
7938 block 3 < scrub_point 7. Check with your parent
7939 block 4 < scrub_point 7. Check with your parent
7940 block 5 < scrub_point 7. Check with your parent
7941 block 0+5 <= scrub_point 8. No parent check
7942 block 1+5 <= scrub_point 8. No parent check
7943 block 2+5 <= scrub_point 8. No parent check
7944 block 3+5 <= scrub_point 8. No parent check
7945 block 4 < scrub_point 8. Check with your parent
7946 block 5 < scrub_point 8. Check with your parent
7947 block 0 < scrub_point 0. Check with your parent
7948 block 1 < scrub_point 0. Check with your parent
7949 block 2 < scrub_point 0. Check with your parent
7950 block 3 < scrub_point 0. Check with your parent
7951 block 4 < scrub_point 0. Check with your parent
7952 block 0 < scrub_point 1. Check with your parent
7953 block 1 < scrub_point 1. Check with your parent
7954 block 2 < scrub_point 1. Check with your parent
7955 block 3 < scrub_point 1. Check with your parent
7956 block 4 < scrub_point 1. Check with your parent
7957 block 0 < scrub_point 2. Check with your parent
7958 block 1 < scrub_point 2. Check with your parent
7959 block 2 < scrub_point 2. Check with your parent
7960 block 3 < scrub_point 2. Check with your parent
7961 block 4 < scrub_point 2. Check with your parent
7962 block 0 < scrub_point 3. Check with your parent
7963 block 1 < scrub_point 3. Check with your parent
7964 block 2 < scrub_point 3. Check with your parent
7965 block 3 < scrub_point 3. Check with your parent
7966 block 4 < scrub_point 3. Check with your parent
7967 block 0 < scrub_point 4. Check with your parent
7968 block 1 < scrub_point 4. Check with your parent
7969 block 2 < scrub_point 4. Check with your parent
7970 block 3 < scrub_point 4. Check with your parent
7971 block 4 < scrub_point 4. Check with your parent
7972 block 0 < scrub_point 5. Check with your parent
7973 block 1 < scrub_point 5. Check with your parent
7974 block 2 < scrub_point 5. Check with your parent
7975 block 3 < scrub_point 5. Check with your parent
7976 block 4 < scrub_point 5. Check with your parent
7977 block 0+6 <= scrub_point 6. No parent check
7978 block 1 < scrub_point 6. Check with your parent
7979 block 2 < scrub_point 6. Check with your parent
7980 block 3 < scrub_point 6. Check with your parent
7981 block 4 < scrub_point 6. Check with your parent
7982 block 0+6 <= scrub_point 7. No parent check
7983 block 1+6 <= scrub_point 7. No parent check
7984 block 2 < scrub_point 7. Check with your parent
7985 block 3 < scrub_point 7. Check with your parent
7986 block 4 < scrub_point 7. Check with your parent
7987 block 0+6 <= scrub_point 8. No parent check
7988 block 1+6 <= scrub_point 8. No parent check
7989 block 2+6 <= scrub_point 8. No parent check
7990 block 3 < scrub_point 8. Check with your parent
7991 block 4 < scrub_point 8. Check with your parent
7992 block 0 < scrub_point 0. Check with your parent
7993 block 1 < scrub_point 0. Check with your parent
7994 block 2 < scrub_point 0. Check with your parent
7995 block 3 < scrub_point 0. Check with your parent
7996 block 0 < scrub_point 1. Check with your parent
7997 block 1 < scrub_point 1. Check with your parent
7998 block 2 < scrub_point 1. Check with your parent
7999 block 3 < scrub_point 1. Check with your parent
8000 block 0 < scrub_point 2. Check with your parent
8001 block 1 < scrub_point 2. Check with your parent
8002 block 2 < scrub_point 2. Check with your parent
8003 block 3 < scrub_point 2. Check with your parent
8004 block 0 < scrub_point 3. Check with your parent
8005 block 1 < scrub_point 3. Check with your parent
8006 block 2 < scrub_point 3. Check with your parent
8007 block 3 < scrub_point 3. Check with your parent
8008 block 0 < scrub_point 4. Check with your parent
8009 block 1 < scrub_point 4. Check with your parent
8010 block 2 < scrub_point 4. Check with your parent
8011 block 3 < scrub_point 4. Check with your parent
8012 block 0 < scrub_point 5. Check with your parent
8013 block 1 < scrub_point 5. Check with your parent
8014 block 2 < scrub_point 5. Check with your parent
8015 block 3 < scrub_point 5. Check with your parent
8016 block 0 < scrub_point 6. Check with your parent
8017 block 1 < scrub_point 6. Check with your parent
8018 block 2 < scrub_point 6. Check with your parent
8019 block 3 < scrub_point 6. Check with your parent
8020 block 0+7 <= scrub_point 7. No parent check
8021 block 1 < scrub_point 7. Check with your parent
8022 block 2 < scrub_point 7. Check with your parent
8023 block 3 < scrub_point 7. Check with your parent
8024 block 0+7 <= scrub_point 8. No parent check
8025 block 1+7 <= scrub_point 8. No parent check
8026 block 2 < scrub_point 8. Check with your parent
8027 block 3 < scrub_point 8. Check with your parent
8028 block 0 < scrub_point 0. Check with your parent
8029 block 1 < scrub_point 0. Check with your parent
8030 block 2 < scrub_point 0. Check with your parent
8031 block 0 < scrub_point 1. Check with your parent
8032 block 1 < scrub_point 1. Check with your parent
8033 block 2 < scrub_point 1. Check with your parent
8034 block 0 < scrub_point 2. Check with your parent
8035 block 1 < scrub_point 2. Check with your parent
8036 block 2 < scrub_point 2. Check with your parent
8037 block 0 < scrub_point 3. Check with your parent
8038 block 1 < scrub_point 3. Check with your parent
8039 block 2 < scrub_point 3. Check with your parent
8040 block 0 < scrub_point 4. Check with your parent
8041 block 1 < scrub_point 4. Check with your parent
8042 block 2 < scrub_point 4. Check with your parent
8043 block 0 < scrub_point 5. Check with your parent
8044 block 1 < scrub_point 5. Check with your parent
8045 block 2 < scrub_point 5. Check with your parent
8046 block 0 < scrub_point 6. Check with your parent
8047 block 1 < scrub_point 6. Check with your parent
8048 block 2 < scrub_point 6. Check with your parent
8049 block 0 < scrub_point 7. Check with your parent
8050 block 1 < scrub_point 7. Check with your parent
8051 block 2 < scrub_point 7. Check with your parent
8052 block 0+8 <= scrub_point 8. No parent check
8053 block 1 < scrub_point 8. Check with your parent
8054 block 2 < scrub_point 8. Check with your parent
8055 block 0 < scrub_point 0. Check with your parent
8056 block 1 < scrub_point 0. Check with your parent
8057 block 0 < scrub_point 1. Check with your parent
8058 block 1 < scrub_point 1. Check with your parent
8059 block 0 < scrub_point 2. Check with your parent
8060 block 1 < scrub_point 2. Check with your parent
8061 block 0 < scrub_point 3. Check with your parent
8062 block 1 < scrub_point 3. Check with your parent
8063 block 0 < scrub_point 4. Check with your parent
8064 block 1 < scrub_point 4. Check with your parent
8065 block 0 < scrub_point 5. Check with your parent
8066 block 1 < scrub_point 5. Check with your parent
8067 block 0 < scrub_point 6. Check with your parent
8068 block 1 < scrub_point 6. Check with your parent
8069 block 0 < scrub_point 7. Check with your parent
8070 block 1 < scrub_point 7. Check with your parent
8071 block 0 < scrub_point 8. Check with your parent
8072 block 1 < scrub_point 8. Check with your parent
8073 test volume::test::test_scrub_point_two_subvolume_equal ... ok
8074 block 0 < scrub_point 0. Check with your parent
8075 block 1 < scrub_point 0. Check with your parent
8076 block 2 < scrub_point 0. Check with your parent
8077 block 3 < scrub_point 0. Check with your parent
8078 block 4 < scrub_point 0. Check with your parent
8079 block 5 > parent 5. Go to SubVolume
8080 block 6 > parent 5. Go to SubVolume
8081 block 7 > parent 5. Go to SubVolume
8082 block 8 > parent 5. Go to SubVolume
8083 block 9 > parent 5. Go to SubVolume
8084 block 10 > parent 5. Go to SubVolume
8085 block 11 > parent 5. Go to SubVolume
8086 block 12 > parent 5. Go to SubVolume
8087 block 13 > parent 5. Go to SubVolume
8088 block 14 > parent 5. Go to SubVolume
8089 block 0+1 <= scrub_point 1. No parent check
8090 block 1 < scrub_point 1. Check with your parent
8091 block 2 < scrub_point 1. Check with your parent
8092 block 3 < scrub_point 1. Check with your parent
8093 block 4 < scrub_point 1. Check with your parent
8094 block 5 > parent 5. Go to SubVolume
8095 block 6 > parent 5. Go to SubVolume
8096 block 7 > parent 5. Go to SubVolume
8097 block 8 > parent 5. Go to SubVolume
8098 block 9 > parent 5. Go to SubVolume
8099 block 10 > parent 5. Go to SubVolume
8100 block 11 > parent 5. Go to SubVolume
8101 block 12 > parent 5. Go to SubVolume
8102 block 13 > parent 5. Go to SubVolume
8103 block 14 > parent 5. Go to SubVolume
8104 block 0+1 <= scrub_point 2. No parent check
8105 block 1+1 <= scrub_point 2. No parent check
8106 block 2 < scrub_point 2. Check with your parent
8107 block 3 < scrub_point 2. Check with your parent
8108 block 4 < scrub_point 2. Check with your parent
8109 block 5 > parent 5. Go to SubVolume
8110 block 6 > parent 5. Go to SubVolume
8111 block 7 > parent 5. Go to SubVolume
8112 block 8 > parent 5. Go to SubVolume
8113 block 9 > parent 5. Go to SubVolume
8114 block 10 > parent 5. Go to SubVolume
8115 block 11 > parent 5. Go to SubVolume
8116 block 12 > parent 5. Go to SubVolume
8117 block 13 > parent 5. Go to SubVolume
8118 block 14 > parent 5. Go to SubVolume
8119 block 0+1 <= scrub_point 3. No parent check
8120 block 1+1 <= scrub_point 3. No parent check
8121 block 2+1 <= scrub_point 3. No parent check
8122 block 3 < scrub_point 3. Check with your parent
8123 block 4 < scrub_point 3. Check with your parent
8124 block 5 > parent 5. Go to SubVolume
8125 block 6 > parent 5. Go to SubVolume
8126 block 7 > parent 5. Go to SubVolume
8127 block 8 > parent 5. Go to SubVolume
8128 block 9 > parent 5. Go to SubVolume
8129 block 10 > parent 5. Go to SubVolume
8130 block 11 > parent 5. Go to SubVolume
8131 block 12 > parent 5. Go to SubVolume
8132 block 13 > parent 5. Go to SubVolume
8133 block 14 > parent 5. Go to SubVolume
8134 block 0+1 <= scrub_point 4. No parent check
8135 block 1+1 <= scrub_point 4. No parent check
8136 block 2+1 <= scrub_point 4. No parent check
8137 block 3+1 <= scrub_point 4. No parent check
8138 block 4 < scrub_point 4. Check with your parent
8139 block 5 > parent 5. Go to SubVolume
8140 block 6 > parent 5. Go to SubVolume
8141 block 7 > parent 5. Go to SubVolume
8142 block 8 > parent 5. Go to SubVolume
8143 block 9 > parent 5. Go to SubVolume
8144 block 10 > parent 5. Go to SubVolume
8145 block 11 > parent 5. Go to SubVolume
8146 block 12 > parent 5. Go to SubVolume
8147 block 13 > parent 5. Go to SubVolume
8148 block 14 > parent 5. Go to SubVolume
8149 block 0+1 <= scrub_point 5. No parent check
8150 block 1+1 <= scrub_point 5. No parent check
8151 block 2+1 <= scrub_point 5. No parent check
8152 block 3+1 <= scrub_point 5. No parent check
8153 block 4+1 <= scrub_point 5. No parent check
8154 block 5 > parent 5. Go to SubVolume
8155 block 6 > parent 5. Go to SubVolume
8156 block 7 > parent 5. Go to SubVolume
8157 block 8 > parent 5. Go to SubVolume
8158 block 9 > parent 5. Go to SubVolume
8159 block 10 > parent 5. Go to SubVolume
8160 block 11 > parent 5. Go to SubVolume
8161 block 12 > parent 5. Go to SubVolume
8162 block 13 > parent 5. Go to SubVolume
8163 block 14 > parent 5. Go to SubVolume
8164 block 0 < scrub_point 0. Check with your parent
8165 block 1 < scrub_point 0. Check with your parent
8166 block 2 < scrub_point 0. Check with your parent
8167 block 3 < scrub_point 0. Check with your parent
8168 block 4 < scrub_point 0. Check with your parent
8169 block 5 > parent 5. Go to SubVolume
8170 block 6 > parent 5. Go to SubVolume
8171 block 7 > parent 5. Go to SubVolume
8172 block 8 > parent 5. Go to SubVolume
8173 block 9 > parent 5. Go to SubVolume
8174 block 10 > parent 5. Go to SubVolume
8175 block 11 > parent 5. Go to SubVolume
8176 block 12 > parent 5. Go to SubVolume
8177 block 13 > parent 5. Go to SubVolume
8178 block 0 < scrub_point 1. Check with your parent
8179 block 1 < scrub_point 1. Check with your parent
8180 block 2 < scrub_point 1. Check with your parent
8181 block 3 < scrub_point 1. Check with your parent
8182 block 4 < scrub_point 1. Check with your parent
8183 block 5 > parent 5. Go to SubVolume
8184 block 6 > parent 5. Go to SubVolume
8185 block 7 > parent 5. Go to SubVolume
8186 block 8 > parent 5. Go to SubVolume
8187 block 9 > parent 5. Go to SubVolume
8188 block 10 > parent 5. Go to SubVolume
8189 block 11 > parent 5. Go to SubVolume
8190 block 12 > parent 5. Go to SubVolume
8191 block 13 > parent 5. Go to SubVolume
8192 block 0+2 <= scrub_point 2. No parent check
8193 block 1 < scrub_point 2. Check with your parent
8194 block 2 < scrub_point 2. Check with your parent
8195 block 3 < scrub_point 2. Check with your parent
8196 block 4 < scrub_point 2. Check with your parent
8197 block 5 > parent 5. Go to SubVolume
8198 block 6 > parent 5. Go to SubVolume
8199 block 7 > parent 5. Go to SubVolume
8200 block 8 > parent 5. Go to SubVolume
8201 block 9 > parent 5. Go to SubVolume
8202 block 10 > parent 5. Go to SubVolume
8203 block 11 > parent 5. Go to SubVolume
8204 block 12 > parent 5. Go to SubVolume
8205 block 13 > parent 5. Go to SubVolume
8206 block 0+2 <= scrub_point 3. No parent check
8207 block 1+2 <= scrub_point 3. No parent check
8208 block 2 < scrub_point 3. Check with your parent
8209 block 3 < scrub_point 3. Check with your parent
8210 block 4 < scrub_point 3. Check with your parent
8211 block 5 > parent 5. Go to SubVolume
8212 block 6 > parent 5. Go to SubVolume
8213 block 7 > parent 5. Go to SubVolume
8214 block 8 > parent 5. Go to SubVolume
8215 block 9 > parent 5. Go to SubVolume
8216 block 10 > parent 5. Go to SubVolume
8217 block 11 > parent 5. Go to SubVolume
8218 block 12 > parent 5. Go to SubVolume
8219 block 13 > parent 5. Go to SubVolume
8220 block 0+2 <= scrub_point 4. No parent check
8221 block 1+2 <= scrub_point 4. No parent check
8222 block 2+2 <= scrub_point 4. No parent check
8223 block 3 < scrub_point 4. Check with your parent
8224 block 4 < scrub_point 4. Check with your parent
8225 block 5 > parent 5. Go to SubVolume
8226 block 6 > parent 5. Go to SubVolume
8227 block 7 > parent 5. Go to SubVolume
8228 block 8 > parent 5. Go to SubVolume
8229 block 9 > parent 5. Go to SubVolume
8230 block 10 > parent 5. Go to SubVolume
8231 block 11 > parent 5. Go to SubVolume
8232 block 12 > parent 5. Go to SubVolume
8233 block 13 > parent 5. Go to SubVolume
8234 block 0+2 <= scrub_point 5. No parent check
8235 block 1+2 <= scrub_point 5. No parent check
8236 block 2+2 <= scrub_point 5. No parent check
8237 block 3+2 <= scrub_point 5. No parent check
8238 block 4 < scrub_point 5. Check with your parent
8239 block 5 > parent 5. Go to SubVolume
8240 block 6 > parent 5. Go to SubVolume
8241 block 7 > parent 5. Go to SubVolume
8242 block 8 > parent 5. Go to SubVolume
8243 block 9 > parent 5. Go to SubVolume
8244 block 10 > parent 5. Go to SubVolume
8245 block 11 > parent 5. Go to SubVolume
8246 block 12 > parent 5. Go to SubVolume
8247 block 13 > parent 5. Go to SubVolume
8248 block 0 < scrub_point 0. Check with your parent
8249 block 1 < scrub_point 0. Check with your parent
8250 block 2 < scrub_point 0. Check with your parent
8251 block 3 < scrub_point 0. Check with your parent
8252 block 4 < scrub_point 0. Check with your parent
8253 block 5 > parent 5. Go to SubVolume
8254 block 6 > parent 5. Go to SubVolume
8255 block 7 > parent 5. Go to SubVolume
8256 block 8 > parent 5. Go to SubVolume
8257 block 9 > parent 5. Go to SubVolume
8258 block 10 > parent 5. Go to SubVolume
8259 block 11 > parent 5. Go to SubVolume
8260 block 12 > parent 5. Go to SubVolume
8261 block 0 < scrub_point 1. Check with your parent
8262 block 1 < scrub_point 1. Check with your parent
8263 block 2 < scrub_point 1. Check with your parent
8264 block 3 < scrub_point 1. Check with your parent
8265 block 4 < scrub_point 1. Check with your parent
8266 block 5 > parent 5. Go to SubVolume
8267 block 6 > parent 5. Go to SubVolume
8268 block 7 > parent 5. Go to SubVolume
8269 block 8 > parent 5. Go to SubVolume
8270 block 9 > parent 5. Go to SubVolume
8271 block 10 > parent 5. Go to SubVolume
8272 block 11 > parent 5. Go to SubVolume
8273 block 12 > parent 5. Go to SubVolume
8274 block 0 < scrub_point 2. Check with your parent
8275 block 1 < scrub_point 2. Check with your parent
8276 block 2 < scrub_point 2. Check with your parent
8277 block 3 < scrub_point 2. Check with your parent
8278 block 4 < scrub_point 2. Check with your parent
8279 block 5 > parent 5. Go to SubVolume
8280 block 6 > parent 5. Go to SubVolume
8281 block 7 > parent 5. Go to SubVolume
8282 block 8 > parent 5. Go to SubVolume
8283 block 9 > parent 5. Go to SubVolume
8284 block 10 > parent 5. Go to SubVolume
8285 block 11 > parent 5. Go to SubVolume
8286 block 12 > parent 5. Go to SubVolume
8287 block 0+3 <= scrub_point 3. No parent check
8288 block 1 < scrub_point 3. Check with your parent
8289 block 2 < scrub_point 3. Check with your parent
8290 block 3 < scrub_point 3. Check with your parent
8291 block 4 < scrub_point 3. Check with your parent
8292 block 5 > parent 5. Go to SubVolume
8293 block 6 > parent 5. Go to SubVolume
8294 block 7 > parent 5. Go to SubVolume
8295 block 8 > parent 5. Go to SubVolume
8296 block 9 > parent 5. Go to SubVolume
8297 block 10 > parent 5. Go to SubVolume
8298 block 11 > parent 5. Go to SubVolume
8299 block 12 > parent 5. Go to SubVolume
8300 block 0+3 <= scrub_point 4. No parent check
8301 block 1+3 <= scrub_point 4. No parent check
8302 block 2 < scrub_point 4. Check with your parent
8303 block 3 < scrub_point 4. Check with your parent
8304 block 4 < scrub_point 4. Check with your parent
8305 block 5 > parent 5. Go to SubVolume
8306 block 6 > parent 5. Go to SubVolume
8307 block 7 > parent 5. Go to SubVolume
8308 block 8 > parent 5. Go to SubVolume
8309 block 9 > parent 5. Go to SubVolume
8310 block 10 > parent 5. Go to SubVolume
8311 block 11 > parent 5. Go to SubVolume
8312 block 12 > parent 5. Go to SubVolume
8313 block 0+3 <= scrub_point 5. No parent check
8314 block 1+3 <= scrub_point 5. No parent check
8315 block 2+3 <= scrub_point 5. No parent check
8316 block 3 < scrub_point 5. Check with your parent
8317 block 4 < scrub_point 5. Check with your parent
8318 block 5 > parent 5. Go to SubVolume
8319 block 6 > parent 5. Go to SubVolume
8320 block 7 > parent 5. Go to SubVolume
8321 block 8 > parent 5. Go to SubVolume
8322 block 9 > parent 5. Go to SubVolume
8323 block 10 > parent 5. Go to SubVolume
8324 block 11 > parent 5. Go to SubVolume
8325 block 12 > parent 5. Go to SubVolume
8326 block 0 < scrub_point 0. Check with your parent
8327 block 1 < scrub_point 0. Check with your parent
8328 block 2 < scrub_point 0. Check with your parent
8329 block 3 < scrub_point 0. Check with your parent
8330 block 4 < scrub_point 0. Check with your parent
8331 block 5 > parent 5. Go to SubVolume
8332 block 6 > parent 5. Go to SubVolume
8333 block 7 > parent 5. Go to SubVolume
8334 block 8 > parent 5. Go to SubVolume
8335 block 9 > parent 5. Go to SubVolume
8336 block 10 > parent 5. Go to SubVolume
8337 block 11 > parent 5. Go to SubVolume
8338 block 0 < scrub_point 1. Check with your parent
8339 block 1 < scrub_point 1. Check with your parent
8340 block 2 < scrub_point 1. Check with your parent
8341 block 3 < scrub_point 1. Check with your parent
8342 block 4 < scrub_point 1. Check with your parent
8343 block 5 > parent 5. Go to SubVolume
8344 block 6 > parent 5. Go to SubVolume
8345 block 7 > parent 5. Go to SubVolume
8346 block 8 > parent 5. Go to SubVolume
8347 block 9 > parent 5. Go to SubVolume
8348 block 10 > parent 5. Go to SubVolume
8349 block 11 > parent 5. Go to SubVolume
8350 block 0 < scrub_point 2. Check with your parent
8351 block 1 < scrub_point 2. Check with your parent
8352 block 2 < scrub_point 2. Check with your parent
8353 block 3 < scrub_point 2. Check with your parent
8354 block 4 < scrub_point 2. Check with your parent
8355 block 5 > parent 5. Go to SubVolume
8356 block 6 > parent 5. Go to SubVolume
8357 block 7 > parent 5. Go to SubVolume
8358 block 8 > parent 5. Go to SubVolume
8359 block 9 > parent 5. Go to SubVolume
8360 block 10 > parent 5. Go to SubVolume
8361 block 11 > parent 5. Go to SubVolume
8362 block 0 < scrub_point 3. Check with your parent
8363 block 1 < scrub_point 3. Check with your parent
8364 block 2 < scrub_point 3. Check with your parent
8365 block 3 < scrub_point 3. Check with your parent
8366 block 4 < scrub_point 3. Check with your parent
8367 block 5 > parent 5. Go to SubVolume
8368 block 6 > parent 5. Go to SubVolume
8369 block 7 > parent 5. Go to SubVolume
8370 block 8 > parent 5. Go to SubVolume
8371 block 9 > parent 5. Go to SubVolume
8372 block 10 > parent 5. Go to SubVolume
8373 block 11 > parent 5. Go to SubVolume
8374 block 0+4 <= scrub_point 4. No parent check
8375 block 1 < scrub_point 4. Check with your parent
8376 block 2 < scrub_point 4. Check with your parent
8377 block 3 < scrub_point 4. Check with your parent
8378 block 4 < scrub_point 4. Check with your parent
8379 block 5 > parent 5. Go to SubVolume
8380 block 6 > parent 5. Go to SubVolume
8381 block 7 > parent 5. Go to SubVolume
8382 block 8 > parent 5. Go to SubVolume
8383 block 9 > parent 5. Go to SubVolume
8384 block 10 > parent 5. Go to SubVolume
8385 block 11 > parent 5. Go to SubVolume
8386 block 0+4 <= scrub_point 5. No parent check
8387 block 1+4 <= scrub_point 5. No parent check
8388 block 2 < scrub_point 5. Check with your parent
8389 block 3 < scrub_point 5. Check with your parent
8390 block 4 < scrub_point 5. Check with your parent
8391 block 5 > parent 5. Go to SubVolume
8392 block 6 > parent 5. Go to SubVolume
8393 block 7 > parent 5. Go to SubVolume
8394 block 8 > parent 5. Go to SubVolume
8395 block 9 > parent 5. Go to SubVolume
8396 block 10 > parent 5. Go to SubVolume
8397 block 11 > parent 5. Go to SubVolume
8398 block 0 < scrub_point 0. Check with your parent
8399 block 1 < scrub_point 0. Check with your parent
8400 block 2 < scrub_point 0. Check with your parent
8401 block 3 < scrub_point 0. Check with your parent
8402 block 4 < scrub_point 0. Check with your parent
8403 block 5 > parent 5. Go to SubVolume
8404 block 6 > parent 5. Go to SubVolume
8405 block 7 > parent 5. Go to SubVolume
8406 block 8 > parent 5. Go to SubVolume
8407 block 9 > parent 5. Go to SubVolume
8408 block 10 > parent 5. Go to SubVolume
8409 block 0 < scrub_point 1. Check with your parent
8410 block 1 < scrub_point 1. Check with your parent
8411 block 2 < scrub_point 1. Check with your parent
8412 block 3 < scrub_point 1. Check with your parent
8413 block 4 < scrub_point 1. Check with your parent
8414 block 5 > parent 5. Go to SubVolume
8415 block 6 > parent 5. Go to SubVolume
8416 block 7 > parent 5. Go to SubVolume
8417 block 8 > parent 5. Go to SubVolume
8418 block 9 > parent 5. Go to SubVolume
8419 block 10 > parent 5. Go to SubVolume
8420 block 0 < scrub_point 2. Check with your parent
8421 block 1 < scrub_point 2. Check with your parent
8422 block 2 < scrub_point 2. Check with your parent
8423 block 3 < scrub_point 2. Check with your parent
8424 block 4 < scrub_point 2. Check with your parent
8425 block 5 > parent 5. Go to SubVolume
8426 block 6 > parent 5. Go to SubVolume
8427 block 7 > parent 5. Go to SubVolume
8428 block 8 > parent 5. Go to SubVolume
8429 block 9 > parent 5. Go to SubVolume
8430 block 10 > parent 5. Go to SubVolume
8431 block 0 < scrub_point 3. Check with your parent
8432 block 1 < scrub_point 3. Check with your parent
8433 block 2 < scrub_point 3. Check with your parent
8434 block 3 < scrub_point 3. Check with your parent
8435 block 4 < scrub_point 3. Check with your parent
8436 block 5 > parent 5. Go to SubVolume
8437 block 6 > parent 5. Go to SubVolume
8438 block 7 > parent 5. Go to SubVolume
8439 block 8 > parent 5. Go to SubVolume
8440 block 9 > parent 5. Go to SubVolume
8441 block 10 > parent 5. Go to SubVolume
8442 block 0 < scrub_point 4. Check with your parent
8443 block 1 < scrub_point 4. Check with your parent
8444 block 2 < scrub_point 4. Check with your parent
8445 block 3 < scrub_point 4. Check with your parent
8446 block 4 < scrub_point 4. Check with your parent
8447 block 5 > parent 5. Go to SubVolume
8448 block 6 > parent 5. Go to SubVolume
8449 block 7 > parent 5. Go to SubVolume
8450 block 8 > parent 5. Go to SubVolume
8451 block 9 > parent 5. Go to SubVolume
8452 block 10 > parent 5. Go to SubVolume
8453 block 0+5 <= scrub_point 5. No parent check
8454 block 1 < scrub_point 5. Check with your parent
8455 block 2 < scrub_point 5. Check with your parent
8456 block 3 < scrub_point 5. Check with your parent
8457 block 4 < scrub_point 5. Check with your parent
8458 block 5 > parent 5. Go to SubVolume
8459 block 6 > parent 5. Go to SubVolume
8460 block 7 > parent 5. Go to SubVolume
8461 block 8 > parent 5. Go to SubVolume
8462 block 9 > parent 5. Go to SubVolume
8463 block 10 > parent 5. Go to SubVolume
8464 block 0 < scrub_point 0. Check with your parent
8465 block 1 < scrub_point 0. Check with your parent
8466 block 2 < scrub_point 0. Check with your parent
8467 block 3 < scrub_point 0. Check with your parent
8468 block 4 < scrub_point 0. Check with your parent
8469 block 5 > parent 5. Go to SubVolume
8470 block 6 > parent 5. Go to SubVolume
8471 block 7 > parent 5. Go to SubVolume
8472 block 8 > parent 5. Go to SubVolume
8473 block 9 > parent 5. Go to SubVolume
8474 block 0 < scrub_point 1. Check with your parent
8475 block 1 < scrub_point 1. Check with your parent
8476 block 2 < scrub_point 1. Check with your parent
8477 block 3 < scrub_point 1. Check with your parent
8478 block 4 < scrub_point 1. Check with your parent
8479 block 5 > parent 5. Go to SubVolume
8480 block 6 > parent 5. Go to SubVolume
8481 block 7 > parent 5. Go to SubVolume
8482 block 8 > parent 5. Go to SubVolume
8483 block 9 > parent 5. Go to SubVolume
8484 block 0 < scrub_point 2. Check with your parent
8485 block 1 < scrub_point 2. Check with your parent
8486 block 2 < scrub_point 2. Check with your parent
8487 block 3 < scrub_point 2. Check with your parent
8488 block 4 < scrub_point 2. Check with your parent
8489 block 5 > parent 5. Go to SubVolume
8490 block 6 > parent 5. Go to SubVolume
8491 block 7 > parent 5. Go to SubVolume
8492 block 8 > parent 5. Go to SubVolume
8493 block 9 > parent 5. Go to SubVolume
8494 block 0 < scrub_point 3. Check with your parent
8495 block 1 < scrub_point 3. Check with your parent
8496 block 2 < scrub_point 3. Check with your parent
8497 block 3 < scrub_point 3. Check with your parent
8498 block 4 < scrub_point 3. Check with your parent
8499 block 5 > parent 5. Go to SubVolume
8500 block 6 > parent 5. Go to SubVolume
8501 block 7 > parent 5. Go to SubVolume
8502 block 8 > parent 5. Go to SubVolume
8503 block 9 > parent 5. Go to SubVolume
8504 block 0 < scrub_point 4. Check with your parent
8505 block 1 < scrub_point 4. Check with your parent
8506 block 2 < scrub_point 4. Check with your parent
8507 block 3 < scrub_point 4. Check with your parent
8508 block 4 < scrub_point 4. Check with your parent
8509 block 5 > parent 5. Go to SubVolume
8510 block 6 > parent 5. Go to SubVolume
8511 block 7 > parent 5. Go to SubVolume
8512 block 8 > parent 5. Go to SubVolume
8513 block 9 > parent 5. Go to SubVolume
8514 block 0 < scrub_point 5. Check with your parent
8515 block 1 < scrub_point 5. Check with your parent
8516 block 2 < scrub_point 5. Check with your parent
8517 block 3 < scrub_point 5. Check with your parent
8518 block 4 < scrub_point 5. Check with your parent
8519 block 5 > parent 5. Go to SubVolume
8520 block 6 > parent 5. Go to SubVolume
8521 block 7 > parent 5. Go to SubVolume
8522 block 8 > parent 5. Go to SubVolume
8523 block 9 > parent 5. Go to SubVolume
8524 block 0 < scrub_point 0. Check with your parent
8525 block 1 < scrub_point 0. Check with your parent
8526 block 2 < scrub_point 0. Check with your parent
8527 block 3 < scrub_point 0. Check with your parent
8528 block 4 < scrub_point 0. Check with your parent
8529 block 5 > parent 5. Go to SubVolume
8530 block 6 > parent 5. Go to SubVolume
8531 block 7 > parent 5. Go to SubVolume
8532 block 8 > parent 5. Go to SubVolume
8533 block 0 < scrub_point 1. Check with your parent
8534 block 1 < scrub_point 1. Check with your parent
8535 block 2 < scrub_point 1. Check with your parent
8536 block 3 < scrub_point 1. Check with your parent
8537 block 4 < scrub_point 1. Check with your parent
8538 block 5 > parent 5. Go to SubVolume
8539 block 6 > parent 5. Go to SubVolume
8540 block 7 > parent 5. Go to SubVolume
8541 block 8 > parent 5. Go to SubVolume
8542 block 0 < scrub_point 2. Check with your parent
8543 block 1 < scrub_point 2. Check with your parent
8544 block 2 < scrub_point 2. Check with your parent
8545 block 3 < scrub_point 2. Check with your parent
8546 block 4 < scrub_point 2. Check with your parent
8547 block 5 > parent 5. Go to SubVolume
8548 block 6 > parent 5. Go to SubVolume
8549 block 7 > parent 5. Go to SubVolume
8550 block 8 > parent 5. Go to SubVolume
8551 block 0 < scrub_point 3. Check with your parent
8552 block 1 < scrub_point 3. Check with your parent
8553 block 2 < scrub_point 3. Check with your parent
8554 block 3 < scrub_point 3. Check with your parent
8555 block 4 < scrub_point 3. Check with your parent
8556 block 5 > parent 5. Go to SubVolume
8557 block 6 > parent 5. Go to SubVolume
8558 block 7 > parent 5. Go to SubVolume
8559 block 8 > parent 5. Go to SubVolume
8560 block 0 < scrub_point 4. Check with your parent
8561 block 1 < scrub_point 4. Check with your parent
8562 block 2 < scrub_point 4. Check with your parent
8563 block 3 < scrub_point 4. Check with your parent
8564 block 4 < scrub_point 4. Check with your parent
8565 block 5 > parent 5. Go to SubVolume
8566 block 6 > parent 5. Go to SubVolume
8567 block 7 > parent 5. Go to SubVolume
8568 block 8 > parent 5. Go to SubVolume
8569 block 0 < scrub_point 5. Check with your parent
8570 block 1 < scrub_point 5. Check with your parent
8571 block 2 < scrub_point 5. Check with your parent
8572 block 3 < scrub_point 5. Check with your parent
8573 block 4 < scrub_point 5. Check with your parent
8574 block 5 > parent 5. Go to SubVolume
8575 block 6 > parent 5. Go to SubVolume
8576 block 7 > parent 5. Go to SubVolume
8577 block 8 > parent 5. Go to SubVolume
8578 block 0 < scrub_point 0. Check with your parent
8579 block 1 < scrub_point 0. Check with your parent
8580 block 2 < scrub_point 0. Check with your parent
8581 block 3 < scrub_point 0. Check with your parent
8582 block 4 < scrub_point 0. Check with your parent
8583 block 5 > parent 5. Go to SubVolume
8584 block 6 > parent 5. Go to SubVolume
8585 block 7 > parent 5. Go to SubVolume
8586 block 0 < scrub_point 1. Check with your parent
8587 block 1 < scrub_point 1. Check with your parent
8588 block 2 < scrub_point 1. Check with your parent
8589 block 3 < scrub_point 1. Check with your parent
8590 block 4 < scrub_point 1. Check with your parent
8591 block 5 > parent 5. Go to SubVolume
8592 block 6 > parent 5. Go to SubVolume
8593 block 7 > parent 5. Go to SubVolume
8594 block 0 < scrub_point 2. Check with your parent
8595 block 1 < scrub_point 2. Check with your parent
8596 block 2 < scrub_point 2. Check with your parent
8597 block 3 < scrub_point 2. Check with your parent
8598 block 4 < scrub_point 2. Check with your parent
8599 block 5 > parent 5. Go to SubVolume
8600 block 6 > parent 5. Go to SubVolume
8601 block 7 > parent 5. Go to SubVolume
8602 block 0 < scrub_point 3. Check with your parent
8603 block 1 < scrub_point 3. Check with your parent
8604 block 2 < scrub_point 3. Check with your parent
8605 block 3 < scrub_point 3. Check with your parent
8606 block 4 < scrub_point 3. Check with your parent
8607 block 5 > parent 5. Go to SubVolume
8608 block 6 > parent 5. Go to SubVolume
8609 block 7 > parent 5. Go to SubVolume
8610 block 0 < scrub_point 4. Check with your parent
8611 block 1 < scrub_point 4. Check with your parent
8612 block 2 < scrub_point 4. Check with your parent
8613 block 3 < scrub_point 4. Check with your parent
8614 block 4 < scrub_point 4. Check with your parent
8615 block 5 > parent 5. Go to SubVolume
8616 block 6 > parent 5. Go to SubVolume
8617 block 7 > parent 5. Go to SubVolume
8618 block 0 < scrub_point 5. Check with your parent
8619 block 1 < scrub_point 5. Check with your parent
8620 block 2 < scrub_point 5. Check with your parent
8621 block 3 < scrub_point 5. Check with your parent
8622 block 4 < scrub_point 5. Check with your parent
8623 block 5 > parent 5. Go to SubVolume
8624 block 6 > parent 5. Go to SubVolume
8625 block 7 > parent 5. Go to SubVolume
8626 block 0 < scrub_point 0. Check with your parent
8627 block 1 < scrub_point 0. Check with your parent
8628 block 2 < scrub_point 0. Check with your parent
8629 block 3 < scrub_point 0. Check with your parent
8630 block 4 < scrub_point 0. Check with your parent
8631 block 5 > parent 5. Go to SubVolume
8632 block 6 > parent 5. Go to SubVolume
8633 block 0 < scrub_point 1. Check with your parent
8634 block 1 < scrub_point 1. Check with your parent
8635 block 2 < scrub_point 1. Check with your parent
8636 block 3 < scrub_point 1. Check with your parent
8637 block 4 < scrub_point 1. Check with your parent
8638 block 5 > parent 5. Go to SubVolume
8639 block 6 > parent 5. Go to SubVolume
8640 block 0 < scrub_point 2. Check with your parent
8641 block 1 < scrub_point 2. Check with your parent
8642 block 2 < scrub_point 2. Check with your parent
8643 block 3 < scrub_point 2. Check with your parent
8644 block 4 < scrub_point 2. Check with your parent
8645 block 5 > parent 5. Go to SubVolume
8646 block 6 > parent 5. Go to SubVolume
8647 block 0 < scrub_point 3. Check with your parent
8648 block 1 < scrub_point 3. Check with your parent
8649 block 2 < scrub_point 3. Check with your parent
8650 block 3 < scrub_point 3. Check with your parent
8651 block 4 < scrub_point 3. Check with your parent
8652 block 5 > parent 5. Go to SubVolume
8653 block 6 > parent 5. Go to SubVolume
8654 block 0 < scrub_point 4. Check with your parent
8655 block 1 < scrub_point 4. Check with your parent
8656 block 2 < scrub_point 4. Check with your parent
8657 block 3 < scrub_point 4. Check with your parent
8658 block 4 < scrub_point 4. Check with your parent
8659 block 5 > parent 5. Go to SubVolume
8660 block 6 > parent 5. Go to SubVolume
8661 block 0 < scrub_point 5. Check with your parent
8662 block 1 < scrub_point 5. Check with your parent
8663 block 2 < scrub_point 5. Check with your parent
8664 block 3 < scrub_point 5. Check with your parent
8665 block 4 < scrub_point 5. Check with your parent
8666 block 5 > parent 5. Go to SubVolume
8667 block 6 > parent 5. Go to SubVolume
8668 block 0 < scrub_point 0. Check with your parent
8669 block 1 < scrub_point 0. Check with your parent
8670 block 2 < scrub_point 0. Check with your parent
8671 block 3 < scrub_point 0. Check with your parent
8672 block 4 < scrub_point 0. Check with your parent
8673 block 5 > parent 5. Go to SubVolume
8674 block 0 < scrub_point 1. Check with your parent
8675 block 1 < scrub_point 1. Check with your parent
8676 block 2 < scrub_point 1. Check with your parent
8677 block 3 < scrub_point 1. Check with your parent
8678 block 4 < scrub_point 1. Check with your parent
8679 block 5 > parent 5. Go to SubVolume
8680 block 0 < scrub_point 2. Check with your parent
8681 block 1 < scrub_point 2. Check with your parent
8682 block 2 < scrub_point 2. Check with your parent
8683 block 3 < scrub_point 2. Check with your parent
8684 block 4 < scrub_point 2. Check with your parent
8685 block 5 > parent 5. Go to SubVolume
8686 block 0 < scrub_point 3. Check with your parent
8687 block 1 < scrub_point 3. Check with your parent
8688 block 2 < scrub_point 3. Check with your parent
8689 block 3 < scrub_point 3. Check with your parent
8690 block 4 < scrub_point 3. Check with your parent
8691 block 5 > parent 5. Go to SubVolume
8692 block 0 < scrub_point 4. Check with your parent
8693 block 1 < scrub_point 4. Check with your parent
8694 block 2 < scrub_point 4. Check with your parent
8695 block 3 < scrub_point 4. Check with your parent
8696 block 4 < scrub_point 4. Check with your parent
8697 block 5 > parent 5. Go to SubVolume
8698 block 0 < scrub_point 5. Check with your parent
8699 block 1 < scrub_point 5. Check with your parent
8700 block 2 < scrub_point 5. Check with your parent
8701 block 3 < scrub_point 5. Check with your parent
8702 block 4 < scrub_point 5. Check with your parent
8703 block 5 > parent 5. Go to SubVolume
8704 block 0 < scrub_point 0. Check with your parent
8705 block 1 < scrub_point 0. Check with your parent
8706 block 2 < scrub_point 0. Check with your parent
8707 block 3 < scrub_point 0. Check with your parent
8708 block 4 < scrub_point 0. Check with your parent
8709 block 0 < scrub_point 1. Check with your parent
8710 block 1 < scrub_point 1. Check with your parent
8711 block 2 < scrub_point 1. Check with your parent
8712 block 3 < scrub_point 1. Check with your parent
8713 block 4 < scrub_point 1. Check with your parent
8714 block 0 < scrub_point 2. Check with your parent
8715 block 1 < scrub_point 2. Check with your parent
8716 block 2 < scrub_point 2. Check with your parent
8717 block 3 < scrub_point 2. Check with your parent
8718 block 4 < scrub_point 2. Check with your parent
8719 block 0 < scrub_point 3. Check with your parent
8720 block 1 < scrub_point 3. Check with your parent
8721 block 2 < scrub_point 3. Check with your parent
8722 block 3 < scrub_point 3. Check with your parent
8723 block 4 < scrub_point 3. Check with your parent
8724 block 0 < scrub_point 4. Check with your parent
8725 block 1 < scrub_point 4. Check with your parent
8726 block 2 < scrub_point 4. Check with your parent
8727 block 3 < scrub_point 4. Check with your parent
8728 block 4 < scrub_point 4. Check with your parent
8729 block 0 < scrub_point 5. Check with your parent
8730 block 1 < scrub_point 5. Check with your parent
8731 block 2 < scrub_point 5. Check with your parent
8732 block 3 < scrub_point 5. Check with your parent
8733 block 4 < scrub_point 5. Check with your parent
8734 block 0 < scrub_point 0. Check with your parent
8735 block 1 < scrub_point 0. Check with your parent
8736 block 2 < scrub_point 0. Check with your parent
8737 block 3 < scrub_point 0. Check with your parent
8738 block 0 < scrub_point 1. Check with your parent
8739 block 1 < scrub_point 1. Check with your parent
8740 block 2 < scrub_point 1. Check with your parent
8741 block 3 < scrub_point 1. Check with your parent
8742 block 0 < scrub_point 2. Check with your parent
8743 block 1 < scrub_point 2. Check with your parent
8744 block 2 < scrub_point 2. Check with your parent
8745 block 3 < scrub_point 2. Check with your parent
8746 block 0 < scrub_point 3. Check with your parent
8747 block 1 < scrub_point 3. Check with your parent
8748 block 2 < scrub_point 3. Check with your parent
8749 block 3 < scrub_point 3. Check with your parent
8750 block 0 < scrub_point 4. Check with your parent
8751 block 1 < scrub_point 4. Check with your parent
8752 block 2 < scrub_point 4. Check with your parent
8753 block 3 < scrub_point 4. Check with your parent
8754 block 0 < scrub_point 5. Check with your parent
8755 block 1 < scrub_point 5. Check with your parent
8756 block 2 < scrub_point 5. Check with your parent
8757 block 3 < scrub_point 5. Check with your parent
8758 block 0 < scrub_point 0. Check with your parent
8759 block 1 < scrub_point 0. Check with your parent
8760 block 2 < scrub_point 0. Check with your parent
8761 block 0 < scrub_point 1. Check with your parent
8762 block 1 < scrub_point 1. Check with your parent
8763 block 2 < scrub_point 1. Check with your parent
8764 block 0 < scrub_point 2. Check with your parent
8765 block 1 < scrub_point 2. Check with your parent
8766 block 2 < scrub_point 2. Check with your parent
8767 block 0 < scrub_point 3. Check with your parent
8768 block 1 < scrub_point 3. Check with your parent
8769 block 2 < scrub_point 3. Check with your parent
8770 block 0 < scrub_point 4. Check with your parent
8771 block 1 < scrub_point 4. Check with your parent
8772 block 2 < scrub_point 4. Check with your parent
8773 block 0 < scrub_point 5. Check with your parent
8774 block 1 < scrub_point 5. Check with your parent
8775 block 2 < scrub_point 5. Check with your parent
8776 block 0 < scrub_point 0. Check with your parent
8777 block 1 < scrub_point 0. Check with your parent
8778 block 0 < scrub_point 1. Check with your parent
8779 block 1 < scrub_point 1. Check with your parent
8780 block 0 < scrub_point 2. Check with your parent
8781 block 1 < scrub_point 2. Check with your parent
8782 block 0 < scrub_point 3. Check with your parent
8783 block 1 < scrub_point 3. Check with your parent
8784 block 0 < scrub_point 4. Check with your parent
8785 block 1 < scrub_point 4. Check with your parent
8786 block 0 < scrub_point 5. Check with your parent
8787 block 1 < scrub_point 5. Check with your parent
8788 test volume::test::test_scrub_point_two_subvolume_smaller_1 ... ok
8789 block 0 < scrub_point 0. Check with your parent
8790 block 1 < scrub_point 0. Check with your parent
8791 block 2 < scrub_point 0. Check with your parent
8792 block 3 < scrub_point 0. Check with your parent
8793 block 4 < scrub_point 0. Check with your parent
8794 block 5 > parent 5. Go to SubVolume
8795 block 6 > parent 5. Go to SubVolume
8796 block 7 > parent 5. Go to SubVolume
8797 block 8 > parent 5. Go to SubVolume
8798 block 9 > parent 5. Go to SubVolume
8799 block 0+1 <= scrub_point 1. No parent check
8800 block 1 < scrub_point 1. Check with your parent
8801 block 2 < scrub_point 1. Check with your parent
8802 block 3 < scrub_point 1. Check with your parent
8803 block 4 < scrub_point 1. Check with your parent
8804 block 5 > parent 5. Go to SubVolume
8805 block 6 > parent 5. Go to SubVolume
8806 block 7 > parent 5. Go to SubVolume
8807 block 8 > parent 5. Go to SubVolume
8808 block 9 > parent 5. Go to SubVolume
8809 block 0+1 <= scrub_point 2. No parent check
8810 block 1+1 <= scrub_point 2. No parent check
8811 block 2 < scrub_point 2. Check with your parent
8812 block 3 < scrub_point 2. Check with your parent
8813 block 4 < scrub_point 2. Check with your parent
8814 block 5 > parent 5. Go to SubVolume
8815 block 6 > parent 5. Go to SubVolume
8816 block 7 > parent 5. Go to SubVolume
8817 block 8 > parent 5. Go to SubVolume
8818 block 9 > parent 5. Go to SubVolume
8819 block 0+1 <= scrub_point 3. No parent check
8820 block 1+1 <= scrub_point 3. No parent check
8821 block 2+1 <= scrub_point 3. No parent check
8822 block 3 < scrub_point 3. Check with your parent
8823 block 4 < scrub_point 3. Check with your parent
8824 block 5 > parent 5. Go to SubVolume
8825 block 6 > parent 5. Go to SubVolume
8826 block 7 > parent 5. Go to SubVolume
8827 block 8 > parent 5. Go to SubVolume
8828 block 9 > parent 5. Go to SubVolume
8829 block 0+1 <= scrub_point 4. No parent check
8830 block 1+1 <= scrub_point 4. No parent check
8831 block 2+1 <= scrub_point 4. No parent check
8832 block 3+1 <= scrub_point 4. No parent check
8833 block 4 < scrub_point 4. Check with your parent
8834 block 5 > parent 5. Go to SubVolume
8835 block 6 > parent 5. Go to SubVolume
8836 block 7 > parent 5. Go to SubVolume
8837 block 8 > parent 5. Go to SubVolume
8838 block 9 > parent 5. Go to SubVolume
8839 block 0+1 <= scrub_point 5. No parent check
8840 block 1+1 <= scrub_point 5. No parent check
8841 block 2+1 <= scrub_point 5. No parent check
8842 block 3+1 <= scrub_point 5. No parent check
8843 block 4+1 <= scrub_point 5. No parent check
8844 block 5 > parent 5. Go to SubVolume
8845 block 6 > parent 5. Go to SubVolume
8846 block 7 > parent 5. Go to SubVolume
8847 block 8 > parent 5. Go to SubVolume
8848 block 9 > parent 5. Go to SubVolume
8849 block 0 < scrub_point 0. Check with your parent
8850 block 1 < scrub_point 0. Check with your parent
8851 block 2 < scrub_point 0. Check with your parent
8852 block 3 < scrub_point 0. Check with your parent
8853 block 4 < scrub_point 0. Check with your parent
8854 block 5 > parent 5. Go to SubVolume
8855 block 6 > parent 5. Go to SubVolume
8856 block 7 > parent 5. Go to SubVolume
8857 block 8 > parent 5. Go to SubVolume
8858 block 0 < scrub_point 1. Check with your parent
8859 block 1 < scrub_point 1. Check with your parent
8860 block 2 < scrub_point 1. Check with your parent
8861 block 3 < scrub_point 1. Check with your parent
8862 block 4 < scrub_point 1. Check with your parent
8863 block 5 > parent 5. Go to SubVolume
8864 block 6 > parent 5. Go to SubVolume
8865 block 7 > parent 5. Go to SubVolume
8866 block 8 > parent 5. Go to SubVolume
8867 block 0+2 <= scrub_point 2. No parent check
8868 block 1 < scrub_point 2. Check with your parent
8869 block 2 < scrub_point 2. Check with your parent
8870 block 3 < scrub_point 2. Check with your parent
8871 block 4 < scrub_point 2. Check with your parent
8872 block 5 > parent 5. Go to SubVolume
8873 block 6 > parent 5. Go to SubVolume
8874 block 7 > parent 5. Go to SubVolume
8875 block 8 > parent 5. Go to SubVolume
8876 block 0+2 <= scrub_point 3. No parent check
8877 block 1+2 <= scrub_point 3. No parent check
8878 block 2 < scrub_point 3. Check with your parent
8879 block 3 < scrub_point 3. Check with your parent
8880 block 4 < scrub_point 3. Check with your parent
8881 block 5 > parent 5. Go to SubVolume
8882 block 6 > parent 5. Go to SubVolume
8883 block 7 > parent 5. Go to SubVolume
8884 block 8 > parent 5. Go to SubVolume
8885 block 0+2 <= scrub_point 4. No parent check
8886 block 1+2 <= scrub_point 4. No parent check
8887 block 2+2 <= scrub_point 4. No parent check
8888 block 3 < scrub_point 4. Check with your parent
8889 block 4 < scrub_point 4. Check with your parent
8890 block 5 > parent 5. Go to SubVolume
8891 block 6 > parent 5. Go to SubVolume
8892 block 7 > parent 5. Go to SubVolume
8893 block 8 > parent 5. Go to SubVolume
8894 block 0+2 <= scrub_point 5. No parent check
8895 block 1+2 <= scrub_point 5. No parent check
8896 block 2+2 <= scrub_point 5. No parent check
8897 block 3+2 <= scrub_point 5. No parent check
8898 block 4 < scrub_point 5. Check with your parent
8899 block 5 > parent 5. Go to SubVolume
8900 block 6 > parent 5. Go to SubVolume
8901 block 7 > parent 5. Go to SubVolume
8902 block 8 > parent 5. Go to SubVolume
8903 block 0 < scrub_point 0. Check with your parent
8904 block 1 < scrub_point 0. Check with your parent
8905 block 2 < scrub_point 0. Check with your parent
8906 block 3 < scrub_point 0. Check with your parent
8907 block 4 < scrub_point 0. Check with your parent
8908 block 5 > parent 5. Go to SubVolume
8909 block 6 > parent 5. Go to SubVolume
8910 block 7 > parent 5. Go to SubVolume
8911 block 0 < scrub_point 1. Check with your parent
8912 block 1 < scrub_point 1. Check with your parent
8913 block 2 < scrub_point 1. Check with your parent
8914 block 3 < scrub_point 1. Check with your parent
8915 block 4 < scrub_point 1. Check with your parent
8916 block 5 > parent 5. Go to SubVolume
8917 block 6 > parent 5. Go to SubVolume
8918 block 7 > parent 5. Go to SubVolume
8919 block 0 < scrub_point 2. Check with your parent
8920 block 1 < scrub_point 2. Check with your parent
8921 block 2 < scrub_point 2. Check with your parent
8922 block 3 < scrub_point 2. Check with your parent
8923 block 4 < scrub_point 2. Check with your parent
8924 block 5 > parent 5. Go to SubVolume
8925 block 6 > parent 5. Go to SubVolume
8926 block 7 > parent 5. Go to SubVolume
8927 block 0+3 <= scrub_point 3. No parent check
8928 block 1 < scrub_point 3. Check with your parent
8929 block 2 < scrub_point 3. Check with your parent
8930 block 3 < scrub_point 3. Check with your parent
8931 block 4 < scrub_point 3. Check with your parent
8932 block 5 > parent 5. Go to SubVolume
8933 block 6 > parent 5. Go to SubVolume
8934 block 7 > parent 5. Go to SubVolume
8935 block 0+3 <= scrub_point 4. No parent check
8936 block 1+3 <= scrub_point 4. No parent check
8937 block 2 < scrub_point 4. Check with your parent
8938 block 3 < scrub_point 4. Check with your parent
8939 block 4 < scrub_point 4. Check with your parent
8940 block 5 > parent 5. Go to SubVolume
8941 block 6 > parent 5. Go to SubVolume
8942 block 7 > parent 5. Go to SubVolume
8943 block 0+3 <= scrub_point 5. No parent check
8944 block 1+3 <= scrub_point 5. No parent check
8945 block 2+3 <= scrub_point 5. No parent check
8946 block 3 < scrub_point 5. Check with your parent
8947 block 4 < scrub_point 5. Check with your parent
8948 block 5 > parent 5. Go to SubVolume
8949 block 6 > parent 5. Go to SubVolume
8950 block 7 > parent 5. Go to SubVolume
8951 block 0 < scrub_point 0. Check with your parent
8952 block 1 < scrub_point 0. Check with your parent
8953 block 2 < scrub_point 0. Check with your parent
8954 block 3 < scrub_point 0. Check with your parent
8955 block 4 < scrub_point 0. Check with your parent
8956 block 5 > parent 5. Go to SubVolume
8957 block 6 > parent 5. Go to SubVolume
8958 block 0 < scrub_point 1. Check with your parent
8959 block 1 < scrub_point 1. Check with your parent
8960 block 2 < scrub_point 1. Check with your parent
8961 block 3 < scrub_point 1. Check with your parent
8962 block 4 < scrub_point 1. Check with your parent
8963 block 5 > parent 5. Go to SubVolume
8964 block 6 > parent 5. Go to SubVolume
8965 block 0 < scrub_point 2. Check with your parent
8966 block 1 < scrub_point 2. Check with your parent
8967 block 2 < scrub_point 2. Check with your parent
8968 block 3 < scrub_point 2. Check with your parent
8969 block 4 < scrub_point 2. Check with your parent
8970 block 5 > parent 5. Go to SubVolume
8971 block 6 > parent 5. Go to SubVolume
8972 block 0 < scrub_point 3. Check with your parent
8973 block 1 < scrub_point 3. Check with your parent
8974 block 2 < scrub_point 3. Check with your parent
8975 block 3 < scrub_point 3. Check with your parent
8976 block 4 < scrub_point 3. Check with your parent
8977 block 5 > parent 5. Go to SubVolume
8978 block 6 > parent 5. Go to SubVolume
8979 block 0+4 <= scrub_point 4. No parent check
8980 block 1 < scrub_point 4. Check with your parent
8981 block 2 < scrub_point 4. Check with your parent
8982 block 3 < scrub_point 4. Check with your parent
8983 block 4 < scrub_point 4. Check with your parent
8984 block 5 > parent 5. Go to SubVolume
8985 block 6 > parent 5. Go to SubVolume
8986 block 0+4 <= scrub_point 5. No parent check
8987 block 1+4 <= scrub_point 5. No parent check
8988 block 2 < scrub_point 5. Check with your parent
8989 block 3 < scrub_point 5. Check with your parent
8990 block 4 < scrub_point 5. Check with your parent
8991 block 5 > parent 5. Go to SubVolume
8992 block 6 > parent 5. Go to SubVolume
8993 block 0 < scrub_point 0. Check with your parent
8994 block 1 < scrub_point 0. Check with your parent
8995 block 2 < scrub_point 0. Check with your parent
8996 block 3 < scrub_point 0. Check with your parent
8997 block 4 < scrub_point 0. Check with your parent
8998 block 5 > parent 5. Go to SubVolume
8999 block 0 < scrub_point 1. Check with your parent
9000 block 1 < scrub_point 1. Check with your parent
9001 block 2 < scrub_point 1. Check with your parent
9002 block 3 < scrub_point 1. Check with your parent
9003 block 4 < scrub_point 1. Check with your parent
9004 block 5 > parent 5. Go to SubVolume
9005 block 0 < scrub_point 2. Check with your parent
9006 block 1 < scrub_point 2. Check with your parent
9007 block 2 < scrub_point 2. Check with your parent
9008 block 3 < scrub_point 2. Check with your parent
9009 block 4 < scrub_point 2. Check with your parent
9010 block 5 > parent 5. Go to SubVolume
9011 block 0 < scrub_point 3. Check with your parent
9012 block 1 < scrub_point 3. Check with your parent
9013 block 2 < scrub_point 3. Check with your parent
9014 block 3 < scrub_point 3. Check with your parent
9015 block 4 < scrub_point 3. Check with your parent
9016 block 5 > parent 5. Go to SubVolume
9017 block 0 < scrub_point 4. Check with your parent
9018 block 1 < scrub_point 4. Check with your parent
9019 block 2 < scrub_point 4. Check with your parent
9020 block 3 < scrub_point 4. Check with your parent
9021 block 4 < scrub_point 4. Check with your parent
9022 block 5 > parent 5. Go to SubVolume
9023 block 0+5 <= scrub_point 5. No parent check
9024 block 1 < scrub_point 5. Check with your parent
9025 block 2 < scrub_point 5. Check with your parent
9026 block 3 < scrub_point 5. Check with your parent
9027 block 4 < scrub_point 5. Check with your parent
9028 block 5 > parent 5. Go to SubVolume
9029 block 0 < scrub_point 0. Check with your parent
9030 block 1 < scrub_point 0. Check with your parent
9031 block 2 < scrub_point 0. Check with your parent
9032 block 3 < scrub_point 0. Check with your parent
9033 block 4 < scrub_point 0. Check with your parent
9034 block 0 < scrub_point 1. Check with your parent
9035 block 1 < scrub_point 1. Check with your parent
9036 block 2 < scrub_point 1. Check with your parent
9037 block 3 < scrub_point 1. Check with your parent
9038 block 4 < scrub_point 1. Check with your parent
9039 block 0 < scrub_point 2. Check with your parent
9040 block 1 < scrub_point 2. Check with your parent
9041 block 2 < scrub_point 2. Check with your parent
9042 block 3 < scrub_point 2. Check with your parent
9043 block 4 < scrub_point 2. Check with your parent
9044 block 0 < scrub_point 3. Check with your parent
9045 block 1 < scrub_point 3. Check with your parent
9046 block 2 < scrub_point 3. Check with your parent
9047 block 3 < scrub_point 3. Check with your parent
9048 block 4 < scrub_point 3. Check with your parent
9049 block 0 < scrub_point 4. Check with your parent
9050 block 1 < scrub_point 4. Check with your parent
9051 block 2 < scrub_point 4. Check with your parent
9052 block 3 < scrub_point 4. Check with your parent
9053 block 4 < scrub_point 4. Check with your parent
9054 block 0 < scrub_point 5. Check with your parent
9055 block 1 < scrub_point 5. Check with your parent
9056 block 2 < scrub_point 5. Check with your parent
9057 block 3 < scrub_point 5. Check with your parent
9058 block 4 < scrub_point 5. Check with your parent
9059 block 0 < scrub_point 0. Check with your parent
9060 block 1 < scrub_point 0. Check with your parent
9061 block 2 < scrub_point 0. Check with your parent
9062 block 3 < scrub_point 0. Check with your parent
9063 block 0 < scrub_point 1. Check with your parent
9064 block 1 < scrub_point 1. Check with your parent
9065 block 2 < scrub_point 1. Check with your parent
9066 block 3 < scrub_point 1. Check with your parent
9067 block 0 < scrub_point 2. Check with your parent
9068 block 1 < scrub_point 2. Check with your parent
9069 block 2 < scrub_point 2. Check with your parent
9070 block 3 < scrub_point 2. Check with your parent
9071 block 0 < scrub_point 3. Check with your parent
9072 block 1 < scrub_point 3. Check with your parent
9073 block 2 < scrub_point 3. Check with your parent
9074 block 3 < scrub_point 3. Check with your parent
9075 block 0 < scrub_point 4. Check with your parent
9076 block 1 < scrub_point 4. Check with your parent
9077 block 2 < scrub_point 4. Check with your parent
9078 block 3 < scrub_point 4. Check with your parent
9079 block 0 < scrub_point 5. Check with your parent
9080 block 1 < scrub_point 5. Check with your parent
9081 block 2 < scrub_point 5. Check with your parent
9082 block 3 < scrub_point 5. Check with your parent
9083 block 0 < scrub_point 0. Check with your parent
9084 block 1 < scrub_point 0. Check with your parent
9085 block 2 < scrub_point 0. Check with your parent
9086 block 0 < scrub_point 1. Check with your parent
9087 block 1 < scrub_point 1. Check with your parent
9088 block 2 < scrub_point 1. Check with your parent
9089 block 0 < scrub_point 2. Check with your parent
9090 block 1 < scrub_point 2. Check with your parent
9091 block 2 < scrub_point 2. Check with your parent
9092 block 0 < scrub_point 3. Check with your parent
9093 block 1 < scrub_point 3. Check with your parent
9094 block 2 < scrub_point 3. Check with your parent
9095 block 0 < scrub_point 4. Check with your parent
9096 block 1 < scrub_point 4. Check with your parent
9097 block 2 < scrub_point 4. Check with your parent
9098 block 0 < scrub_point 5. Check with your parent
9099 block 1 < scrub_point 5. Check with your parent
9100 block 2 < scrub_point 5. Check with your parent
9101 block 0 < scrub_point 0. Check with your parent
9102 block 1 < scrub_point 0. Check with your parent
9103 block 0 < scrub_point 1. Check with your parent
9104 block 1 < scrub_point 1. Check with your parent
9105 block 0 < scrub_point 2. Check with your parent
9106 block 1 < scrub_point 2. Check with your parent
9107 block 0 < scrub_point 3. Check with your parent
9108 block 1 < scrub_point 3. Check with your parent
9109 block 0 < scrub_point 4. Check with your parent
9110 block 1 < scrub_point 4. Check with your parent
9111 block 0 < scrub_point 5. Check with your parent
9112 block 1 < scrub_point 5. Check with your parent
9113 test volume::test::test_scrub_point_two_subvolume_smaller_2 ... ok
9114 block 0 < scrub_point 0. Check with your parent
9115 block 1 < scrub_point 0. Check with your parent
9116 block 2 < scrub_point 0. Check with your parent
9117 block 3 < scrub_point 0. Check with your parent
9118 block 4 < scrub_point 0. Check with your parent
9119 block 5 < scrub_point 0. Check with your parent
9120 block 6 < scrub_point 0. Check with your parent
9121 block 7 < scrub_point 0. Check with your parent
9122 block 8 > parent 8. Go to SubVolume
9123 block 9 > parent 8. Go to SubVolume
9124 block 0+1 <= scrub_point 1. No parent check
9125 block 1 < scrub_point 1. Check with your parent
9126 block 2 < scrub_point 1. Check with your parent
9127 block 3 < scrub_point 1. Check with your parent
9128 block 4 < scrub_point 1. Check with your parent
9129 block 5 < scrub_point 1. Check with your parent
9130 block 6 < scrub_point 1. Check with your parent
9131 block 7 < scrub_point 1. Check with your parent
9132 block 8 > parent 8. Go to SubVolume
9133 block 9 > parent 8. Go to SubVolume
9134 block 0+1 <= scrub_point 2. No parent check
9135 block 1+1 <= scrub_point 2. No parent check
9136 block 2 < scrub_point 2. Check with your parent
9137 block 3 < scrub_point 2. Check with your parent
9138 block 4 < scrub_point 2. Check with your parent
9139 block 5 < scrub_point 2. Check with your parent
9140 block 6 < scrub_point 2. Check with your parent
9141 block 7 < scrub_point 2. Check with your parent
9142 block 8 > parent 8. Go to SubVolume
9143 block 9 > parent 8. Go to SubVolume
9144 block 0+1 <= scrub_point 3. No parent check
9145 block 1+1 <= scrub_point 3. No parent check
9146 block 2+1 <= scrub_point 3. No parent check
9147 block 3 < scrub_point 3. Check with your parent
9148 block 4 < scrub_point 3. Check with your parent
9149 block 5 < scrub_point 3. Check with your parent
9150 block 6 < scrub_point 3. Check with your parent
9151 block 7 < scrub_point 3. Check with your parent
9152 block 8 > parent 8. Go to SubVolume
9153 block 9 > parent 8. Go to SubVolume
9154 block 0+1 <= scrub_point 4. No parent check
9155 block 1+1 <= scrub_point 4. No parent check
9156 block 2+1 <= scrub_point 4. No parent check
9157 block 3+1 <= scrub_point 4. No parent check
9158 block 4 < scrub_point 4. Check with your parent
9159 block 5 < scrub_point 4. Check with your parent
9160 block 6 < scrub_point 4. Check with your parent
9161 block 7 < scrub_point 4. Check with your parent
9162 block 8 > parent 8. Go to SubVolume
9163 block 9 > parent 8. Go to SubVolume
9164 block 0+1 <= scrub_point 5. No parent check
9165 block 1+1 <= scrub_point 5. No parent check
9166 block 2+1 <= scrub_point 5. No parent check
9167 block 3+1 <= scrub_point 5. No parent check
9168 block 4+1 <= scrub_point 5. No parent check
9169 block 5 < scrub_point 5. Check with your parent
9170 block 6 < scrub_point 5. Check with your parent
9171 block 7 < scrub_point 5. Check with your parent
9172 block 8 > parent 8. Go to SubVolume
9173 block 9 > parent 8. Go to SubVolume
9174 block 0+1 <= scrub_point 6. No parent check
9175 block 1+1 <= scrub_point 6. No parent check
9176 block 2+1 <= scrub_point 6. No parent check
9177 block 3+1 <= scrub_point 6. No parent check
9178 block 4+1 <= scrub_point 6. No parent check
9179 block 5+1 <= scrub_point 6. No parent check
9180 block 6 < scrub_point 6. Check with your parent
9181 block 7 < scrub_point 6. Check with your parent
9182 block 8 > parent 8. Go to SubVolume
9183 block 9 > parent 8. Go to SubVolume
9184 block 0+1 <= scrub_point 7. No parent check
9185 block 1+1 <= scrub_point 7. No parent check
9186 block 2+1 <= scrub_point 7. No parent check
9187 block 3+1 <= scrub_point 7. No parent check
9188 block 4+1 <= scrub_point 7. No parent check
9189 block 5+1 <= scrub_point 7. No parent check
9190 block 6+1 <= scrub_point 7. No parent check
9191 block 7 < scrub_point 7. Check with your parent
9192 block 8 > parent 8. Go to SubVolume
9193 block 9 > parent 8. Go to SubVolume
9194 block 0+1 <= scrub_point 8. No parent check
9195 block 1+1 <= scrub_point 8. No parent check
9196 block 2+1 <= scrub_point 8. No parent check
9197 block 3+1 <= scrub_point 8. No parent check
9198 block 4+1 <= scrub_point 8. No parent check
9199 block 5+1 <= scrub_point 8. No parent check
9200 block 6+1 <= scrub_point 8. No parent check
9201 block 7+1 <= scrub_point 8. No parent check
9202 block 8 > parent 8. Go to SubVolume
9203 block 9 > parent 8. Go to SubVolume
9204 block 0 < scrub_point 0. Check with your parent
9205 block 1 < scrub_point 0. Check with your parent
9206 block 2 < scrub_point 0. Check with your parent
9207 block 3 < scrub_point 0. Check with your parent
9208 block 4 < scrub_point 0. Check with your parent
9209 block 5 < scrub_point 0. Check with your parent
9210 block 6 < scrub_point 0. Check with your parent
9211 block 7 < scrub_point 0. Check with your parent
9212 block 8 > parent 8. Go to SubVolume
9213 block 0 < scrub_point 1. Check with your parent
9214 block 1 < scrub_point 1. Check with your parent
9215 block 2 < scrub_point 1. Check with your parent
9216 block 3 < scrub_point 1. Check with your parent
9217 block 4 < scrub_point 1. Check with your parent
9218 block 5 < scrub_point 1. Check with your parent
9219 block 6 < scrub_point 1. Check with your parent
9220 block 7 < scrub_point 1. Check with your parent
9221 block 8 > parent 8. Go to SubVolume
9222 block 0+2 <= scrub_point 2. No parent check
9223 block 1 < scrub_point 2. Check with your parent
9224 block 2 < scrub_point 2. Check with your parent
9225 block 3 < scrub_point 2. Check with your parent
9226 block 4 < scrub_point 2. Check with your parent
9227 block 5 < scrub_point 2. Check with your parent
9228 block 6 < scrub_point 2. Check with your parent
9229 block 7 < scrub_point 2. Check with your parent
9230 block 8 > parent 8. Go to SubVolume
9231 block 0+2 <= scrub_point 3. No parent check
9232 block 1+2 <= scrub_point 3. No parent check
9233 block 2 < scrub_point 3. Check with your parent
9234 block 3 < scrub_point 3. Check with your parent
9235 block 4 < scrub_point 3. Check with your parent
9236 block 5 < scrub_point 3. Check with your parent
9237 block 6 < scrub_point 3. Check with your parent
9238 block 7 < scrub_point 3. Check with your parent
9239 block 8 > parent 8. Go to SubVolume
9240 block 0+2 <= scrub_point 4. No parent check
9241 block 1+2 <= scrub_point 4. No parent check
9242 block 2+2 <= scrub_point 4. No parent check
9243 block 3 < scrub_point 4. Check with your parent
9244 block 4 < scrub_point 4. Check with your parent
9245 block 5 < scrub_point 4. Check with your parent
9246 block 6 < scrub_point 4. Check with your parent
9247 block 7 < scrub_point 4. Check with your parent
9248 block 8 > parent 8. Go to SubVolume
9249 block 0+2 <= scrub_point 5. No parent check
9250 block 1+2 <= scrub_point 5. No parent check
9251 block 2+2 <= scrub_point 5. No parent check
9252 block 3+2 <= scrub_point 5. No parent check
9253 block 4 < scrub_point 5. Check with your parent
9254 block 5 < scrub_point 5. Check with your parent
9255 block 6 < scrub_point 5. Check with your parent
9256 block 7 < scrub_point 5. Check with your parent
9257 block 8 > parent 8. Go to SubVolume
9258 block 0+2 <= scrub_point 6. No parent check
9259 block 1+2 <= scrub_point 6. No parent check
9260 block 2+2 <= scrub_point 6. No parent check
9261 block 3+2 <= scrub_point 6. No parent check
9262 block 4+2 <= scrub_point 6. No parent check
9263 block 5 < scrub_point 6. Check with your parent
9264 block 6 < scrub_point 6. Check with your parent
9265 block 7 < scrub_point 6. Check with your parent
9266 block 8 > parent 8. Go to SubVolume
9267 block 0+2 <= scrub_point 7. No parent check
9268 block 1+2 <= scrub_point 7. No parent check
9269 block 2+2 <= scrub_point 7. No parent check
9270 block 3+2 <= scrub_point 7. No parent check
9271 block 4+2 <= scrub_point 7. No parent check
9272 block 5+2 <= scrub_point 7. No parent check
9273 block 6 < scrub_point 7. Check with your parent
9274 block 7 < scrub_point 7. Check with your parent
9275 block 8 > parent 8. Go to SubVolume
9276 block 0+2 <= scrub_point 8. No parent check
9277 block 1+2 <= scrub_point 8. No parent check
9278 block 2+2 <= scrub_point 8. No parent check
9279 block 3+2 <= scrub_point 8. No parent check
9280 block 4+2 <= scrub_point 8. No parent check
9281 block 5+2 <= scrub_point 8. No parent check
9282 block 6+2 <= scrub_point 8. No parent check
9283 block 7 < scrub_point 8. Check with your parent
9284 block 8 > parent 8. Go to SubVolume
9285 block 0 < scrub_point 0. Check with your parent
9286 block 1 < scrub_point 0. Check with your parent
9287 block 2 < scrub_point 0. Check with your parent
9288 block 3 < scrub_point 0. Check with your parent
9289 block 4 < scrub_point 0. Check with your parent
9290 block 5 < scrub_point 0. Check with your parent
9291 block 6 < scrub_point 0. Check with your parent
9292 block 7 < scrub_point 0. Check with your parent
9293 block 0 < scrub_point 1. Check with your parent
9294 block 1 < scrub_point 1. Check with your parent
9295 block 2 < scrub_point 1. Check with your parent
9296 block 3 < scrub_point 1. Check with your parent
9297 block 4 < scrub_point 1. Check with your parent
9298 block 5 < scrub_point 1. Check with your parent
9299 block 6 < scrub_point 1. Check with your parent
9300 block 7 < scrub_point 1. Check with your parent
9301 block 0 < scrub_point 2. Check with your parent
9302 block 1 < scrub_point 2. Check with your parent
9303 block 2 < scrub_point 2. Check with your parent
9304 block 3 < scrub_point 2. Check with your parent
9305 block 4 < scrub_point 2. Check with your parent
9306 block 5 < scrub_point 2. Check with your parent
9307 block 6 < scrub_point 2. Check with your parent
9308 block 7 < scrub_point 2. Check with your parent
9309 block 0+3 <= scrub_point 3. No parent check
9310 block 1 < scrub_point 3. Check with your parent
9311 block 2 < scrub_point 3. Check with your parent
9312 block 3 < scrub_point 3. Check with your parent
9313 block 4 < scrub_point 3. Check with your parent
9314 block 5 < scrub_point 3. Check with your parent
9315 block 6 < scrub_point 3. Check with your parent
9316 block 7 < scrub_point 3. Check with your parent
9317 block 0+3 <= scrub_point 4. No parent check
9318 block 1+3 <= scrub_point 4. No parent check
9319 block 2 < scrub_point 4. Check with your parent
9320 block 3 < scrub_point 4. Check with your parent
9321 block 4 < scrub_point 4. Check with your parent
9322 block 5 < scrub_point 4. Check with your parent
9323 block 6 < scrub_point 4. Check with your parent
9324 block 7 < scrub_point 4. Check with your parent
9325 block 0+3 <= scrub_point 5. No parent check
9326 block 1+3 <= scrub_point 5. No parent check
9327 block 2+3 <= scrub_point 5. No parent check
9328 block 3 < scrub_point 5. Check with your parent
9329 block 4 < scrub_point 5. Check with your parent
9330 block 5 < scrub_point 5. Check with your parent
9331 block 6 < scrub_point 5. Check with your parent
9332 block 7 < scrub_point 5. Check with your parent
9333 block 0+3 <= scrub_point 6. No parent check
9334 block 1+3 <= scrub_point 6. No parent check
9335 block 2+3 <= scrub_point 6. No parent check
9336 block 3+3 <= scrub_point 6. No parent check
9337 block 4 < scrub_point 6. Check with your parent
9338 block 5 < scrub_point 6. Check with your parent
9339 block 6 < scrub_point 6. Check with your parent
9340 block 7 < scrub_point 6. Check with your parent
9341 block 0+3 <= scrub_point 7. No parent check
9342 block 1+3 <= scrub_point 7. No parent check
9343 block 2+3 <= scrub_point 7. No parent check
9344 block 3+3 <= scrub_point 7. No parent check
9345 block 4+3 <= scrub_point 7. No parent check
9346 block 5 < scrub_point 7. Check with your parent
9347 block 6 < scrub_point 7. Check with your parent
9348 block 7 < scrub_point 7. Check with your parent
9349 block 0+3 <= scrub_point 8. No parent check
9350 block 1+3 <= scrub_point 8. No parent check
9351 block 2+3 <= scrub_point 8. No parent check
9352 block 3+3 <= scrub_point 8. No parent check
9353 block 4+3 <= scrub_point 8. No parent check
9354 block 5+3 <= scrub_point 8. No parent check
9355 block 6 < scrub_point 8. Check with your parent
9356 block 7 < scrub_point 8. Check with your parent
9357 block 0 < scrub_point 0. Check with your parent
9358 block 1 < scrub_point 0. Check with your parent
9359 block 2 < scrub_point 0. Check with your parent
9360 block 3 < scrub_point 0. Check with your parent
9361 block 4 < scrub_point 0. Check with your parent
9362 block 5 < scrub_point 0. Check with your parent
9363 block 6 < scrub_point 0. Check with your parent
9364 block 0 < scrub_point 1. Check with your parent
9365 block 1 < scrub_point 1. Check with your parent
9366 block 2 < scrub_point 1. Check with your parent
9367 block 3 < scrub_point 1. Check with your parent
9368 block 4 < scrub_point 1. Check with your parent
9369 block 5 < scrub_point 1. Check with your parent
9370 block 6 < scrub_point 1. Check with your parent
9371 block 0 < scrub_point 2. Check with your parent
9372 block 1 < scrub_point 2. Check with your parent
9373 block 2 < scrub_point 2. Check with your parent
9374 block 3 < scrub_point 2. Check with your parent
9375 block 4 < scrub_point 2. Check with your parent
9376 block 5 < scrub_point 2. Check with your parent
9377 block 6 < scrub_point 2. Check with your parent
9378 block 0 < scrub_point 3. Check with your parent
9379 block 1 < scrub_point 3. Check with your parent
9380 block 2 < scrub_point 3. Check with your parent
9381 block 3 < scrub_point 3. Check with your parent
9382 block 4 < scrub_point 3. Check with your parent
9383 block 5 < scrub_point 3. Check with your parent
9384 block 6 < scrub_point 3. Check with your parent
9385 block 0+4 <= scrub_point 4. No parent check
9386 block 1 < scrub_point 4. Check with your parent
9387 block 2 < scrub_point 4. Check with your parent
9388 block 3 < scrub_point 4. Check with your parent
9389 block 4 < scrub_point 4. Check with your parent
9390 block 5 < scrub_point 4. Check with your parent
9391 block 6 < scrub_point 4. Check with your parent
9392 block 0+4 <= scrub_point 5. No parent check
9393 block 1+4 <= scrub_point 5. No parent check
9394 block 2 < scrub_point 5. Check with your parent
9395 block 3 < scrub_point 5. Check with your parent
9396 block 4 < scrub_point 5. Check with your parent
9397 block 5 < scrub_point 5. Check with your parent
9398 block 6 < scrub_point 5. Check with your parent
9399 block 0+4 <= scrub_point 6. No parent check
9400 block 1+4 <= scrub_point 6. No parent check
9401 block 2+4 <= scrub_point 6. No parent check
9402 block 3 < scrub_point 6. Check with your parent
9403 block 4 < scrub_point 6. Check with your parent
9404 block 5 < scrub_point 6. Check with your parent
9405 block 6 < scrub_point 6. Check with your parent
9406 block 0+4 <= scrub_point 7. No parent check
9407 block 1+4 <= scrub_point 7. No parent check
9408 block 2+4 <= scrub_point 7. No parent check
9409 block 3+4 <= scrub_point 7. No parent check
9410 block 4 < scrub_point 7. Check with your parent
9411 block 5 < scrub_point 7. Check with your parent
9412 block 6 < scrub_point 7. Check with your parent
9413 block 0+4 <= scrub_point 8. No parent check
9414 block 1+4 <= scrub_point 8. No parent check
9415 block 2+4 <= scrub_point 8. No parent check
9416 block 3+4 <= scrub_point 8. No parent check
9417 block 4+4 <= scrub_point 8. No parent check
9418 block 5 < scrub_point 8. Check with your parent
9419 block 6 < scrub_point 8. Check with your parent
9420 block 0 < scrub_point 0. Check with your parent
9421 block 1 < scrub_point 0. Check with your parent
9422 block 2 < scrub_point 0. Check with your parent
9423 block 3 < scrub_point 0. Check with your parent
9424 block 4 < scrub_point 0. Check with your parent
9425 block 5 < scrub_point 0. Check with your parent
9426 block 0 < scrub_point 1. Check with your parent
9427 block 1 < scrub_point 1. Check with your parent
9428 block 2 < scrub_point 1. Check with your parent
9429 block 3 < scrub_point 1. Check with your parent
9430 block 4 < scrub_point 1. Check with your parent
9431 block 5 < scrub_point 1. Check with your parent
9432 block 0 < scrub_point 2. Check with your parent
9433 block 1 < scrub_point 2. Check with your parent
9434 block 2 < scrub_point 2. Check with your parent
9435 block 3 < scrub_point 2. Check with your parent
9436 block 4 < scrub_point 2. Check with your parent
9437 block 5 < scrub_point 2. Check with your parent
9438 block 0 < scrub_point 3. Check with your parent
9439 block 1 < scrub_point 3. Check with your parent
9440 block 2 < scrub_point 3. Check with your parent
9441 block 3 < scrub_point 3. Check with your parent
9442 block 4 < scrub_point 3. Check with your parent
9443 block 5 < scrub_point 3. Check with your parent
9444 block 0 < scrub_point 4. Check with your parent
9445 block 1 < scrub_point 4. Check with your parent
9446 block 2 < scrub_point 4. Check with your parent
9447 block 3 < scrub_point 4. Check with your parent
9448 block 4 < scrub_point 4. Check with your parent
9449 block 5 < scrub_point 4. Check with your parent
9450 block 0+5 <= scrub_point 5. No parent check
9451 block 1 < scrub_point 5. Check with your parent
9452 block 2 < scrub_point 5. Check with your parent
9453 block 3 < scrub_point 5. Check with your parent
9454 block 4 < scrub_point 5. Check with your parent
9455 block 5 < scrub_point 5. Check with your parent
9456 block 0+5 <= scrub_point 6. No parent check
9457 block 1+5 <= scrub_point 6. No parent check
9458 block 2 < scrub_point 6. Check with your parent
9459 block 3 < scrub_point 6. Check with your parent
9460 block 4 < scrub_point 6. Check with your parent
9461 block 5 < scrub_point 6. Check with your parent
9462 block 0+5 <= scrub_point 7. No parent check
9463 block 1+5 <= scrub_point 7. No parent check
9464 block 2+5 <= scrub_point 7. No parent check
9465 block 3 < scrub_point 7. Check with your parent
9466 block 4 < scrub_point 7. Check with your parent
9467 block 5 < scrub_point 7. Check with your parent
9468 block 0+5 <= scrub_point 8. No parent check
9469 block 1+5 <= scrub_point 8. No parent check
9470 block 2+5 <= scrub_point 8. No parent check
9471 block 3+5 <= scrub_point 8. No parent check
9472 block 4 < scrub_point 8. Check with your parent
9473 block 5 < scrub_point 8. Check with your parent
9474 block 0 < scrub_point 0. Check with your parent
9475 block 1 < scrub_point 0. Check with your parent
9476 block 2 < scrub_point 0. Check with your parent
9477 block 3 < scrub_point 0. Check with your parent
9478 block 4 < scrub_point 0. Check with your parent
9479 block 0 < scrub_point 1. Check with your parent
9480 block 1 < scrub_point 1. Check with your parent
9481 block 2 < scrub_point 1. Check with your parent
9482 block 3 < scrub_point 1. Check with your parent
9483 block 4 < scrub_point 1. Check with your parent
9484 block 0 < scrub_point 2. Check with your parent
9485 block 1 < scrub_point 2. Check with your parent
9486 block 2 < scrub_point 2. Check with your parent
9487 block 3 < scrub_point 2. Check with your parent
9488 block 4 < scrub_point 2. Check with your parent
9489 block 0 < scrub_point 3. Check with your parent
9490 block 1 < scrub_point 3. Check with your parent
9491 block 2 < scrub_point 3. Check with your parent
9492 block 3 < scrub_point 3. Check with your parent
9493 block 4 < scrub_point 3. Check with your parent
9494 block 0 < scrub_point 4. Check with your parent
9495 block 1 < scrub_point 4. Check with your parent
9496 block 2 < scrub_point 4. Check with your parent
9497 block 3 < scrub_point 4. Check with your parent
9498 block 4 < scrub_point 4. Check with your parent
9499 block 0 < scrub_point 5. Check with your parent
9500 block 1 < scrub_point 5. Check with your parent
9501 block 2 < scrub_point 5. Check with your parent
9502 block 3 < scrub_point 5. Check with your parent
9503 block 4 < scrub_point 5. Check with your parent
9504 block 0+6 <= scrub_point 6. No parent check
9505 block 1 < scrub_point 6. Check with your parent
9506 block 2 < scrub_point 6. Check with your parent
9507 block 3 < scrub_point 6. Check with your parent
9508 block 4 < scrub_point 6. Check with your parent
9509 block 0+6 <= scrub_point 7. No parent check
9510 block 1+6 <= scrub_point 7. No parent check
9511 block 2 < scrub_point 7. Check with your parent
9512 block 3 < scrub_point 7. Check with your parent
9513 block 4 < scrub_point 7. Check with your parent
9514 block 0+6 <= scrub_point 8. No parent check
9515 block 1+6 <= scrub_point 8. No parent check
9516 block 2+6 <= scrub_point 8. No parent check
9517 block 3 < scrub_point 8. Check with your parent
9518 block 4 < scrub_point 8. Check with your parent
9519 block 0 < scrub_point 0. Check with your parent
9520 block 1 < scrub_point 0. Check with your parent
9521 block 2 < scrub_point 0. Check with your parent
9522 block 3 < scrub_point 0. Check with your parent
9523 block 0 < scrub_point 1. Check with your parent
9524 block 1 < scrub_point 1. Check with your parent
9525 block 2 < scrub_point 1. Check with your parent
9526 block 3 < scrub_point 1. Check with your parent
9527 block 0 < scrub_point 2. Check with your parent
9528 block 1 < scrub_point 2. Check with your parent
9529 block 2 < scrub_point 2. Check with your parent
9530 block 3 < scrub_point 2. Check with your parent
9531 block 0 < scrub_point 3. Check with your parent
9532 block 1 < scrub_point 3. Check with your parent
9533 block 2 < scrub_point 3. Check with your parent
9534 block 3 < scrub_point 3. Check with your parent
9535 block 0 < scrub_point 4. Check with your parent
9536 block 1 < scrub_point 4. Check with your parent
9537 block 2 < scrub_point 4. Check with your parent
9538 block 3 < scrub_point 4. Check with your parent
9539 block 0 < scrub_point 5. Check with your parent
9540 block 1 < scrub_point 5. Check with your parent
9541 block 2 < scrub_point 5. Check with your parent
9542 block 3 < scrub_point 5. Check with your parent
9543 block 0 < scrub_point 6. Check with your parent
9544 block 1 < scrub_point 6. Check with your parent
9545 block 2 < scrub_point 6. Check with your parent
9546 block 3 < scrub_point 6. Check with your parent
9547 block 0+7 <= scrub_point 7. No parent check
9548 block 1 < scrub_point 7. Check with your parent
9549 block 2 < scrub_point 7. Check with your parent
9550 block 3 < scrub_point 7. Check with your parent
9551 block 0+7 <= scrub_point 8. No parent check
9552 block 1+7 <= scrub_point 8. No parent check
9553 block 2 < scrub_point 8. Check with your parent
9554 block 3 < scrub_point 8. Check with your parent
9555 block 0 < scrub_point 0. Check with your parent
9556 block 1 < scrub_point 0. Check with your parent
9557 block 2 < scrub_point 0. Check with your parent
9558 block 0 < scrub_point 1. Check with your parent
9559 block 1 < scrub_point 1. Check with your parent
9560 block 2 < scrub_point 1. Check with your parent
9561 block 0 < scrub_point 2. Check with your parent
9562 block 1 < scrub_point 2. Check with your parent
9563 block 2 < scrub_point 2. Check with your parent
9564 block 0 < scrub_point 3. Check with your parent
9565 block 1 < scrub_point 3. Check with your parent
9566 block 2 < scrub_point 3. Check with your parent
9567 block 0 < scrub_point 4. Check with your parent
9568 block 1 < scrub_point 4. Check with your parent
9569 block 2 < scrub_point 4. Check with your parent
9570 block 0 < scrub_point 5. Check with your parent
9571 block 1 < scrub_point 5. Check with your parent
9572 block 2 < scrub_point 5. Check with your parent
9573 block 0 < scrub_point 6. Check with your parent
9574 block 1 < scrub_point 6. Check with your parent
9575 block 2 < scrub_point 6. Check with your parent
9576 block 0 < scrub_point 7. Check with your parent
9577 block 1 < scrub_point 7. Check with your parent
9578 block 2 < scrub_point 7. Check with your parent
9579 block 0+8 <= scrub_point 8. No parent check
9580 block 1 < scrub_point 8. Check with your parent
9581 block 2 < scrub_point 8. Check with your parent
9582 block 0 < scrub_point 0. Check with your parent
9583 block 1 < scrub_point 0. Check with your parent
9584 block 0 < scrub_point 1. Check with your parent
9585 block 1 < scrub_point 1. Check with your parent
9586 block 0 < scrub_point 2. Check with your parent
9587 block 1 < scrub_point 2. Check with your parent
9588 block 0 < scrub_point 3. Check with your parent
9589 block 1 < scrub_point 3. Check with your parent
9590 block 0 < scrub_point 4. Check with your parent
9591 block 1 < scrub_point 4. Check with your parent
9592 block 0 < scrub_point 5. Check with your parent
9593 block 1 < scrub_point 5. Check with your parent
9594 block 0 < scrub_point 6. Check with your parent
9595 block 1 < scrub_point 6. Check with your parent
9596 block 0 < scrub_point 7. Check with your parent
9597 block 1 < scrub_point 7. Check with your parent
9598 block 0 < scrub_point 8. Check with your parent
9599 block 1 < scrub_point 8. Check with your parent
9600 test volume::test::test_scrub_point_two_subvolume_smaller_3 ... ok
9601 test volume::test::test_single_block ... ok
9602 test volume::test::test_single_sub_volume_lba_coverage ... ok
9603 test volume::test::test_single_sub_volume_lba_coverage_with_offset ... ok
9604 test volume::test::test_three_layers ... ok
9605 test volume::test::test_volume_size ... ok
9606 test volume::test::test_volume_with_only_read_only_parent ... ok
9607 test volume::test::test_write_unwritten_to_volume_with_only_read_only_parent ... ok
9608 test volume::test::test_writing_to_volume_with_only_read_only_parent ... ok
9609 Sep 22 23:08:16.243 INFO Test replacement of CID 0
9610 Sep 22 23:08:16.243 INFO replace 127.0.0.1:5555 with 127.0.0.1:8888
9611 Sep 22 23:08:16.243 INFO Test replacement of CID 1
9612 Sep 22 23:08:16.243 INFO replace 127.0.0.1:6666 with 127.0.0.1:8888
9613 Sep 22 23:08:16.243 INFO Test replacement of CID 2
9614 Sep 22 23:08:16.243 INFO replace 127.0.0.1:7777 with 127.0.0.1:8888
9615 test volume::test::volume_replace_basic ... ok
9616 test volume::test::volume_replace_drop_rop ... ok
9617 test volume::test::volume_replace_mismatch_opts_cert_pem ... ok
9618 test volume::test::volume_replace_mismatch_opts_control ... ok
9619 test volume::test::volume_replace_mismatch_opts_flush_timeout ... ok
9620 test volume::test::volume_replace_mismatch_opts_id ... ok
9621 test volume::test::volume_replace_mismatch_opts_key ... ok
9622 test volume::test::volume_replace_mismatch_opts_key_pem ... ok
9623 test volume::test::volume_replace_mismatch_opts_lossy ... ok
9624 test volume::test::volume_replace_mismatch_opts_read_only ... ok
9625 test volume::test::volume_replace_mismatch_opts_root_cert ... ok
9626 test volume::test::volume_replace_mismatch_sv_bpe ... ok
9627 test volume::test::volume_replace_mismatch_sv_bs ... ok
9628 test volume::test::volume_replace_mismatch_sv_ec ... ok
9629 test volume::test::volume_replace_mismatch_vblock ... ok
9630 test volume::test::volume_replace_mismatch_vid ... ok
9631 test volume::test::volume_replace_mismatch_vrop ... ok
9632 test volume::test::volume_replace_rop ... ok
9633 test volume::test::volume_replace_self ... ok
9634 test volume::test::volume_vcr_no_target ... ok
96352023-09-22T23:08:16.446ZINFOcrucible: Waiting for 3 jobs (currently 2)
96362023-09-22T23:08:16.446ZINFOcrucible: No repair needed for extent 0 = downstairs
96372023-09-22T23:08:16.446ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
96382023-09-22T23:08:16.576ZINFOcrucible: Waiting for 4 jobs (currently 3)
96392023-09-22T23:08:16.576ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
96402023-09-22T23:08:16.576ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
96412023-09-22T23:08:16.580ZINFOcrucible: Waiting for 4 jobs (currently 3)
96422023-09-22T23:08:16.580ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
96432023-09-22T23:08:16.693ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
96442023-09-22T23:08:16.693ZERROcrucible: [0] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
96452023-09-22T23:08:16.693ZINFOcrucible: [0] client skip 3 in process jobs because fault = downstairs
96462023-09-22T23:08:16.693ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
96472023-09-22T23:08:16.693ZINFOcrucible: [0] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) Active LiveRepair Active ds_transition to Faulted
96482023-09-22T23:08:16.693ZINFOcrucible: [0] Transition from Active to Faulted
96492023-09-22T23:08:16.693ZINFOcrucible: Waiting for 4 jobs (currently 3)
96502023-09-22T23:08:16.693ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
96512023-09-22T23:08:16.693ZINFOcrucible: [1] client skip 3 in process jobs because fault = downstairs
96522023-09-22T23:08:16.693ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
96532023-09-22T23:08:16.693ZINFOcrucible: [1] 678cfcb8-a5e0-4920-b486-0e81743672c3 (d9d086f6-80b5-4805-9193-71fbf1abd250) Faulted LiveRepair Active ds_transition to Faulted
96542023-09-22T23:08:16.693ZINFOcrucible: [1] Transition from LiveRepair to Faulted
96552023-09-22T23:08:16.693ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
96562023-09-22T23:08:17.447ZINFOcrucible: Waiting for 4 jobs (currently 3)
96572023-09-22T23:08:17.447ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
96582023-09-22T23:08:17.575ZINFOcrucible: Finally, move the ReOpen job forward
96592023-09-22T23:08:17.575ZINFOcrucible: Now ACK the reopen job
96602023-09-22T23:08:17.575ZWARNcrucible: RE:0 Bailing with error
96612023-09-22T23:08:17.575ZINFOcrucible: err:2 or:0
96622023-09-22T23:08:17.576ZINFOcrucible: Crucible stats registered with UUID: 0c47db24-e385-42f3-95d6-fa49ef7706df
96632023-09-22T23:08:17.576ZINFOcrucible: Crucible 0c47db24-e385-42f3-95d6-fa49ef7706df has session id: 88c0379c-a848-42d6-8062-1cd30b9aef3c
96642023-09-22T23:08:17.576ZINFOcrucible: [0] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) New New New ds_transition to WaitActive
96652023-09-22T23:08:17.576ZINFOcrucible: [0] Transition from New to WaitActive
96662023-09-22T23:08:17.576ZINFOcrucible: [0] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) WaitActive New New ds_transition to WaitQuorum
96672023-09-22T23:08:17.576ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
96682023-09-22T23:08:17.576ZINFOcrucible: [0] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) WaitQuorum New New ds_transition to Active
96692023-09-22T23:08:17.576ZINFOcrucible: [0] Transition from WaitQuorum to Active
96702023-09-22T23:08:17.576ZINFOcrucible: [1] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) Active New New ds_transition to WaitActive
96712023-09-22T23:08:17.576ZINFOcrucible: [1] Transition from New to WaitActive
96722023-09-22T23:08:17.576ZINFOcrucible: [1] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) Active WaitActive New ds_transition to WaitQuorum
96732023-09-22T23:08:17.576ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
96742023-09-22T23:08:17.576ZINFOcrucible: [1] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) Active WaitQuorum New ds_transition to Active
96752023-09-22T23:08:17.576ZINFOcrucible: [1] Transition from WaitQuorum to Active
96762023-09-22T23:08:17.576ZINFOcrucible: [2] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) Active Active New ds_transition to WaitActive
96772023-09-22T23:08:17.576ZINFOcrucible: [2] Transition from New to WaitActive
9678 {"msg":"[2] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) Active Active WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30{"msg",:""time":"Now move the NoOp job forward"2023-09-22T23:08:17.576368834Z",,""v"hostname:":"0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible,""pid,"":4291level":}30
9679 {"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:17.576389778Z","hostname",:""time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:17.57640229Z",,""pidhostname""::"4291}
9680 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
9681 {{"msg":""msg":"Now ACK the NoOp job","v":0,"name":"crucible","level":[2] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) Active Active WaitQuorum ds_transition to Active30","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:17.57643434Z",,""time"hostname:"":"2023-09-22T23:08:17.576439056Z",ip-10-150-1-74.us-west-2.compute.internal"","hostname"pid:"":4291}
9682 ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}
9683 "msg":"{"Finally, move the ReOpen job forwardmsg"":","v":0,"[2] Transition from WaitQuorum to Active"name":,""v"crucible:"0,,""levelname""::"30crucible","level":30,"time":","time":"2023-09-22T23:08:17.576469356Z","2023-09-22T23:08:17.576471339Zhostname"":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal4291","}pid
9684 ":4291}
9685 {"{msg":""msg":"Now ACK the Reopen job","v":0,"name":"crucible","0c47db24-e385-42f3-95d6-fa49ef7706df is now active with session: 927b0333-54f5-4011-a1f3-3f18a06b5db1"level":,"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:17.576500933Z",","hostname":time"":"ip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:08:17.576505107Z"",","pid":hostname"4291:"}
9686 ip-10-150-1-74.us-west-2.compute.internal","pid":4291{}
9687 "msg":"{"Extent 0 close id:1002 Failed: Error: badmsg"":","v":0,"name":"crucible","level":50[1] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) Active Active Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:17.576541068Z","hostname",:""time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:17.576546531Z,"",pid"":hostname4291":"}
9688 ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}
9689 "msg":"{"msg"RE:0 Wait for result from reopen command 1003:4:""----------------------------------------------------------------
9690 [1] Transition from Active to Faulted,""v,"":v"0:,0",name""name:"" Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
9691 :crucible"",crucible"",level""GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
9692 :level"30:30 1 Acked 1000 FClose 0 Done Err Done false
9693 2 Acked 1001 NoOp 0,","time Donetime"":: Skip"" Done false
9694 2023-09-22T23:08:17.576589689Z2023-09-22T23:08:17.576589378Z"" 3 Acked 1002 NoOp 0,,"" Donehostnamehostname" Skip"::"" Done false
9695 ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal 4 Acked 1003 Reopen 0"",," Donepid"" Skip:pid4291" Done}:
9696 4291 false
9697 {} STATES DS:0 DS:1 DS:2 TOTAL
9698 "
9699 msg" New :{" 0 0 " 0 msgExtent 0 close id:1003 Failed: Error: bad"": 0
9700 , Sent ""v" 0 : 0 0 0 , 0
9701 Done "[1] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) Active Faulted Active ds_transition to LiveRepairReadyname"":,"" 4 cruciblev"",:"0level,"": 0 50name" 4 8
9702 Skipped : 0 " 3 0 crucible" 3
9703 ," Error 0 ,level""time:"30: 1 " 0 1
9704 2023-09-22T23:08:17.576658929Z"Last Flush: ,0 "0 0
9705 hostname":","time"Downstairs last five completed::ip-10-150-1-74.us-west-2.compute.internal"",
9706 2023-09-22T23:08:17.576670246Z""pid",:"4291hostname":}"
9707 ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}
9708 "Upstairs last five completed: msg":{" 4"RE:0 Bailing with error"msg",:""v" 3: 2[1] Transition from Faulted to LiveRepairReady0",," 1
9709 name"":v"":0crucible",","name"level:"":40crucible","level":30,"time":,""time":"2023-09-22T23:08:17.576706389Z","2023-09-22T23:08:17.576708222Z"hostname",:""hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internalpid"",:"4291pid":}4291
9710 }
97112023-09-22T23:08:17.576ZINFOcrucible: [1] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) Active LiveRepairReady Active ds_transition to LiveRepair
97122023-09-22T23:08:17.576ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
97132023-09-22T23:08:17.576ZINFOcrucible: Waiting for Close + ReOpen jobs
97142023-09-22T23:08:17.576ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
97152023-09-22T23:08:17.576ZINFOcrucible: RE:0 close id:1000 queued, notify DS
97162023-09-22T23:08:17.576ZINFOcrucible: RE:0 Wait for result from close command 1000:1
97172023-09-22T23:08:17.577ZINFOcrucible: Crucible stats registered with UUID: 179ddd3d-9699-42f1-8c1f-c406c6f24702
97182023-09-22T23:08:17.577ZINFOcrucible: Crucible 179ddd3d-9699-42f1-8c1f-c406c6f24702 has session id: bfa3e4d2-5ff8-44fe-9d46-bee60ad15c9f
97192023-09-22T23:08:17.577ZINFOcrucible: [0] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) New New New ds_transition to WaitActive
97202023-09-22T23:08:17.577ZINFOcrucible: [0] Transition from New to WaitActive
97212023-09-22T23:08:17.577ZINFOcrucible: [0] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) WaitActive New New ds_transition to WaitQuorum
97222023-09-22T23:08:17.577ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
97232023-09-22T23:08:17.577ZINFOcrucible: [0] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) WaitQuorum New New ds_transition to Active
97242023-09-22T23:08:17.577ZINFOcrucible: [0] Transition from WaitQuorum to Active
97252023-09-22T23:08:17.577ZINFOcrucible: [1] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) Active New New ds_transition to WaitActive
97262023-09-22T23:08:17.577ZINFOcrucible: [1] Transition from New to WaitActive
97272023-09-22T23:08:17.577ZINFOcrucible: [1] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) Active WaitActive New ds_transition to WaitQuorum
97282023-09-22T23:08:17.577ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
97292023-09-22T23:08:17.577ZINFOcrucible: [1] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) Active WaitQuorum New ds_transition to Active
97302023-09-22T23:08:17.577ZINFOcrucible: [1] Transition from WaitQuorum to Active
97312023-09-22T23:08:17.577ZINFOcrucible: [2] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) Active Active New ds_transition to WaitActive
97322023-09-22T23:08:17.577ZINFOcrucible: [2] Transition from New to WaitActive
97332023-09-22T23:08:17.577ZINFOcrucible: [2] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) Active Active WaitActive ds_transition to WaitQuorum
97342023-09-22T23:08:17.577ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
97352023-09-22T23:08:17.577ZINFOcrucible: [2] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) Active Active WaitQuorum ds_transition to Active
97362023-09-22T23:08:17.577ZINFOcrucible: [2] Transition from WaitQuorum to Active
97372023-09-22T23:08:17.577ZINFOcrucible: 179ddd3d-9699-42f1-8c1f-c406c6f24702 is now active with session: 0798fb5a-0860-4b6f-8556-63835c5b08b6
97382023-09-22T23:08:17.577ZINFOcrucible: [1] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) Active Active Active ds_transition to Faulted
97392023-09-22T23:08:17.577ZINFOcrucible: [1] Transition from Active to Faulted
97402023-09-22T23:08:17.577ZINFOcrucible: [1] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) Active Faulted Active ds_transition to LiveRepairReady
97412023-09-22T23:08:17.577ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
97422023-09-22T23:08:17.577ZINFOcrucible: [1] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) Active LiveRepairReady Active ds_transition to LiveRepair
97432023-09-22T23:08:17.577ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
97442023-09-22T23:08:17.577ZINFOcrucible: Waiting for Close + ReOpen jobs
97452023-09-22T23:08:17.577ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
97462023-09-22T23:08:17.577ZINFOcrucible: RE:0 close id:1000 queued, notify DS
97472023-09-22T23:08:17.577ZINFOcrucible: RE:0 Wait for result from close command 1000:1
97482023-09-22T23:08:17.581ZINFOcrucible: Now move the NoOp job forward
97492023-09-22T23:08:17.581ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
97502023-09-22T23:08:17.581ZERROcrucible: [1] Reports error GenericError("bad") on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
97512023-09-22T23:08:17.581ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
97522023-09-22T23:08:17.581ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
97532023-09-22T23:08:17.581ZINFOcrucible: [1] d53fef88-17c8-441f-9cbe-e9f976239c5e (36f4bb72-2a32-45e5-94a6-6cee1f536fe0) Active LiveRepair Active ds_transition to Faulted
97542023-09-22T23:08:17.581ZINFOcrucible: [1] Transition from LiveRepair to Faulted
97552023-09-22T23:08:17.581ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
97562023-09-22T23:08:17.581ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
97572023-09-22T23:08:17.581ZWARNcrucible: RE:0 Bailing with error
97582023-09-22T23:08:17.582ZINFOcrucible: Crucible stats registered with UUID: edd7a265-5253-4482-a9cb-a825dc1347c4
97592023-09-22T23:08:17.582ZINFOcrucible: Crucible edd7a265-5253-4482-a9cb-a825dc1347c4 has session id: cb7be0f7-fb1c-4503-846e-658817fd52af
97602023-09-22T23:08:17.582ZINFOcrucible: [0] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) New New New ds_transition to WaitActive
97612023-09-22T23:08:17.582ZINFOcrucible: [0] Transition from New to WaitActive
97622023-09-22T23:08:17.582ZINFOcrucible: [0] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) WaitActive New New ds_transition to WaitQuorum
97632023-09-22T23:08:17.582ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
97642023-09-22T23:08:17.582ZINFOcrucible: [0] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) WaitQuorum New New ds_transition to Active
97652023-09-22T23:08:17.582ZINFOcrucible: [0] Transition from WaitQuorum to Active
97662023-09-22T23:08:17.582ZINFOcrucible: [1] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) Active New New ds_transition to WaitActive
97672023-09-22T23:08:17.582ZINFOcrucible: [1] Transition from New to WaitActive
97682023-09-22T23:08:17.582ZINFOcrucible: [1] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) Active WaitActive New ds_transition to WaitQuorum
97692023-09-22T23:08:17.582ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
97702023-09-22T23:08:17.582ZINFOcrucible: [1] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) Active WaitQuorum New ds_transition to Active
97712023-09-22T23:08:17.582ZINFOcrucible: [1] Transition from WaitQuorum to Active
97722023-09-22T23:08:17.582ZINFOcrucible: [2] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) Active Active New ds_transition to WaitActive
97732023-09-22T23:08:17.582ZINFOcrucible: [2] Transition from New to WaitActive
97742023-09-22T23:08:17.582ZINFOcrucible: [2] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) Active Active WaitActive ds_transition to WaitQuorum
97752023-09-22T23:08:17.582ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
97762023-09-22T23:08:17.582ZINFOcrucible: [2] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) Active Active WaitQuorum ds_transition to Active
97772023-09-22T23:08:17.582ZINFOcrucible: [2] Transition from WaitQuorum to Active
97782023-09-22T23:08:17.582ZINFOcrucible: edd7a265-5253-4482-a9cb-a825dc1347c4 is now active with session: 1dd29b58-71e1-40af-b106-613dfa5b70bd
97792023-09-22T23:08:17.582ZINFOcrucible: [1] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) Active Active Active ds_transition to Faulted
97802023-09-22T23:08:17.582ZINFOcrucible: [1] Transition from Active to Faulted
97812023-09-22T23:08:17.582ZINFOcrucible: [1] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) Active Faulted Active ds_transition to LiveRepairReady
97822023-09-22T23:08:17.582ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
97832023-09-22T23:08:17.582ZINFOcrucible: [1] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) Active LiveRepairReady Active ds_transition to LiveRepair
97842023-09-22T23:08:17.582ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
97852023-09-22T23:08:17.582ZINFOcrucible: Waiting for Close + ReOpen jobs
97862023-09-22T23:08:17.582ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
97872023-09-22T23:08:17.582ZINFOcrucible: RE:0 close id:1000 queued, notify DS
97882023-09-22T23:08:17.582ZINFOcrucible: RE:0 Wait for result from close command 1000:1
97892023-09-22T23:08:17.694ZINFOcrucible: Now move the NoOp job forward
97902023-09-22T23:08:17.694ZINFOcrucible: Now ACK the NoOp job
97912023-09-22T23:08:17.694ZINFOcrucible: Finally, move the ReOpen job forward
97922023-09-22T23:08:17.694ZINFOcrucible: Now ACK the Reopen job
97932023-09-22T23:08:17.695ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
97942023-09-22T23:08:17.695ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
97952023-09-22T23:08:17.695ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
97962023-09-22T23:08:17.695ZWARNcrucible: RE:0 Bailing with error
97972023-09-22T23:08:17.695ZINFOcrucible: Crucible stats registered with UUID: 6e81afb6-3f64-41f6-85cf-6a14c224e762
97982023-09-22T23:08:17.695ZINFOcrucible: Crucible 6e81afb6-3f64-41f6-85cf-6a14c224e762 has session id: af80e566-5757-4e37-a104-3652dbed167d
97992023-09-22T23:08:17.695ZINFOcrucible: [0] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) New New New ds_transition to WaitActive
98002023-09-22T23:08:17.695ZINFOcrucible: [0] Transition from New to WaitActive
98012023-09-22T23:08:17.695ZINFOcrucible: [0] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) WaitActive New New ds_transition to WaitQuorum
98022023-09-22T23:08:17.695ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
98032023-09-22T23:08:17.695ZINFOcrucible: [0] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) WaitQuorum New New ds_transition to Active
98042023-09-22T23:08:17.695ZINFOcrucible: [0] Transition from WaitQuorum to Active
98052023-09-22T23:08:17.695ZINFOcrucible: [1] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) Active New New ds_transition to WaitActive
98062023-09-22T23:08:17.695ZINFOcrucible: [1] Transition from New to WaitActive
98072023-09-22T23:08:17.695ZINFOcrucible: [1] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) Active WaitActive New ds_transition to WaitQuorum
98082023-09-22T23:08:17.695ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
98092023-09-22T23:08:17.695ZINFOcrucible: [1] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) Active WaitQuorum New ds_transition to Active
98102023-09-22T23:08:17.695ZINFOcrucible: [1] Transition from WaitQuorum to Active
98112023-09-22T23:08:17.695ZINFOcrucible: [2] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) Active Active New ds_transition to WaitActive
98122023-09-22T23:08:17.695ZINFOcrucible: [2] Transition from New to WaitActive
98132023-09-22T23:08:17.695ZINFOcrucible: [2] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) Active Active WaitActive ds_transition to WaitQuorum
98142023-09-22T23:08:17.695ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
98152023-09-22T23:08:17.695ZINFOcrucible: [2] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) Active Active WaitQuorum ds_transition to Active
98162023-09-22T23:08:17.695ZINFOcrucible: [2] Transition from WaitQuorum to Active
98172023-09-22T23:08:17.695ZINFOcrucible: 6e81afb6-3f64-41f6-85cf-6a14c224e762 is now active with session: 5769323c-5c76-4ebc-b603-7b2ed8f3d5cb
98182023-09-22T23:08:17.695ZINFOcrucible: [1] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) Active Active Active ds_transition to Faulted
98192023-09-22T23:08:17.695ZINFOcrucible: [1] Transition from Active to Faulted
98202023-09-22T23:08:17.695ZINFOcrucible: [1] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) Active Faulted Active ds_transition to LiveRepairReady
98212023-09-22T23:08:17.695ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
98222023-09-22T23:08:17.695ZINFOcrucible: [1] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) Active LiveRepairReady Active ds_transition to LiveRepair
98232023-09-22T23:08:17.696ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
98242023-09-22T23:08:17.696ZINFOcrucible: Waiting for Close + ReOpen jobs
98252023-09-22T23:08:17.696ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
98262023-09-22T23:08:17.696ZINFOcrucible: RE:0 close id:1000 queued, notify DS
98272023-09-22T23:08:17.696ZINFOcrucible: RE:0 Wait for result from close command 1000:1
9828 {"msg":"Waiting for 3 jobs (currently 2)","v":0,"name":"crucible","level":30{"msg":"[2] DS Reports error Err(GenericError(\"bad\")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(2), repair_downstairs: [ClientId(1)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }","v":0,","time":name"":"crucible"2023-09-22T23:08:18.577587237Z","level":,"50hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
9829 ,"time":"{2023-09-22T23:08:18.577636961Z","hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg",:""pid":4291No repair needed for extent 0",,""v"":":downstairs"0,"}
9830 name":"crucible","level":30{"msg":"[2] Reports error GenericError(\"bad\") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(2), repair_downstairs: [ClientId(1)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }",,""v":time0",":name":""crucible","level":502023-09-22T23:08:18.577683037Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs","}time
9831 ":"2023-09-22T23:08:18.57770613Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":""downstairs"msg"}:"
9832 RE:0 Wait for result from repair command 1001:2","v":0,"name{":"crucible"","msg":level"":30[2] client skip 2 in process jobs because fault","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:18.577743486Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid,"":4291time",:""":"downstairs"}
9833 2023-09-22T23:08:18.577739993Z"{,""hostnamemsg":"":"[2] changed 1 jobs to fault skipped","v":0,"ip-10-150-1-74.us-west-2.compute.internal"name",:""crucible"pid",:"4291level":30}
9834 ,"time":"2023-09-22T23:08:18.577773814Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs"}
98352023-09-22T23:08:18.577ZINFOcrucible: [2] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) Active LiveRepair Active ds_transition to Faulted
98362023-09-22T23:08:18.577ZINFOcrucible: [2] Transition from Active to Faulted
98372023-09-22T23:08:18.577ZINFOcrucible: Now ACK the close job
98382023-09-22T23:08:18.577ZINFOcrucible: Waiting for 3 jobs (currently 2)
98392023-09-22T23:08:18.577ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
98402023-09-22T23:08:18.577ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
98412023-09-22T23:08:18.577ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
98422023-09-22T23:08:18.577ZINFOcrucible: [1] 179ddd3d-9699-42f1-8c1f-c406c6f24702 (0798fb5a-0860-4b6f-8556-63835c5b08b6) Active LiveRepair Faulted ds_transition to Faulted
98432023-09-22T23:08:18.577ZINFOcrucible: [1] Transition from LiveRepair to Faulted
98442023-09-22T23:08:18.577ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
98452023-09-22T23:08:18.583ZINFOcrucible: Waiting for 3 jobs (currently 2)
98462023-09-22T23:08:18.583ZINFOcrucible: No repair needed for extent 0 = downstairs
98472023-09-22T23:08:18.583ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
98482023-09-22T23:08:18.697ZINFOcrucible: Now ACK the close job
98492023-09-22T23:08:18.697ZINFOcrucible: Waiting for 3 jobs (currently 2)
98502023-09-22T23:08:18.697ZINFOcrucible: No repair needed for extent 0 = downstairs
98512023-09-22T23:08:18.697ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
98522023-09-22T23:08:19.579ZINFOcrucible: Waiting for 4 jobs (currently 3)
98532023-09-22T23:08:19.579ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
98542023-09-22T23:08:19.579ZINFOcrucible: Waiting for 4 jobs (currently 3)
98552023-09-22T23:08:19.579ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
98562023-09-22T23:08:19.579ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
98572023-09-22T23:08:19.585ZINFOcrucible: Waiting for 4 jobs (currently 3)
98582023-09-22T23:08:19.585ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
98592023-09-22T23:08:19.698ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
98602023-09-22T23:08:19.698ZERROcrucible: [1] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
98612023-09-22T23:08:19.698ZINFOcrucible: [1] client skip 3 in process jobs because fault = downstairs
98622023-09-22T23:08:19.698ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
98632023-09-22T23:08:19.698ZINFOcrucible: [1] 6e81afb6-3f64-41f6-85cf-6a14c224e762 (5769323c-5c76-4ebc-b603-7b2ed8f3d5cb) Active LiveRepair Active ds_transition to Faulted
98642023-09-22T23:08:19.698ZINFOcrucible: [1] Transition from LiveRepair to Faulted
98652023-09-22T23:08:19.698ZINFOcrucible: Waiting for 4 jobs (currently 3)
98662023-09-22T23:08:19.698ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
98672023-09-22T23:08:19.698ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
9868 ----------------------------------------------------------------
9869 Crucible gen:0 GIO:true work queues: Upstairs:2 downstairs:4
9870 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
9871 1 Acked 1000 FClose 0 Done Done Done false
9872 2 Acked 1001 NoOp 0 Done Done Done false
9873 3 NotAcked 1002 NoOp 0 New New New false
9874 4 NotAcked 1003 Reopen 0 New New New false
9875 STATES DS:0 DS:1 DS:2 TOTAL
9876 New 2 2 2 6
9877 Sent 0 0 0 0
9878 Done 2 2 2 6
9879 Skipped 0 0 0 0
9880 Error 0 0 0 0
9881 Last Flush: 0 0 0
9882 Downstairs last five completed:
9883 Upstairs last five completed: 2 1
98842023-09-22T23:08:20.448ZINFOcrucible: Now move the NoOp job forward
98852023-09-22T23:08:20.448ZINFOcrucible: Finally, move the ReOpen job forward
98862023-09-22T23:08:20.448ZINFOcrucible: Now ACK the reopen job
98872023-09-22T23:08:20.448ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
98882023-09-22T23:08:20.448ZINFOcrucible: jobs are: 4
9889 test live_repair::repair_test::test_repair_extent_no_action_all ... ok
98902023-09-22T23:08:20.579ZINFOcrucible: Now move the NoOp job forward
98912023-09-22T23:08:20.579ZINFOcrucible: Now ACK the NoOp job
98922023-09-22T23:08:20.579ZINFOcrucible: Finally, move the ReOpen job forward
98932023-09-22T23:08:20.579ZINFOcrucible: Now ACK the Reopen job
98942023-09-22T23:08:20.579ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
98952023-09-22T23:08:20.579ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
98962023-09-22T23:08:20.579ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
98972023-09-22T23:08:20.579ZWARNcrucible: RE:0 Bailing with error
9898 ----------------------------------------------------------------
9899 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
9900 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
9901 1 Acked 1000 FClose 0 Done Done Err false
9902 2 Acked 1001 NoOp 0 Done Skip Skip false
9903 3 Acked 1002 NoOp 0 Done Skip Skip false
9904 4 Acked 1003 Reopen 0 Done Skip Skip false
9905 STATES DS:0 DS:1 DS:2 TOTAL
9906 New 0 0 0 0
9907 Sent 0 0 0 0
9908 Done 4 1 0 5
9909 Skipped 0 3 3 6
9910 Error 0 0 1 1
9911 Last Flush: 0 0 0
9912 Downstairs last five completed:
9913 Upstairs last five completed: 4 3 2 1
99142023-09-22T23:08:20.580ZINFOcrucible: Crucible stats registered with UUID: 447579bf-098c-4009-8361-0912b7f0245f
99152023-09-22T23:08:20.580ZINFOcrucible: Crucible 447579bf-098c-4009-8361-0912b7f0245f has session id: b510ee14-6aae-4b3a-bb93-47d3774c88ab
99162023-09-22T23:08:20.580ZINFOcrucible: [0] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) New New New ds_transition to WaitActive
9917 {"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"{2023-09-22T23:08:20.580256604Z","hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pidNow move the NoOp job forward"":4291,"}v"
9918 :0,"{name":""crucible"msg":","level":30[0] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) WaitActive New New ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,,""timetime""::""2023-09-22T23:08:20.580292073Z2023-09-22T23:08:20.580284253Z"",","hostname"hostname:"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal,"",pid"":pid"4291:4291}
9919 }
9920 {"{msg":""[0] Transition from WaitActive to WaitQuorummsg"":","v":0,"name":[0] DS Reports error Err(GenericError("\"crucible"bad\","level":30,"time":")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }2023-09-22T23:08:20.58033424Z"",,""v":hostname"0:","name":"crucibleip-10-150-1-74.us-west-2.compute.internal"",,""levelpid""::504291}
9921 {"msg":","time":"2023-09-22T23:08:20.580354514Z[0] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) WaitQuorum New New ds_transition to Active"",,""v"hostname:":0","name":"crucibleip-10-150-1-74.us-west-2.compute.internal"",,""pid"level:":429130,"":"downstairs"}
9922 {,""msgtime""::""2023-09-22T23:08:20.580373313Z[0] Reports error GenericError("\"bad,\""hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
9923 {"msg":"[0] Transition from WaitQuorum to Active","v":0,") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }name"":","vcrucible"":0,","level"name:":30"crucible","level":50,"time":","2023-09-22T23:08:20.580405447Z"time":,""hostname":"2023-09-22T23:08:20.580407845Z","hostname"ip-10-150-1-74.us-west-2.compute.internal:"","pid":4291ip-10-150-1-74.us-west-2.compute.internal}"
9924 ,"pid":4291{,"":""msg":downstairs""}
9925 {"[1] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) Active New New ds_transition to WaitActive"msg":,""v":0,"name":[0] client skip 4 in process jobs because fault"",crucible""v",:"0,level"":name30":"crucible","level":30,"time":","2023-09-22T23:08:20.580440636Ztime"":","hostname":"2023-09-22T23:08:20.58044317Z","hostname":ip-10-150-1-74.us-west-2.compute.internal"","pid":4291ip-10-150-1-74.us-west-2.compute.internal"},
9926 "pid":4291{,"":""msgdownstairs"":"}
9927 [1] Transition from New to WaitActive","v"{:0,""name"msg:"":"crucible","level"[0] changed 1 jobs to fault skipped:"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:20.58047223Z","hostname",:""time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:08:20.580476858Z,"",pid"":4291hostname":"}
9928 ip-10-150-1-74.us-west-2.compute.internal","pid"{:4291,""":"msg":"downstairs"}
9929 {"[1] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) Active WaitActive New ds_transition to WaitQuorummsg"":","v":0,"name":"crucible","level":30[0] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) Active LiveRepair Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:20.580507249Z","hostname":","timeip-10-150-1-74.us-west-2.compute.internal"":","pid":42912023-09-22T23:08:20.580513342Z"},
9930 "hostname":"{"msg"ip-10-150-1-74.us-west-2.compute.internal:"","pid":4291}[1] Transition from WaitActive to WaitQuorum
9931 ","v":{0,""name":msg"":"crucible","level":[0] Transition from Active to Faulted30","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:20.580539894Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:08:20.580544931Z"pid":,"4291hostname"}:
9932 "{ip-10-150-1-74.us-west-2.compute.internal",""pidmsg""::"4291}
9933 {"[1] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) Active WaitQuorum New ds_transition to Activemsg"":","v":0,"Extent 0 close id:1002 Failed: Error: badname"":","vcrucible"":,0","level":name"30:"crucible","level":50,"time":","time2023-09-22T23:08:20.580574739Z"":","hostname":2023-09-22T23:08:20.580577214Z"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291ip-10-150-1-74.us-west-2.compute.internal",}"
9934 pid":4291}
9935 {{"msg":""msg":"[1] Transition from WaitQuorum to Active","[1] client skip 4 in process jobs because fault"v":,"0v,"":0name",:""name"crucible:"","crucible"level",:"30level":30,,""timetime""::""2023-09-22T23:08:20.580613489Z2023-09-22T23:08:20.580614289Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42914291,}"
9936 ":"downstairs"{}
9937 "msg":"{"msg":"[1] changed 1 jobs to fault skipped","v"[2] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) Active Active New ds_transition to WaitActive:"0,,""v"name:":0","crucible"name",:""level"crucible:"30,"level":30,",time"":time"":"2023-09-22T23:08:20.580650799Z"2023-09-22T23:08:20.580652094Z",","hostname":hostname"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal,"","pid"pid:":42914291,"}"
9938 :"downstairs"{}
9939 "msg":"{"[2] Transition from New to WaitActive"msg":,""v":0,"name":"crucible","level":30[1] 0c47db24-e385-42f3-95d6-fa49ef7706df (927b0333-54f5-4011-a1f3-3f18a06b5db1) Faulted LiveRepair Active ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:20.580683301Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time,"":"pid":4291}2023-09-22T23:08:20.580689541Z
9940 ","hostname{":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
9941 [2] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) Active Active WaitActive ds_transition to WaitQuorum{",""v"msg:":0","name":"crucible[1] Transition from LiveRepair to Faulted"",","levelv""::300,"name":"crucible","level":30,"time":"2023-09-22T23:08:20.580718083Z",","time":hostname"":"2023-09-22T23:08:20.580721668Z","ip-10-150-1-74.us-west-2.compute.internal"hostname,"":"pid":4291}
9942 ip-10-150-1-74.us-west-2.compute.internal","pid":{4291}"
9943 msg":"{"[2] Transition from WaitActive to WaitQuorummsg"":","v":0,"nameRE:0 Wait for result from reopen command 1003:4"":","cruciblev"":,0","level"name:":30"crucible","level":30,"time":","time"2023-09-22T23:08:20.580751055Z:"","hostname"2023-09-22T23:08:20.580753324Z:"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal4291","}pid
9944 ":4291}
99452023-09-22T23:08:20.580ZINFOcrucible: [2] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) Active Active WaitQuorum ds_transition to Active
99462023-09-22T23:08:20.580ZINFOcrucible: [2] Transition from WaitQuorum to Active
99472023-09-22T23:08:20.580ZINFOcrucible: 447579bf-098c-4009-8361-0912b7f0245f is now active with session: bc569f92-47ce-43bf-a7ad-b0ac8e3af76e
99482023-09-22T23:08:20.580ZINFOcrucible: [2] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) Active Active Active ds_transition to Faulted
99492023-09-22T23:08:20.580ZINFOcrucible: [2] Transition from Active to Faulted
99502023-09-22T23:08:20.580ZINFOcrucible: [2] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) Active Active Faulted ds_transition to LiveRepairReady
99512023-09-22T23:08:20.580ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
99522023-09-22T23:08:20.580ZINFOcrucible: [2] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) Active Active LiveRepairReady ds_transition to LiveRepair
99532023-09-22T23:08:20.580ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
99542023-09-22T23:08:20.580ZINFOcrucible: Waiting for Close + ReOpen jobs
99552023-09-22T23:08:20.581ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
99562023-09-22T23:08:20.581ZINFOcrucible: RE:0 close id:1000 queued, notify DS
99572023-09-22T23:08:20.581ZINFOcrucible: RE:0 Wait for result from close command 1000:1
99582023-09-22T23:08:20.586ZINFOcrucible: Now move the NoOp job forward
99592023-09-22T23:08:20.586ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
99602023-09-22T23:08:20.586ZERROcrucible: [2] Reports error GenericError("bad") on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
99612023-09-22T23:08:20.586ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
99622023-09-22T23:08:20.586ZINFOcrucible: [2] changed 0 jobs to fault skipped = downstairs
99632023-09-22T23:08:20.586ZINFOcrucible: [2] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) Active LiveRepair Active ds_transition to Faulted
99642023-09-22T23:08:20.586ZINFOcrucible: [2] Transition from Active to Faulted
99652023-09-22T23:08:20.586ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
99662023-09-22T23:08:20.586ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
99672023-09-22T23:08:20.586ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
99682023-09-22T23:08:20.586ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
99692023-09-22T23:08:20.586ZINFOcrucible: [1] edd7a265-5253-4482-a9cb-a825dc1347c4 (1dd29b58-71e1-40af-b106-613dfa5b70bd) Active LiveRepair Faulted ds_transition to Faulted
99702023-09-22T23:08:20.586ZINFOcrucible: [1] Transition from LiveRepair to Faulted
99712023-09-22T23:08:20.586ZWARNcrucible: RE:0 Bailing with error
99722023-09-22T23:08:20.587ZINFOcrucible: Crucible stats registered with UUID: 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4
99732023-09-22T23:08:20.587ZINFOcrucible: Crucible 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 has session id: 92a4d028-35da-4409-8cd6-38bb5d06dda2
99742023-09-22T23:08:20.587ZINFOcrucible: [0] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) New New New ds_transition to WaitActive
99752023-09-22T23:08:20.587ZINFOcrucible: [0] Transition from New to WaitActive
99762023-09-22T23:08:20.587ZINFOcrucible: [0] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) WaitActive New New ds_transition to WaitQuorum
99772023-09-22T23:08:20.587ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
99782023-09-22T23:08:20.587ZINFOcrucible: [0] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) WaitQuorum New New ds_transition to Active
99792023-09-22T23:08:20.587ZINFOcrucible: [0] Transition from WaitQuorum to Active
99802023-09-22T23:08:20.587ZINFOcrucible: [1] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) Active New New ds_transition to WaitActive
99812023-09-22T23:08:20.587ZINFOcrucible: [1] Transition from New to WaitActive
99822023-09-22T23:08:20.587ZINFOcrucible: [1] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) Active WaitActive New ds_transition to WaitQuorum
99832023-09-22T23:08:20.587ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
99842023-09-22T23:08:20.587ZINFOcrucible: [1] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) Active WaitQuorum New ds_transition to Active
99852023-09-22T23:08:20.587ZINFOcrucible: [1] Transition from WaitQuorum to Active
99862023-09-22T23:08:20.587ZINFOcrucible: [2] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) Active Active New ds_transition to WaitActive
99872023-09-22T23:08:20.587ZINFOcrucible: [2] Transition from New to WaitActive
99882023-09-22T23:08:20.587ZINFOcrucible: [2] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) Active Active WaitActive ds_transition to WaitQuorum
99892023-09-22T23:08:20.587ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
99902023-09-22T23:08:20.587ZINFOcrucible: [2] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) Active Active WaitQuorum ds_transition to Active
99912023-09-22T23:08:20.587ZINFOcrucible: [2] Transition from WaitQuorum to Active
99922023-09-22T23:08:20.587ZINFOcrucible: 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 is now active with session: 74a0a767-aee3-41c9-a191-b12e771ab110
99932023-09-22T23:08:20.587ZINFOcrucible: [2] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) Active Active Active ds_transition to Faulted
99942023-09-22T23:08:20.587ZINFOcrucible: [2] Transition from Active to Faulted
99952023-09-22T23:08:20.587ZINFOcrucible: [2] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) Active Active Faulted ds_transition to LiveRepairReady
99962023-09-22T23:08:20.587ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
99972023-09-22T23:08:20.587ZINFOcrucible: [2] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) Active Active LiveRepairReady ds_transition to LiveRepair
99982023-09-22T23:08:20.587ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
99992023-09-22T23:08:20.587ZINFOcrucible: Waiting for Close + ReOpen jobs
100002023-09-22T23:08:20.587ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
100012023-09-22T23:08:20.587ZINFOcrucible: RE:0 close id:1000 queued, notify DS
100022023-09-22T23:08:20.587ZINFOcrucible: RE:0 Wait for result from close command 1000:1
100032023-09-22T23:08:20.699ZINFOcrucible: Now move the NoOp job forward
100042023-09-22T23:08:20.699ZINFOcrucible: Now ACK the NoOp job
100052023-09-22T23:08:20.699ZINFOcrucible: Finally, move the ReOpen job forward
100062023-09-22T23:08:20.699ZINFOcrucible: Now ACK the Reopen job
100072023-09-22T23:08:20.699ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
100082023-09-22T23:08:20.699ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
100092023-09-22T23:08:20.699ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
100102023-09-22T23:08:20.699ZWARNcrucible: RE:0 Bailing with error
100112023-09-22T23:08:20.700ZINFOcrucible: Crucible stats registered with UUID: c43b617e-cb7a-45ad-bdb9-19a9531028df
100122023-09-22T23:08:20.700ZINFOcrucible: Crucible c43b617e-cb7a-45ad-bdb9-19a9531028df has session id: 6b8693a3-f5cb-4325-aa1b-5eb99cfa65bb
100132023-09-22T23:08:20.700ZINFOcrucible: [0] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) New New New ds_transition to WaitActive
100142023-09-22T23:08:20.700ZINFOcrucible: [0] Transition from New to WaitActive
100152023-09-22T23:08:20.700ZINFOcrucible: [0] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) WaitActive New New ds_transition to WaitQuorum
100162023-09-22T23:08:20.700ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
100172023-09-22T23:08:20.700ZINFOcrucible: [0] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) WaitQuorum New New ds_transition to Active
100182023-09-22T23:08:20.700ZINFOcrucible: [0] Transition from WaitQuorum to Active
100192023-09-22T23:08:20.700ZINFOcrucible: [1] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) Active New New ds_transition to WaitActive
100202023-09-22T23:08:20.700ZINFOcrucible: [1] Transition from New to WaitActive
100212023-09-22T23:08:20.700ZINFOcrucible: [1] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) Active WaitActive New ds_transition to WaitQuorum
100222023-09-22T23:08:20.700ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
100232023-09-22T23:08:20.700ZINFOcrucible: [1] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) Active WaitQuorum New ds_transition to Active
100242023-09-22T23:08:20.700ZINFOcrucible: [1] Transition from WaitQuorum to Active
100252023-09-22T23:08:20.700ZINFOcrucible: [2] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) Active Active New ds_transition to WaitActive
100262023-09-22T23:08:20.700ZINFOcrucible: [2] Transition from New to WaitActive
100272023-09-22T23:08:20.700ZINFOcrucible: [2] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) Active Active WaitActive ds_transition to WaitQuorum
100282023-09-22T23:08:20.700ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
100292023-09-22T23:08:20.700ZINFOcrucible: [2] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) Active Active WaitQuorum ds_transition to Active
100302023-09-22T23:08:20.700ZINFOcrucible: [2] Transition from WaitQuorum to Active
100312023-09-22T23:08:20.700ZINFOcrucible: c43b617e-cb7a-45ad-bdb9-19a9531028df is now active with session: 6036a6d9-4d85-440b-b090-7602e4205cf6
100322023-09-22T23:08:20.700ZINFOcrucible: [1] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) Active Active Active ds_transition to Faulted
100332023-09-22T23:08:20.700ZINFOcrucible: [1] Transition from Active to Faulted
100342023-09-22T23:08:20.700ZINFOcrucible: [1] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) Active Faulted Active ds_transition to LiveRepairReady
100352023-09-22T23:08:20.700ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
100362023-09-22T23:08:20.700ZINFOcrucible: [1] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) Active LiveRepairReady Active ds_transition to LiveRepair
100372023-09-22T23:08:20.700ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
100382023-09-22T23:08:20.700ZINFOcrucible: Waiting for Close + ReOpen jobs
100392023-09-22T23:08:20.700ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
100402023-09-22T23:08:20.700ZINFOcrucible: RE:0 close id:1000 queued, notify DS
100412023-09-22T23:08:20.700ZINFOcrucible: RE:0 Wait for result from close command 1000:1
100422023-09-22T23:08:21.581ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(0), repair_downstairs: [ClientId(2)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
100432023-09-22T23:08:21.581ZERROcrucible: [0] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(0), repair_downstairs: [ClientId(2)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
100442023-09-22T23:08:21.581ZINFOcrucible: [0] client skip 2 in process jobs because fault = downstairs
100452023-09-22T23:08:21.581ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
100462023-09-22T23:08:21.581ZINFOcrucible: [0] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) Active Active LiveRepair ds_transition to Faulted
100472023-09-22T23:08:21.581ZINFOcrucible: [0] Transition from Active to Faulted
100482023-09-22T23:08:21.581ZINFOcrucible: Now ACK the close job
100492023-09-22T23:08:21.581ZINFOcrucible: Waiting for 3 jobs (currently 2)
100502023-09-22T23:08:21.582ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
100512023-09-22T23:08:21.582ZINFOcrucible: [2] client skip 2 in process jobs because fault = downstairs
100522023-09-22T23:08:21.582ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
100532023-09-22T23:08:21.582ZINFOcrucible: [2] 447579bf-098c-4009-8361-0912b7f0245f (bc569f92-47ce-43bf-a7ad-b0ac8e3af76e) Faulted Active LiveRepair ds_transition to Faulted
100542023-09-22T23:08:21.582ZINFOcrucible: [2] Transition from LiveRepair to Faulted
100552023-09-22T23:08:21.582ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
100562023-09-22T23:08:21.588ZINFOcrucible: Waiting for 3 jobs (currently 2)
100572023-09-22T23:08:21.588ZINFOcrucible: No repair needed for extent 0 = downstairs
100582023-09-22T23:08:21.588ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
100592023-09-22T23:08:21.701ZINFOcrucible: Now ACK the close job
100602023-09-22T23:08:21.701ZINFOcrucible: Waiting for 3 jobs (currently 2)
100612023-09-22T23:08:21.701ZINFOcrucible: No repair needed for extent 0 = downstairs
100622023-09-22T23:08:21.701ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
100632023-09-22T23:08:22.426ZINFOcrucible: responded to ping downstairs = 1
100642023-09-22T23:08:22.427ZINFOcrucible: responded to ping downstairs = 1
100652023-09-22T23:08:22.581ZINFOcrucible: Finally, move the ReOpen job forward
100662023-09-22T23:08:22.581ZINFOcrucible: Now ACK the reopen job
100672023-09-22T23:08:22.581ZWARNcrucible: RE:0 Bailing with error
100682023-09-22T23:08:22.581ZINFOcrucible: err:0 or:1
100692023-09-22T23:08:22.582ZINFOcrucible: Waiting for 4 jobs (currently 3)
10070 {"msg":"Extent 0 close id:1001 Failed: Error: bad","v":0,"name":"crucible","level":50{"msg,"":"time":"2023-09-22T23:08:22.582337682Z","Crucible stats registered with UUID: 9a454318-3f11-4b24-be6e-b1033eaa5db9"hostname":","v":ip-10-150-1-74.us-west-2.compute.internal"0,",pid"":name":4291"crucible"}
10071 ,"level":30{"msg":"RE:0 Wait for result from NoOp command 1002:3","v":0,"name":"crucible","level":30,,""time":"time":"2023-09-22T23:08:22.58238439Z","hostname":2023-09-22T23:08:22.582373004Z""ip-10-150-1-74.us-west-2.compute.internal,"","hostnamepid":"4291:"}
10072 ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
100732023-09-22T23:08:22.582ZINFOcrucible: Crucible 9a454318-3f11-4b24-be6e-b1033eaa5db9 has session id: 09f21dfe-f6bc-4f91-af12-e181495c6b25
100742023-09-22T23:08:22.582ZINFOcrucible: [0] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) New New New ds_transition to WaitActive
100752023-09-22T23:08:22.582ZINFOcrucible: [0] Transition from New to WaitActive
100762023-09-22T23:08:22.582ZINFOcrucible: [0] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) WaitActive New New ds_transition to WaitQuorum
100772023-09-22T23:08:22.582ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
100782023-09-22T23:08:22.582ZINFOcrucible: [0] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) WaitQuorum New New ds_transition to Active
100792023-09-22T23:08:22.582ZINFOcrucible: [0] Transition from WaitQuorum to Active
100802023-09-22T23:08:22.582ZINFOcrucible: [1] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) Active New New ds_transition to WaitActive
100812023-09-22T23:08:22.582ZINFOcrucible: [1] Transition from New to WaitActive
100822023-09-22T23:08:22.582ZINFOcrucible: [1] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) Active WaitActive New ds_transition to WaitQuorum
100832023-09-22T23:08:22.582ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
100842023-09-22T23:08:22.582ZINFOcrucible: [1] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) Active WaitQuorum New ds_transition to Active
100852023-09-22T23:08:22.582ZINFOcrucible: [1] Transition from WaitQuorum to Active
100862023-09-22T23:08:22.583ZINFOcrucible: [2] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) Active Active New ds_transition to WaitActive
100872023-09-22T23:08:22.583ZINFOcrucible: [2] Transition from New to WaitActive
100882023-09-22T23:08:22.583ZINFOcrucible: [2] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) Active Active WaitActive ds_transition to WaitQuorum
100892023-09-22T23:08:22.583ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
100902023-09-22T23:08:22.583ZINFOcrucible: [2] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) Active Active WaitQuorum ds_transition to Active
100912023-09-22T23:08:22.583ZINFOcrucible: [2] Transition from WaitQuorum to Active
100922023-09-22T23:08:22.583ZINFOcrucible: 9a454318-3f11-4b24-be6e-b1033eaa5db9 is now active with session: 78e805e3-7c37-4f81-915e-a0267c342496
100932023-09-22T23:08:22.583ZINFOcrucible: [1] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) Active Active Active ds_transition to Faulted
100942023-09-22T23:08:22.583ZINFOcrucible: [1] Transition from Active to Faulted
100952023-09-22T23:08:22.583ZINFOcrucible: [1] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) Active Faulted Active ds_transition to LiveRepairReady
100962023-09-22T23:08:22.583ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
100972023-09-22T23:08:22.583ZINFOcrucible: [1] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) Active LiveRepairReady Active ds_transition to LiveRepair
100982023-09-22T23:08:22.583ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
100992023-09-22T23:08:22.583ZINFOcrucible: Waiting for Close + ReOpen jobs
101002023-09-22T23:08:22.583ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
101012023-09-22T23:08:22.583ZINFOcrucible: RE:0 close id:1000 queued, notify DS
101022023-09-22T23:08:22.583ZINFOcrucible: RE:0 Wait for result from close command 1000:1
101032023-09-22T23:08:22.589ZINFOcrucible: Waiting for 4 jobs (currently 3)
101042023-09-22T23:08:22.589ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
101052023-09-22T23:08:22.703ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
101062023-09-22T23:08:22.703ZERROcrucible: [2] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
101072023-09-22T23:08:22.703ZINFOcrucible: [2] client skip 3 in process jobs because fault = downstairs
101082023-09-22T23:08:22.703ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
101092023-09-22T23:08:22.703ZINFOcrucible: [2] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) Active LiveRepair Active ds_transition to Faulted
101102023-09-22T23:08:22.703ZINFOcrucible: [2] Transition from Active to Faulted
101112023-09-22T23:08:22.703ZINFOcrucible: Waiting for 4 jobs (currently 3)
101122023-09-22T23:08:22.703ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
101132023-09-22T23:08:22.703ZINFOcrucible: [1] client skip 3 in process jobs because fault = downstairs
101142023-09-22T23:08:22.703ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
101152023-09-22T23:08:22.703ZINFOcrucible: [1] c43b617e-cb7a-45ad-bdb9-19a9531028df (6036a6d9-4d85-440b-b090-7602e4205cf6) Active LiveRepair Faulted ds_transition to Faulted
101162023-09-22T23:08:22.703ZINFOcrucible: [1] Transition from LiveRepair to Faulted
101172023-09-22T23:08:22.703ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
101182023-09-22T23:08:23.583ZINFOcrucible: Now move the NoOp job forward
101192023-09-22T23:08:23.583ZINFOcrucible: Now ACK the NoOp job
101202023-09-22T23:08:23.583ZINFOcrucible: Finally, move the ReOpen job forward
101212023-09-22T23:08:23.583ZINFOcrucible: Now ACK the Reopen job
101222023-09-22T23:08:23.583ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
101232023-09-22T23:08:23.583ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
10124 {"msg":"Extent 0 close id:1003 Failed: Error: bad","v":0,"name":"crucible","level":50{"msg":"Waiting for 3 jobs (currently 2)",",time"":"v":2023-09-22T23:08:23.583957399Z"0,","hostname":"name":"crucible"ip-10-150-1-74.us-west-2.compute.internal",","pid"level:"4291:}----------------------------------------------------------------
10125 
10126 30 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
10127 {"GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
10128 msg":"RE:0 Bailing with error","v":0,"name":"crucible" 1 Acked 1000 FClose 0,"level": Err40 Done Done false
10129 ,"time": 2 Acked 1001 NoOp 0" Skip Done Skip false2023-09-22T23:08:23.583993099Z"
10130 ," 3 Acked 1002 NoOp 0hostname Skip": Done" Skip false
10131 ip-10-150-1-74.us-west-2.compute.internal", 4 Acked 1003 Reopen 0 Skip Done Skip false
10132 STATES DS:0 DS:1 DS:2 TOTAL
10133 , New " 0 time" 0 : 0 "" 0
10134 2023-09-22T23:08:23.584012214Z Sent " 0 pid", 0 " 0 hostname" 0
10135 : Done : 0 " 4 4291 1 5
10136 ip-10-150-1-74.us-west-2.compute.internal Skipped " 3 , 0 " 3 pid 6
10137 "} Error : 1 4291 0
10138 } 0
10139 1
10140 {Last Flush: 0 0 0 "
10141 msg":"No repair needed for extent 0"Downstairs last five completed:,"
10142 v":0,"name":"crucible","level":30Upstairs last five completed: 4 3 2 1
10143 ,"time":"2023-09-22T23:08:23.584092974Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs"}
101442023-09-22T23:08:23.584ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
101452023-09-22T23:08:23.584ZINFOcrucible: Crucible stats registered with UUID: f5357d68-1826-4d13-a77d-b2335da9d6f2
101462023-09-22T23:08:23.584ZINFOcrucible: Crucible f5357d68-1826-4d13-a77d-b2335da9d6f2 has session id: cd0c0a27-6e82-4f52-b38f-03711c7df63b
101472023-09-22T23:08:23.584ZINFOcrucible: [0] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) New New New ds_transition to WaitActive
101482023-09-22T23:08:23.584ZINFOcrucible: [0] Transition from New to WaitActive
101492023-09-22T23:08:23.584ZINFOcrucible: [0] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) WaitActive New New ds_transition to WaitQuorum
101502023-09-22T23:08:23.584ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
101512023-09-22T23:08:23.584ZINFOcrucible: [0] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) WaitQuorum New New ds_transition to Active
101522023-09-22T23:08:23.584ZINFOcrucible: [0] Transition from WaitQuorum to Active
101532023-09-22T23:08:23.584ZINFOcrucible: [1] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) Active New New ds_transition to WaitActive
101542023-09-22T23:08:23.584ZINFOcrucible: [1] Transition from New to WaitActive
101552023-09-22T23:08:23.584ZINFOcrucible: [1] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) Active WaitActive New ds_transition to WaitQuorum
101562023-09-22T23:08:23.584ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
101572023-09-22T23:08:23.584ZINFOcrucible: [1] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) Active WaitQuorum New ds_transition to Active
101582023-09-22T23:08:23.584ZINFOcrucible: [1] Transition from WaitQuorum to Active
101592023-09-22T23:08:23.584ZINFOcrucible: [2] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) Active Active New ds_transition to WaitActive
101602023-09-22T23:08:23.584ZINFOcrucible: [2] Transition from New to WaitActive
101612023-09-22T23:08:23.584ZINFOcrucible: [2] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) Active Active WaitActive ds_transition to WaitQuorum
101622023-09-22T23:08:23.585ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
101632023-09-22T23:08:23.585ZINFOcrucible: [2] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) Active Active WaitQuorum ds_transition to Active
101642023-09-22T23:08:23.585ZINFOcrucible: [2] Transition from WaitQuorum to Active
101652023-09-22T23:08:23.585ZINFOcrucible: f5357d68-1826-4d13-a77d-b2335da9d6f2 is now active with session: c3b8b937-bdf2-428d-b207-29fe37e50ee9
101662023-09-22T23:08:23.585ZINFOcrucible: [2] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) Active Active Active ds_transition to Faulted
101672023-09-22T23:08:23.585ZINFOcrucible: [2] Transition from Active to Faulted
101682023-09-22T23:08:23.585ZINFOcrucible: [2] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) Active Active Faulted ds_transition to LiveRepairReady
101692023-09-22T23:08:23.585ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
101702023-09-22T23:08:23.585ZINFOcrucible: [2] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) Active Active LiveRepairReady ds_transition to LiveRepair
101712023-09-22T23:08:23.585ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
101722023-09-22T23:08:23.585ZINFOcrucible: Waiting for Close + ReOpen jobs
101732023-09-22T23:08:23.585ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
101742023-09-22T23:08:23.585ZINFOcrucible: RE:0 close id:1000 queued, notify DS
101752023-09-22T23:08:23.585ZINFOcrucible: RE:0 Wait for result from close command 1000:1
101762023-09-22T23:08:23.591ZINFOcrucible: Now move the NoOp job forward
101772023-09-22T23:08:23.591ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
101782023-09-22T23:08:23.591ZERROcrucible: [0] Reports error GenericError("bad") on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
101792023-09-22T23:08:23.591ZINFOcrucible: [0] client skip 4 in process jobs because fault = downstairs
101802023-09-22T23:08:23.591ZINFOcrucible: [0] changed 0 jobs to fault skipped = downstairs
101812023-09-22T23:08:23.591ZINFOcrucible: [0] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) Active Active LiveRepair ds_transition to Faulted
101822023-09-22T23:08:23.591ZINFOcrucible: [0] Transition from Active to Faulted
101832023-09-22T23:08:23.591ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
101842023-09-22T23:08:23.591ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
101852023-09-22T23:08:23.591ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
101862023-09-22T23:08:23.591ZINFOcrucible: [2] changed 0 jobs to fault skipped = downstairs
101872023-09-22T23:08:23.591ZINFOcrucible: [2] 3d1b0574-7dcd-4ea0-9e64-07ebc6e4e1b4 (74a0a767-aee3-41c9-a191-b12e771ab110) Faulted Active LiveRepair ds_transition to Faulted
101882023-09-22T23:08:23.591ZINFOcrucible: [2] Transition from LiveRepair to Faulted
101892023-09-22T23:08:23.591ZWARNcrucible: RE:0 Bailing with error
101902023-09-22T23:08:23.591ZINFOcrucible: Crucible stats registered with UUID: 2f3bf994-5f9f-4b02-b326-33674e795003
101912023-09-22T23:08:23.591ZINFOcrucible: Crucible 2f3bf994-5f9f-4b02-b326-33674e795003 has session id: bbb5cadd-c4c2-462b-aeb7-82647c4128c6
101922023-09-22T23:08:23.591ZINFOcrucible: [0] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) New New New ds_transition to WaitActive
101932023-09-22T23:08:23.591ZINFOcrucible: [0] Transition from New to WaitActive
101942023-09-22T23:08:23.591ZINFOcrucible: [0] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) WaitActive New New ds_transition to WaitQuorum
101952023-09-22T23:08:23.591ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
101962023-09-22T23:08:23.591ZINFOcrucible: [0] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) WaitQuorum New New ds_transition to Active
101972023-09-22T23:08:23.591ZINFOcrucible: [0] Transition from WaitQuorum to Active
101982023-09-22T23:08:23.592ZINFOcrucible: [1] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) Active New New ds_transition to WaitActive
101992023-09-22T23:08:23.592ZINFOcrucible: [1] Transition from New to WaitActive
102002023-09-22T23:08:23.592ZINFOcrucible: [1] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) Active WaitActive New ds_transition to WaitQuorum
102012023-09-22T23:08:23.592ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
102022023-09-22T23:08:23.592ZINFOcrucible: [1] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) Active WaitQuorum New ds_transition to Active
102032023-09-22T23:08:23.592ZINFOcrucible: [1] Transition from WaitQuorum to Active
102042023-09-22T23:08:23.592ZINFOcrucible: [2] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) Active Active New ds_transition to WaitActive
102052023-09-22T23:08:23.592ZINFOcrucible: [2] Transition from New to WaitActive
102062023-09-22T23:08:23.592ZINFOcrucible: [2] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) Active Active WaitActive ds_transition to WaitQuorum
102072023-09-22T23:08:23.592ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
102082023-09-22T23:08:23.592ZINFOcrucible: [2] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) Active Active WaitQuorum ds_transition to Active
102092023-09-22T23:08:23.592ZINFOcrucible: [2] Transition from WaitQuorum to Active
102102023-09-22T23:08:23.592ZINFOcrucible: 2f3bf994-5f9f-4b02-b326-33674e795003 is now active with session: 0a9fd518-e87c-445f-bf08-874431d889a8
102112023-09-22T23:08:23.592ZINFOcrucible: [2] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) Active Active Active ds_transition to Faulted
102122023-09-22T23:08:23.592ZINFOcrucible: [2] Transition from Active to Faulted
102132023-09-22T23:08:23.592ZINFOcrucible: [2] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) Active Active Faulted ds_transition to LiveRepairReady
102142023-09-22T23:08:23.592ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
102152023-09-22T23:08:23.592ZINFOcrucible: [2] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) Active Active LiveRepairReady ds_transition to LiveRepair
102162023-09-22T23:08:23.592ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
102172023-09-22T23:08:23.592ZINFOcrucible: Waiting for Close + ReOpen jobs
102182023-09-22T23:08:23.592ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
102192023-09-22T23:08:23.592ZINFOcrucible: RE:0 close id:1000 queued, notify DS
102202023-09-22T23:08:23.592ZINFOcrucible: RE:0 Wait for result from close command 1000:1
102212023-09-22T23:08:23.704ZINFOcrucible: Now move the NoOp job forward
102222023-09-22T23:08:23.704ZINFOcrucible: Now ACK the NoOp job
102232023-09-22T23:08:23.704ZINFOcrucible: Finally, move the ReOpen job forward
102242023-09-22T23:08:23.704ZINFOcrucible: Now ACK the Reopen job
102252023-09-22T23:08:23.704ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
102262023-09-22T23:08:23.704ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
102272023-09-22T23:08:23.704ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
102282023-09-22T23:08:23.704ZWARNcrucible: RE:0 Bailing with error
102292023-09-22T23:08:23.705ZINFOcrucible: Crucible stats registered with UUID: 550be307-1aa8-42ac-b59e-9ea2749e822e
102302023-09-22T23:08:23.705ZINFOcrucible: Crucible 550be307-1aa8-42ac-b59e-9ea2749e822e has session id: a598f884-43b9-4928-b779-fd14ffe82b81
102312023-09-22T23:08:23.705ZINFOcrucible: [0] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) New New New ds_transition to WaitActive
102322023-09-22T23:08:23.705ZINFOcrucible: [0] Transition from New to WaitActive
102332023-09-22T23:08:23.705ZINFOcrucible: [0] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) WaitActive New New ds_transition to WaitQuorum
102342023-09-22T23:08:23.705ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
102352023-09-22T23:08:23.705ZINFOcrucible: [0] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) WaitQuorum New New ds_transition to Active
102362023-09-22T23:08:23.705ZINFOcrucible: [0] Transition from WaitQuorum to Active
102372023-09-22T23:08:23.705ZINFOcrucible: [1] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) Active New New ds_transition to WaitActive
102382023-09-22T23:08:23.705ZINFOcrucible: [1] Transition from New to WaitActive
102392023-09-22T23:08:23.705ZINFOcrucible: [1] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) Active WaitActive New ds_transition to WaitQuorum
102402023-09-22T23:08:23.705ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
102412023-09-22T23:08:23.705ZINFOcrucible: [1] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) Active WaitQuorum New ds_transition to Active
102422023-09-22T23:08:23.705ZINFOcrucible: [1] Transition from WaitQuorum to Active
102432023-09-22T23:08:23.705ZINFOcrucible: [2] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) Active Active New ds_transition to WaitActive
102442023-09-22T23:08:23.705ZINFOcrucible: [2] Transition from New to WaitActive
102452023-09-22T23:08:23.705ZINFOcrucible: [2] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) Active Active WaitActive ds_transition to WaitQuorum
102462023-09-22T23:08:23.705ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
102472023-09-22T23:08:23.705ZINFOcrucible: [2] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) Active Active WaitQuorum ds_transition to Active
102482023-09-22T23:08:23.705ZINFOcrucible: [2] Transition from WaitQuorum to Active
102492023-09-22T23:08:23.705ZINFOcrucible: 550be307-1aa8-42ac-b59e-9ea2749e822e is now active with session: 5aed1069-eb98-435d-bbfb-e191ed7d789f
102502023-09-22T23:08:23.705ZINFOcrucible: [2] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) Active Active Active ds_transition to Faulted
102512023-09-22T23:08:23.705ZINFOcrucible: [2] Transition from Active to Faulted
102522023-09-22T23:08:23.705ZINFOcrucible: [2] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) Active Active Faulted ds_transition to LiveRepairReady
102532023-09-22T23:08:23.705ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
102542023-09-22T23:08:23.705ZINFOcrucible: [2] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) Active Active LiveRepairReady ds_transition to LiveRepair
102552023-09-22T23:08:23.705ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
102562023-09-22T23:08:23.705ZINFOcrucible: Waiting for Close + ReOpen jobs
102572023-09-22T23:08:23.705ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
102582023-09-22T23:08:23.705ZINFOcrucible: RE:0 close id:1000 queued, notify DS
102592023-09-22T23:08:23.705ZINFOcrucible: RE:0 Wait for result from close command 1000:1
102602023-09-22T23:08:24.585ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(0), repair_downstairs: [ClientId(2)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
102612023-09-22T23:08:24.585ZERROcrucible: [1] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(0), repair_downstairs: [ClientId(2)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
102622023-09-22T23:08:24.585ZINFOcrucible: [1] client skip 2 in process jobs because fault = downstairs
102632023-09-22T23:08:24.585ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
10264 {"msg":"Waiting for 4 jobs (currently 3)","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:24.585382998Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
10265 {"msg":"[1] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) Active Active LiveRepair ds_transition to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:24.585383202Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
10266 {"msg":"[1] Transition from Active to Faulted","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:24.585424085Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
10267 {"msg":"RE:0 Wait for result from NoOp command 1002:3","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:24.585427723Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
10268 {"msg":"Now ACK the close job","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:24.585456617Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
102692023-09-22T23:08:24.585ZINFOcrucible: Waiting for 3 jobs (currently 2)
102702023-09-22T23:08:24.585ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
102712023-09-22T23:08:24.585ZINFOcrucible: [2] client skip 2 in process jobs because fault = downstairs
102722023-09-22T23:08:24.585ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
102732023-09-22T23:08:24.585ZINFOcrucible: [2] f5357d68-1826-4d13-a77d-b2335da9d6f2 (c3b8b937-bdf2-428d-b207-29fe37e50ee9) Active Faulted LiveRepair ds_transition to Faulted
102742023-09-22T23:08:24.585ZINFOcrucible: [2] Transition from LiveRepair to Faulted
102752023-09-22T23:08:24.585ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
102762023-09-22T23:08:24.593ZINFOcrucible: Waiting for 3 jobs (currently 2)
102772023-09-22T23:08:24.593ZINFOcrucible: No repair needed for extent 0 = downstairs
102782023-09-22T23:08:24.593ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
102792023-09-22T23:08:24.706ZINFOcrucible: Now ACK the close job
102802023-09-22T23:08:24.706ZINFOcrucible: Waiting for 3 jobs (currently 2)
102812023-09-22T23:08:24.706ZINFOcrucible: No repair needed for extent 0 = downstairs
102822023-09-22T23:08:24.706ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
10283 {"msg":"Now move the NoOp job forward","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:25.586693211Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
10284 {"msg":"Waiting for 4 jobs (currently 3)","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:25.5867399Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
10285 {"msg":"[1] DS Reports error Err(GenericError(\"bad\")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }","v":0,"name":"crucible","level":50,"time":"2023-09-22T23:08:25.586840291Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs"}
10286 {"msg":"Extent 0 close id:1001 Failed: Error: bad","v":0,"name":"crucible","level":50,"time":"2023-09-22T23:08:25.586862343Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
10287 {"msg":"[1] Reports error GenericError(\"bad\") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) }","v":0,"name":"crucible","level":50,"time":"2023-09-22T23:08:25.586898295Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291,"":"downstairs"}
10288 {"msg":"RE:0 Wait for result from NoOp command 1002:3","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:08:25.586898551Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4291}
102892023-09-22T23:08:25.587ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
102902023-09-22T23:08:25.587ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
102912023-09-22T23:08:25.587ZINFOcrucible: [1] 9a454318-3f11-4b24-be6e-b1033eaa5db9 (78e805e3-7c37-4f81-915e-a0267c342496) Active LiveRepair Active ds_transition to Faulted
102922023-09-22T23:08:25.587ZINFOcrucible: [1] Transition from LiveRepair to Faulted
102932023-09-22T23:08:25.587ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
102942023-09-22T23:08:25.587ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
102952023-09-22T23:08:25.594ZINFOcrucible: Waiting for 4 jobs (currently 3)
102962023-09-22T23:08:25.594ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
102972023-09-22T23:08:25.707ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
102982023-09-22T23:08:25.707ZERROcrucible: [0] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
102992023-09-22T23:08:25.707ZINFOcrucible: [0] client skip 3 in process jobs because fault = downstairs
103002023-09-22T23:08:25.707ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
103012023-09-22T23:08:25.707ZINFOcrucible: [0] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) Active Active LiveRepair ds_transition to Faulted
103022023-09-22T23:08:25.708ZINFOcrucible: [0] Transition from Active to Faulted
103032023-09-22T23:08:25.708ZINFOcrucible: Waiting for 4 jobs (currently 3)
103042023-09-22T23:08:25.708ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
103052023-09-22T23:08:25.708ZINFOcrucible: [2] client skip 3 in process jobs because fault = downstairs
103062023-09-22T23:08:25.708ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
103072023-09-22T23:08:25.708ZINFOcrucible: [2] 550be307-1aa8-42ac-b59e-9ea2749e822e (5aed1069-eb98-435d-bbfb-e191ed7d789f) Faulted Active LiveRepair ds_transition to Faulted
103082023-09-22T23:08:25.708ZINFOcrucible: [2] Transition from LiveRepair to Faulted
103092023-09-22T23:08:25.708ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
103102023-09-22T23:08:26.587ZINFOcrucible: Now move the NoOp job forward
103112023-09-22T23:08:26.587ZINFOcrucible: Now ACK the NoOp job
103122023-09-22T23:08:26.587ZINFOcrucible: Finally, move the ReOpen job forward
103132023-09-22T23:08:26.587ZINFOcrucible: Now ACK the Reopen job
103142023-09-22T23:08:26.587ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
103152023-09-22T23:08:26.587ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
103162023-09-22T23:08:26.587ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
103172023-09-22T23:08:26.587ZWARNcrucible: RE:0 Bailing with error
10318 ----------------------------------------------------------------
10319 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
10320 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
10321 1 Acked 1000 FClose 0 Done Err Done false
10322 2 Acked 1001 NoOp 0 Done Skip Skip false
10323 3 Acked 1002 NoOp 0 Done Skip Skip false
10324 4 Acked 1003 Reopen 0 Done Skip Skip false
10325 STATES DS:0 DS:1 DS:2 TOTAL
10326 New 0 0 0 0
10327 Sent 0 0 0 0
10328 Done 4 0 1 5
10329 Skipped 0 3 3 6
10330 Error 0 1 0 1
10331 Last Flush: 0 0 0
10332 Downstairs last five completed:
10333 Upstairs last five completed: 4 3 2 1
103342023-09-22T23:08:26.587ZINFOcrucible: Crucible stats registered with UUID: 322c5d16-5794-4781-8bc0-a7be748f5fb6
103352023-09-22T23:08:26.587ZINFOcrucible: Crucible 322c5d16-5794-4781-8bc0-a7be748f5fb6 has session id: f9f78e0a-83f0-444c-baec-2ff3c6918482
103362023-09-22T23:08:26.587ZINFOcrucible: [0] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) New New New ds_transition to WaitActive
103372023-09-22T23:08:26.587ZINFOcrucible: [0] Transition from New to WaitActive
103382023-09-22T23:08:26.587ZINFOcrucible: [0] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) WaitActive New New ds_transition to WaitQuorum
103392023-09-22T23:08:26.587ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
103402023-09-22T23:08:26.588ZINFOcrucible: [0] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) WaitQuorum New New ds_transition to Active
103412023-09-22T23:08:26.588ZINFOcrucible: [0] Transition from WaitQuorum to Active
103422023-09-22T23:08:26.588ZINFOcrucible: [1] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) Active New New ds_transition to WaitActive
103432023-09-22T23:08:26.588ZINFOcrucible: [1] Transition from New to WaitActive
103442023-09-22T23:08:26.588ZINFOcrucible: [1] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) Active WaitActive New ds_transition to WaitQuorum
103452023-09-22T23:08:26.588ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
103462023-09-22T23:08:26.588ZINFOcrucible: [1] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) Active WaitQuorum New ds_transition to Active
103472023-09-22T23:08:26.588ZINFOcrucible: [1] Transition from WaitQuorum to Active
103482023-09-22T23:08:26.588ZINFOcrucible: [2] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) Active Active New ds_transition to WaitActive
103492023-09-22T23:08:26.588ZINFOcrucible: [2] Transition from New to WaitActive
103502023-09-22T23:08:26.588ZINFOcrucible: [2] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) Active Active WaitActive ds_transition to WaitQuorum
103512023-09-22T23:08:26.588ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
103522023-09-22T23:08:26.588ZINFOcrucible: [2] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) Active Active WaitQuorum ds_transition to Active
103532023-09-22T23:08:26.588ZINFOcrucible: [2] Transition from WaitQuorum to Active
103542023-09-22T23:08:26.588ZINFOcrucible: 322c5d16-5794-4781-8bc0-a7be748f5fb6 is now active with session: aa3992cb-83bf-4a9e-ae74-60b531d1ace4
103552023-09-22T23:08:26.588ZINFOcrucible: [2] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) Active Active Active ds_transition to Faulted
103562023-09-22T23:08:26.588ZINFOcrucible: [2] Transition from Active to Faulted
103572023-09-22T23:08:26.588ZINFOcrucible: [2] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) Active Active Faulted ds_transition to LiveRepairReady
103582023-09-22T23:08:26.588ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
103592023-09-22T23:08:26.588ZINFOcrucible: [2] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) Active Active LiveRepairReady ds_transition to LiveRepair
103602023-09-22T23:08:26.588ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
103612023-09-22T23:08:26.588ZINFOcrucible: Waiting for Close + ReOpen jobs
103622023-09-22T23:08:26.588ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
103632023-09-22T23:08:26.588ZINFOcrucible: RE:0 close id:1000 queued, notify DS
103642023-09-22T23:08:26.588ZINFOcrucible: RE:0 Wait for result from close command 1000:1
103652023-09-22T23:08:26.595ZINFOcrucible: Now move the NoOp job forward
103662023-09-22T23:08:26.596ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
103672023-09-22T23:08:26.596ZERROcrucible: [1] Reports error GenericError("bad") on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
103682023-09-22T23:08:26.596ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
103692023-09-22T23:08:26.596ZINFOcrucible: [1] changed 0 jobs to fault skipped = downstairs
103702023-09-22T23:08:26.596ZINFOcrucible: [1] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) Active Active LiveRepair ds_transition to Faulted
103712023-09-22T23:08:26.596ZINFOcrucible: [1] Transition from Active to Faulted
103722023-09-22T23:08:26.596ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
103732023-09-22T23:08:26.596ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
103742023-09-22T23:08:26.596ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
103752023-09-22T23:08:26.596ZINFOcrucible: [2] changed 0 jobs to fault skipped = downstairs
103762023-09-22T23:08:26.596ZINFOcrucible: [2] 2f3bf994-5f9f-4b02-b326-33674e795003 (0a9fd518-e87c-445f-bf08-874431d889a8) Active Faulted LiveRepair ds_transition to Faulted
103772023-09-22T23:08:26.596ZINFOcrucible: [2] Transition from LiveRepair to Faulted
103782023-09-22T23:08:26.596ZWARNcrucible: RE:0 Bailing with error
103792023-09-22T23:08:26.596ZINFOcrucible: Crucible stats registered with UUID: 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad
103802023-09-22T23:08:26.596ZINFOcrucible: Crucible 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad has session id: 96273c4d-251b-433b-b209-311dd73d94f3
103812023-09-22T23:08:26.596ZINFOcrucible: [0] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) New New New ds_transition to WaitActive
103822023-09-22T23:08:26.596ZINFOcrucible: [0] Transition from New to WaitActive
103832023-09-22T23:08:26.596ZINFOcrucible: [0] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) WaitActive New New ds_transition to WaitQuorum
103842023-09-22T23:08:26.596ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
103852023-09-22T23:08:26.596ZINFOcrucible: [0] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) WaitQuorum New New ds_transition to Active
103862023-09-22T23:08:26.596ZINFOcrucible: [0] Transition from WaitQuorum to Active
103872023-09-22T23:08:26.596ZINFOcrucible: [1] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) Active New New ds_transition to WaitActive
103882023-09-22T23:08:26.596ZINFOcrucible: [1] Transition from New to WaitActive
103892023-09-22T23:08:26.596ZINFOcrucible: [1] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) Active WaitActive New ds_transition to WaitQuorum
103902023-09-22T23:08:26.596ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
103912023-09-22T23:08:26.596ZINFOcrucible: [1] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) Active WaitQuorum New ds_transition to Active
103922023-09-22T23:08:26.596ZINFOcrucible: [1] Transition from WaitQuorum to Active
103932023-09-22T23:08:26.596ZINFOcrucible: [2] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) Active Active New ds_transition to WaitActive
103942023-09-22T23:08:26.596ZINFOcrucible: [2] Transition from New to WaitActive
103952023-09-22T23:08:26.596ZINFOcrucible: [2] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) Active Active WaitActive ds_transition to WaitQuorum
103962023-09-22T23:08:26.596ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
103972023-09-22T23:08:26.596ZINFOcrucible: [2] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) Active Active WaitQuorum ds_transition to Active
103982023-09-22T23:08:26.597ZINFOcrucible: [2] Transition from WaitQuorum to Active
103992023-09-22T23:08:26.597ZINFOcrucible: 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad is now active with session: 05a7d3c1-8da7-4ab0-8e59-ef61aff4b565
104002023-09-22T23:08:26.597ZINFOcrucible: [2] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) Active Active Active ds_transition to Faulted
104012023-09-22T23:08:26.597ZINFOcrucible: [2] Transition from Active to Faulted
104022023-09-22T23:08:26.597ZINFOcrucible: [2] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) Active Active Faulted ds_transition to LiveRepairReady
104032023-09-22T23:08:26.597ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
104042023-09-22T23:08:26.597ZINFOcrucible: [2] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) Active Active LiveRepairReady ds_transition to LiveRepair
104052023-09-22T23:08:26.597ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
104062023-09-22T23:08:26.597ZINFOcrucible: Waiting for Close + ReOpen jobs
104072023-09-22T23:08:26.597ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
104082023-09-22T23:08:26.597ZINFOcrucible: RE:0 close id:1000 queued, notify DS
104092023-09-22T23:08:26.597ZINFOcrucible: RE:0 Wait for result from close command 1000:1
104102023-09-22T23:08:26.708ZINFOcrucible: Now move the NoOp job forward
104112023-09-22T23:08:26.708ZINFOcrucible: Now ACK the NoOp job
104122023-09-22T23:08:26.708ZINFOcrucible: Finally, move the ReOpen job forward
104132023-09-22T23:08:26.708ZINFOcrucible: Now ACK the Reopen job
104142023-09-22T23:08:26.708ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
104152023-09-22T23:08:26.708ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
104162023-09-22T23:08:26.708ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
104172023-09-22T23:08:26.708ZWARNcrucible: RE:0 Bailing with error
104182023-09-22T23:08:26.708ZINFOcrucible: Crucible stats registered with UUID: b90a2e95-f256-40ff-a6c3-b578ad79bb91
104192023-09-22T23:08:26.708ZINFOcrucible: Crucible b90a2e95-f256-40ff-a6c3-b578ad79bb91 has session id: b423ec84-3a14-4a06-b44f-c9ac8aa2686b
104202023-09-22T23:08:26.708ZINFOcrucible: [0] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) New New New ds_transition to WaitActive
104212023-09-22T23:08:26.708ZINFOcrucible: [0] Transition from New to WaitActive
104222023-09-22T23:08:26.708ZINFOcrucible: [0] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) WaitActive New New ds_transition to WaitQuorum
104232023-09-22T23:08:26.708ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
104242023-09-22T23:08:26.709ZINFOcrucible: [0] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) WaitQuorum New New ds_transition to Active
104252023-09-22T23:08:26.709ZINFOcrucible: [0] Transition from WaitQuorum to Active
104262023-09-22T23:08:26.709ZINFOcrucible: [1] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) Active New New ds_transition to WaitActive
104272023-09-22T23:08:26.709ZINFOcrucible: [1] Transition from New to WaitActive
104282023-09-22T23:08:26.709ZINFOcrucible: [1] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) Active WaitActive New ds_transition to WaitQuorum
104292023-09-22T23:08:26.709ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
104302023-09-22T23:08:26.709ZINFOcrucible: [1] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) Active WaitQuorum New ds_transition to Active
104312023-09-22T23:08:26.709ZINFOcrucible: [1] Transition from WaitQuorum to Active
104322023-09-22T23:08:26.709ZINFOcrucible: [2] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) Active Active New ds_transition to WaitActive
104332023-09-22T23:08:26.709ZINFOcrucible: [2] Transition from New to WaitActive
104342023-09-22T23:08:26.709ZINFOcrucible: [2] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) Active Active WaitActive ds_transition to WaitQuorum
104352023-09-22T23:08:26.709ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
104362023-09-22T23:08:26.709ZINFOcrucible: [2] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) Active Active WaitQuorum ds_transition to Active
104372023-09-22T23:08:26.709ZINFOcrucible: [2] Transition from WaitQuorum to Active
104382023-09-22T23:08:26.709ZINFOcrucible: b90a2e95-f256-40ff-a6c3-b578ad79bb91 is now active with session: 70f32c98-85ac-4a49-8e97-4e66c551292d
104392023-09-22T23:08:26.709ZINFOcrucible: [2] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) Active Active Active ds_transition to Faulted
104402023-09-22T23:08:26.709ZINFOcrucible: [2] Transition from Active to Faulted
104412023-09-22T23:08:26.709ZINFOcrucible: [2] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) Active Active Faulted ds_transition to LiveRepairReady
104422023-09-22T23:08:26.709ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
104432023-09-22T23:08:26.709ZINFOcrucible: [2] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) Active Active LiveRepairReady ds_transition to LiveRepair
104442023-09-22T23:08:26.709ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
104452023-09-22T23:08:26.709ZINFOcrucible: Waiting for Close + ReOpen jobs
104462023-09-22T23:08:26.709ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
104472023-09-22T23:08:26.709ZINFOcrucible: RE:0 close id:1000 queued, notify DS
104482023-09-22T23:08:26.709ZINFOcrucible: RE:0 Wait for result from close command 1000:1
104492023-09-22T23:08:27.588ZINFOcrucible: Finally, move the ReOpen job forward
104502023-09-22T23:08:27.588ZINFOcrucible: Now ACK the reopen job
104512023-09-22T23:08:27.588ZWARNcrucible: RE:0 Bailing with error
104522023-09-22T23:08:27.588ZINFOcrucible: Crucible stats registered with UUID: b122e61b-d947-42d8-bfa7-060e8c7d400f
104532023-09-22T23:08:27.588ZINFOcrucible: Crucible b122e61b-d947-42d8-bfa7-060e8c7d400f has session id: 30c9ce5b-de1a-48b2-8413-140dc7077875
104542023-09-22T23:08:27.588ZINFOcrucible: [0] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) New New New ds_transition to WaitActive
104552023-09-22T23:08:27.588ZINFOcrucible: [0] Transition from New to WaitActive
104562023-09-22T23:08:27.588ZINFOcrucible: [0] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) WaitActive New New ds_transition to WaitQuorum
104572023-09-22T23:08:27.588ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
104582023-09-22T23:08:27.588ZINFOcrucible: [0] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) WaitQuorum New New ds_transition to Active
104592023-09-22T23:08:27.588ZINFOcrucible: [0] Transition from WaitQuorum to Active
104602023-09-22T23:08:27.588ZINFOcrucible: [1] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) Active New New ds_transition to WaitActive
104612023-09-22T23:08:27.588ZINFOcrucible: [1] Transition from New to WaitActive
104622023-09-22T23:08:27.589ZINFOcrucible: [1] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) Active WaitActive New ds_transition to WaitQuorum
104632023-09-22T23:08:27.589ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
104642023-09-22T23:08:27.589ZINFOcrucible: [1] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) Active WaitQuorum New ds_transition to Active
104652023-09-22T23:08:27.589ZINFOcrucible: [1] Transition from WaitQuorum to Active
104662023-09-22T23:08:27.589ZINFOcrucible: [2] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) Active Active New ds_transition to WaitActive
104672023-09-22T23:08:27.589ZINFOcrucible: [2] Transition from New to WaitActive
104682023-09-22T23:08:27.589ZINFOcrucible: [2] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) Active Active WaitActive ds_transition to WaitQuorum
104692023-09-22T23:08:27.589ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
104702023-09-22T23:08:27.589ZINFOcrucible: [2] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) Active Active WaitQuorum ds_transition to Active
104712023-09-22T23:08:27.589ZINFOcrucible: [2] Transition from WaitQuorum to Active
104722023-09-22T23:08:27.589ZINFOcrucible: b122e61b-d947-42d8-bfa7-060e8c7d400f is now active with session: bacc12da-7d6f-49e4-a766-3a01fd8288ee
104732023-09-22T23:08:27.589ZINFOcrucible: [1] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) Active Active Active ds_transition to Faulted
104742023-09-22T23:08:27.589ZINFOcrucible: [1] Transition from Active to Faulted
104752023-09-22T23:08:27.589ZINFOcrucible: [1] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) Active Faulted Active ds_transition to LiveRepairReady
104762023-09-22T23:08:27.589ZINFOcrucible: [1] Transition from Faulted to LiveRepairReady
104772023-09-22T23:08:27.589ZINFOcrucible: [1] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) Active LiveRepairReady Active ds_transition to LiveRepair
104782023-09-22T23:08:27.589ZINFOcrucible: [1] Transition from LiveRepairReady to LiveRepair
104792023-09-22T23:08:27.589ZINFOcrucible: Waiting for Close + ReOpen jobs
104802023-09-22T23:08:27.589ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
104812023-09-22T23:08:27.589ZINFOcrucible: RE:0 close id:1000 queued, notify DS
104822023-09-22T23:08:27.589ZINFOcrucible: RE:0 Wait for result from close command 1000:1
104832023-09-22T23:08:27.589ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(0), repair_downstairs: [ClientId(2)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
104842023-09-22T23:08:27.589ZERROcrucible: [2] Reports error GenericError("bad") on job 1000, DownstairsIO { ds_id: JobId(1000), guest_id: 1, work: ExtentFlushClose { dependencies: [], extent: 0, flush_number: 0, gen_number: 0, source_downstairs: ClientId(0), repair_downstairs: [ClientId(2)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
104852023-09-22T23:08:27.589ZINFOcrucible: [2] client skip 2 in process jobs because fault = downstairs
104862023-09-22T23:08:27.589ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
104872023-09-22T23:08:27.589ZINFOcrucible: [2] 322c5d16-5794-4781-8bc0-a7be748f5fb6 (aa3992cb-83bf-4a9e-ae74-60b531d1ace4) Active Active LiveRepair ds_transition to Faulted
104882023-09-22T23:08:27.589ZINFOcrucible: [2] Transition from LiveRepair to Faulted
104892023-09-22T23:08:27.589ZINFOcrucible: Now ACK the close job
104902023-09-22T23:08:27.589ZINFOcrucible: Waiting for 3 jobs (currently 2)
104912023-09-22T23:08:27.589ZERROcrucible: Extent 0 close id:1000 Failed: Error: bad
104922023-09-22T23:08:27.589ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
104932023-09-22T23:08:27.598ZINFOcrucible: Waiting for 3 jobs (currently 2)
104942023-09-22T23:08:27.598ZINFOcrucible: No repair needed for extent 0 = downstairs
104952023-09-22T23:08:27.598ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
104962023-09-22T23:08:27.710ZINFOcrucible: Now ACK the close job
104972023-09-22T23:08:27.710ZINFOcrucible: Waiting for 3 jobs (currently 2)
104982023-09-22T23:08:27.710ZINFOcrucible: No repair needed for extent 0 = downstairs
104992023-09-22T23:08:27.710ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
105002023-09-22T23:08:28.589ZINFOcrucible: Waiting for 3 jobs (currently 2)
105012023-09-22T23:08:28.589ZINFOcrucible: No repair needed for extent 0 = downstairs
105022023-09-22T23:08:28.589ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
105032023-09-22T23:08:28.589ZINFOcrucible: Waiting for 4 jobs (currently 3)
105042023-09-22T23:08:28.589ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
105052023-09-22T23:08:28.590ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
105062023-09-22T23:08:28.599ZINFOcrucible: Waiting for 4 jobs (currently 3)
105072023-09-22T23:08:28.599ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
105082023-09-22T23:08:28.711ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
105092023-09-22T23:08:28.711ZERROcrucible: [1] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
105102023-09-22T23:08:28.711ZINFOcrucible: [1] client skip 3 in process jobs because fault = downstairs
105112023-09-22T23:08:28.711ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
105122023-09-22T23:08:28.711ZINFOcrucible: [1] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) Active Active LiveRepair ds_transition to Faulted
105132023-09-22T23:08:28.711ZINFOcrucible: [1] Transition from Active to Faulted
105142023-09-22T23:08:28.711ZINFOcrucible: Waiting for 4 jobs (currently 3)
105152023-09-22T23:08:28.711ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
105162023-09-22T23:08:28.711ZINFOcrucible: [2] client skip 3 in process jobs because fault = downstairs
105172023-09-22T23:08:28.711ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
105182023-09-22T23:08:28.711ZINFOcrucible: [2] b90a2e95-f256-40ff-a6c3-b578ad79bb91 (70f32c98-85ac-4a49-8e97-4e66c551292d) Active Faulted LiveRepair ds_transition to Faulted
105192023-09-22T23:08:28.711ZINFOcrucible: [2] Transition from LiveRepair to Faulted
105202023-09-22T23:08:28.711ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
105212023-09-22T23:08:29.590ZINFOcrucible: Waiting for 4 jobs (currently 3)
105222023-09-22T23:08:29.590ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
105232023-09-22T23:08:29.591ZINFOcrucible: Now move the NoOp job forward
105242023-09-22T23:08:29.591ZINFOcrucible: Now ACK the NoOp job
105252023-09-22T23:08:29.591ZINFOcrucible: Finally, move the ReOpen job forward
105262023-09-22T23:08:29.591ZINFOcrucible: Now ACK the Reopen job
105272023-09-22T23:08:29.591ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
105282023-09-22T23:08:29.591ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
105292023-09-22T23:08:29.591ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
105302023-09-22T23:08:29.591ZWARNcrucible: RE:0 Bailing with error
10531 ----------------------------------------------------------------
10532 Crucible gen:0 GIO:true work queues: Upstairs:0 downstairs:4
10533 GW_ID ACK DSID TYPE BLOCKS DS:0 DS:1 DS:2 REPLAY
10534 1 Acked 1000 FClose 0 Done Done Err false
10535 2 Acked 1001 NoOp 0 Done Done Skip false
10536 3 Acked 1002 NoOp 0 Done Done Skip false
10537 4 Acked 1003 Reopen 0 Done Done Skip false
10538 STATES DS:0 DS:1 DS:2 TOTAL
10539 New 0 0 0 0
10540 Sent 0 0 0 0
10541 Done 4 4 0 8
10542 Skipped 0 0 3 3
10543 Error 0 0 1 1
10544 Last Flush: 0 0 0
10545 Downstairs last five completed:
10546 Upstairs last five completed: 4 3 2 1
10547 test live_repair::repair_test::test_repair_extent_close_fails_all ... ok
105482023-09-22T23:08:29.600ZINFOcrucible: Now move the NoOp job forward
105492023-09-22T23:08:29.600ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
105502023-09-22T23:08:29.600ZERROcrucible: [2] Reports error GenericError("bad") on job 1003, DownstairsIO { ds_id: JobId(1003), guest_id: 4, work: ExtentLiveReopen { dependencies: [JobId(1000), JobId(1001), JobId(1002)], extent: 0 }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
105512023-09-22T23:08:29.600ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
105522023-09-22T23:08:29.601ZINFOcrucible: [2] changed 0 jobs to fault skipped = downstairs
105532023-09-22T23:08:29.601ZINFOcrucible: [2] 9f99bbef-a6b4-4677-9b80-a44b3f26f5ad (05a7d3c1-8da7-4ab0-8e59-ef61aff4b565) Active Active LiveRepair ds_transition to Faulted
105542023-09-22T23:08:29.601ZINFOcrucible: [2] Transition from LiveRepair to Faulted
105552023-09-22T23:08:29.601ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
105562023-09-22T23:08:29.601ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
105572023-09-22T23:08:29.601ZWARNcrucible: RE:0 Bailing with error
10558 test live_repair::repair_test::test_repair_extent_fail_reopen_all ... ok
105592023-09-22T23:08:29.712ZINFOcrucible: Now move the NoOp job forward
105602023-09-22T23:08:29.712ZINFOcrucible: Now ACK the NoOp job
105612023-09-22T23:08:29.712ZINFOcrucible: Finally, move the ReOpen job forward
105622023-09-22T23:08:29.712ZINFOcrucible: Now ACK the Reopen job
105632023-09-22T23:08:29.712ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
105642023-09-22T23:08:29.712ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
105652023-09-22T23:08:29.712ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
105662023-09-22T23:08:29.712ZWARNcrucible: RE:0 Bailing with error
105672023-09-22T23:08:29.712ZINFOcrucible: Crucible stats registered with UUID: bd11c632-367b-41e3-be1c-5e1fdf25ae23
105682023-09-22T23:08:29.712ZINFOcrucible: Crucible bd11c632-367b-41e3-be1c-5e1fdf25ae23 has session id: 4f378124-a171-4d44-b978-e414771e8790
105692023-09-22T23:08:29.712ZINFOcrucible: [0] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) New New New ds_transition to WaitActive
105702023-09-22T23:08:29.712ZINFOcrucible: [0] Transition from New to WaitActive
105712023-09-22T23:08:29.712ZINFOcrucible: [0] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) WaitActive New New ds_transition to WaitQuorum
105722023-09-22T23:08:29.712ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
105732023-09-22T23:08:29.712ZINFOcrucible: [0] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) WaitQuorum New New ds_transition to Active
105742023-09-22T23:08:29.712ZINFOcrucible: [0] Transition from WaitQuorum to Active
105752023-09-22T23:08:29.712ZINFOcrucible: [1] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) Active New New ds_transition to WaitActive
105762023-09-22T23:08:29.712ZINFOcrucible: [1] Transition from New to WaitActive
105772023-09-22T23:08:29.712ZINFOcrucible: [1] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) Active WaitActive New ds_transition to WaitQuorum
105782023-09-22T23:08:29.712ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
105792023-09-22T23:08:29.712ZINFOcrucible: [1] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) Active WaitQuorum New ds_transition to Active
105802023-09-22T23:08:29.712ZINFOcrucible: [1] Transition from WaitQuorum to Active
105812023-09-22T23:08:29.712ZINFOcrucible: [2] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) Active Active New ds_transition to WaitActive
105822023-09-22T23:08:29.712ZINFOcrucible: [2] Transition from New to WaitActive
105832023-09-22T23:08:29.713ZINFOcrucible: [2] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) Active Active WaitActive ds_transition to WaitQuorum
105842023-09-22T23:08:29.713ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
105852023-09-22T23:08:29.713ZINFOcrucible: [2] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) Active Active WaitQuorum ds_transition to Active
105862023-09-22T23:08:29.713ZINFOcrucible: [2] Transition from WaitQuorum to Active
105872023-09-22T23:08:29.713ZINFOcrucible: bd11c632-367b-41e3-be1c-5e1fdf25ae23 is now active with session: a74b0c5b-941d-45d5-babc-e7b7f16307d0
105882023-09-22T23:08:29.713ZINFOcrucible: [2] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) Active Active Active ds_transition to Faulted
105892023-09-22T23:08:29.713ZINFOcrucible: [2] Transition from Active to Faulted
105902023-09-22T23:08:29.713ZINFOcrucible: [2] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) Active Active Faulted ds_transition to LiveRepairReady
105912023-09-22T23:08:29.713ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
105922023-09-22T23:08:29.713ZINFOcrucible: [2] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) Active Active LiveRepairReady ds_transition to LiveRepair
105932023-09-22T23:08:29.713ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
105942023-09-22T23:08:29.713ZINFOcrucible: Waiting for Close + ReOpen jobs
105952023-09-22T23:08:29.713ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
105962023-09-22T23:08:29.713ZINFOcrucible: RE:0 close id:1000 queued, notify DS
105972023-09-22T23:08:29.713ZINFOcrucible: RE:0 Wait for result from close command 1000:1
105982023-09-22T23:08:30.592ZINFOcrucible: Now move the NoOp job forward
105992023-09-22T23:08:30.592ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
106002023-09-22T23:08:30.592ZERROcrucible: [2] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
106012023-09-22T23:08:30.592ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
106022023-09-22T23:08:30.592ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
106032023-09-22T23:08:30.592ZINFOcrucible: [2] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) Active LiveRepair Active ds_transition to Faulted
106042023-09-22T23:08:30.592ZINFOcrucible: [2] Transition from Active to Faulted
106052023-09-22T23:08:30.592ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
106062023-09-22T23:08:30.592ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
106072023-09-22T23:08:30.592ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
106082023-09-22T23:08:30.592ZINFOcrucible: [1] b122e61b-d947-42d8-bfa7-060e8c7d400f (bacc12da-7d6f-49e4-a766-3a01fd8288ee) Active LiveRepair Faulted ds_transition to Faulted
106092023-09-22T23:08:30.592ZINFOcrucible: [1] Transition from LiveRepair to Faulted
106102023-09-22T23:08:30.592ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
106112023-09-22T23:08:30.713ZINFOcrucible: Now ACK the close job
106122023-09-22T23:08:30.713ZINFOcrucible: Waiting for 3 jobs (currently 2)
106132023-09-22T23:08:30.713ZINFOcrucible: No repair needed for extent 0 = downstairs
106142023-09-22T23:08:30.713ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
106152023-09-22T23:08:31.714ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
106162023-09-22T23:08:31.714ZERROcrucible: [2] Reports error GenericError("bad") on job 1001, DownstairsIO { ds_id: JobId(1001), guest_id: 2, work: ExtentLiveNoOp { dependencies: [JobId(1000)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
106172023-09-22T23:08:31.714ZINFOcrucible: [2] client skip 3 in process jobs because fault = downstairs
106182023-09-22T23:08:31.714ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
106192023-09-22T23:08:31.714ZINFOcrucible: [2] bd11c632-367b-41e3-be1c-5e1fdf25ae23 (a74b0c5b-941d-45d5-babc-e7b7f16307d0) Active Active LiveRepair ds_transition to Faulted
106202023-09-22T23:08:31.714ZINFOcrucible: [2] Transition from LiveRepair to Faulted
106212023-09-22T23:08:31.714ZINFOcrucible: Waiting for 4 jobs (currently 3)
106222023-09-22T23:08:31.714ZERROcrucible: Extent 0 close id:1001 Failed: Error: bad
106232023-09-22T23:08:31.714ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
106242023-09-22T23:08:32.428ZINFOcrucible: responded to ping downstairs = 1
106252023-09-22T23:08:32.429ZINFOcrucible: responded to ping downstairs = 1
106262023-09-22T23:08:32.593ZINFOcrucible: Finally, move the ReOpen job forward
106272023-09-22T23:08:32.593ZINFOcrucible: Now ACK the reopen job
106282023-09-22T23:08:32.593ZWARNcrucible: RE:0 Bailing with error
106292023-09-22T23:08:32.593ZINFOcrucible: err:2 or:1
106302023-09-22T23:08:32.594ZINFOcrucible: Crucible stats registered with UUID: 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d
106312023-09-22T23:08:32.594ZINFOcrucible: Crucible 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d has session id: 760323dd-3f10-4c50-b9bd-60d87e8dbf17
106322023-09-22T23:08:32.594ZINFOcrucible: [0] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) New New New ds_transition to WaitActive
106332023-09-22T23:08:32.594ZINFOcrucible: [0] Transition from New to WaitActive
106342023-09-22T23:08:32.594ZINFOcrucible: [0] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) WaitActive New New ds_transition to WaitQuorum
106352023-09-22T23:08:32.594ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
106362023-09-22T23:08:32.594ZINFOcrucible: [0] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) WaitQuorum New New ds_transition to Active
106372023-09-22T23:08:32.594ZINFOcrucible: [0] Transition from WaitQuorum to Active
106382023-09-22T23:08:32.594ZINFOcrucible: [1] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) Active New New ds_transition to WaitActive
106392023-09-22T23:08:32.594ZINFOcrucible: [1] Transition from New to WaitActive
106402023-09-22T23:08:32.594ZINFOcrucible: [1] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) Active WaitActive New ds_transition to WaitQuorum
106412023-09-22T23:08:32.594ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
106422023-09-22T23:08:32.594ZINFOcrucible: [1] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) Active WaitQuorum New ds_transition to Active
106432023-09-22T23:08:32.594ZINFOcrucible: [1] Transition from WaitQuorum to Active
106442023-09-22T23:08:32.594ZINFOcrucible: [2] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) Active Active New ds_transition to WaitActive
106452023-09-22T23:08:32.594ZINFOcrucible: [2] Transition from New to WaitActive
106462023-09-22T23:08:32.594ZINFOcrucible: [2] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) Active Active WaitActive ds_transition to WaitQuorum
106472023-09-22T23:08:32.594ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
106482023-09-22T23:08:32.594ZINFOcrucible: [2] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) Active Active WaitQuorum ds_transition to Active
106492023-09-22T23:08:32.594ZINFOcrucible: [2] Transition from WaitQuorum to Active
106502023-09-22T23:08:32.594ZINFOcrucible: 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d is now active with session: e3e27907-0d47-47a5-8211-bf45df96075d
106512023-09-22T23:08:32.594ZINFOcrucible: [2] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) Active Active Active ds_transition to Faulted
106522023-09-22T23:08:32.594ZINFOcrucible: [2] Transition from Active to Faulted
106532023-09-22T23:08:32.594ZINFOcrucible: [2] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) Active Active Faulted ds_transition to LiveRepairReady
106542023-09-22T23:08:32.594ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
106552023-09-22T23:08:32.594ZINFOcrucible: [2] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) Active Active LiveRepairReady ds_transition to LiveRepair
106562023-09-22T23:08:32.594ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
106572023-09-22T23:08:32.594ZINFOcrucible: Waiting for Close + ReOpen jobs
106582023-09-22T23:08:32.594ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
106592023-09-22T23:08:32.594ZINFOcrucible: RE:0 close id:1000 queued, notify DS
106602023-09-22T23:08:32.594ZINFOcrucible: RE:0 Wait for result from close command 1000:1
106612023-09-22T23:08:32.715ZINFOcrucible: Now move the NoOp job forward
106622023-09-22T23:08:32.715ZINFOcrucible: Now ACK the NoOp job
106632023-09-22T23:08:32.715ZINFOcrucible: Finally, move the ReOpen job forward
106642023-09-22T23:08:32.715ZINFOcrucible: Now ACK the Reopen job
106652023-09-22T23:08:32.715ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
106662023-09-22T23:08:32.715ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
106672023-09-22T23:08:32.715ZERROcrucible: Extent 0 close id:1003 Failed: Error: bad
106682023-09-22T23:08:32.715ZWARNcrucible: RE:0 Bailing with error
10669 test live_repair::repair_test::test_repair_extent_repair_fails_all ... ok
106702023-09-22T23:08:33.595ZINFOcrucible: Waiting for 3 jobs (currently 2)
106712023-09-22T23:08:33.595ZINFOcrucible: No repair needed for extent 0 = downstairs
106722023-09-22T23:08:33.595ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
106732023-09-22T23:08:34.596ZINFOcrucible: Waiting for 4 jobs (currently 3)
106742023-09-22T23:08:34.596ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
106752023-09-22T23:08:35.597ZINFOcrucible: Now move the NoOp job forward
106762023-09-22T23:08:35.597ZERROcrucible: [0] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
106772023-09-22T23:08:35.597ZERROcrucible: [0] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([InProgress, New, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
106782023-09-22T23:08:35.597ZINFOcrucible: [0] client skip 4 in process jobs because fault = downstairs
106792023-09-22T23:08:35.597ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs
106802023-09-22T23:08:35.597ZINFOcrucible: [0] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) Active Active LiveRepair ds_transition to Faulted
106812023-09-22T23:08:35.597ZINFOcrucible: [0] Transition from Active to Faulted
106822023-09-22T23:08:35.597ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
106832023-09-22T23:08:35.597ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
106842023-09-22T23:08:35.597ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
106852023-09-22T23:08:35.597ZINFOcrucible: [2] 5c83a6ac-c2a3-4701-911d-c42a2b3e2a6d (e3e27907-0d47-47a5-8211-bf45df96075d) Faulted Active LiveRepair ds_transition to Faulted
106862023-09-22T23:08:35.597ZINFOcrucible: [2] Transition from LiveRepair to Faulted
106872023-09-22T23:08:35.597ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
106882023-09-22T23:08:37.597ZINFOcrucible: Finally, move the ReOpen job forward
106892023-09-22T23:08:37.597ZINFOcrucible: Now ACK the reopen job
106902023-09-22T23:08:37.597ZWARNcrucible: RE:0 Bailing with error
106912023-09-22T23:08:37.598ZINFOcrucible: err:0 or:2
106922023-09-22T23:08:37.598ZINFOcrucible: Crucible stats registered with UUID: fe831f8d-4eb5-44ca-a62d-c61c3e0540f9
106932023-09-22T23:08:37.598ZINFOcrucible: Crucible fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 has session id: b65f4444-6904-40f8-8cb2-293cdf26dc74
106942023-09-22T23:08:37.598ZINFOcrucible: [0] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) New New New ds_transition to WaitActive
106952023-09-22T23:08:37.598ZINFOcrucible: [0] Transition from New to WaitActive
106962023-09-22T23:08:37.598ZINFOcrucible: [0] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) WaitActive New New ds_transition to WaitQuorum
106972023-09-22T23:08:37.598ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
106982023-09-22T23:08:37.598ZINFOcrucible: [0] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) WaitQuorum New New ds_transition to Active
106992023-09-22T23:08:37.598ZINFOcrucible: [0] Transition from WaitQuorum to Active
107002023-09-22T23:08:37.598ZINFOcrucible: [1] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) Active New New ds_transition to WaitActive
107012023-09-22T23:08:37.598ZINFOcrucible: [1] Transition from New to WaitActive
107022023-09-22T23:08:37.598ZINFOcrucible: [1] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) Active WaitActive New ds_transition to WaitQuorum
107032023-09-22T23:08:37.598ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
107042023-09-22T23:08:37.598ZINFOcrucible: [1] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) Active WaitQuorum New ds_transition to Active
107052023-09-22T23:08:37.599ZINFOcrucible: [1] Transition from WaitQuorum to Active
107062023-09-22T23:08:37.599ZINFOcrucible: [2] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) Active Active New ds_transition to WaitActive
107072023-09-22T23:08:37.599ZINFOcrucible: [2] Transition from New to WaitActive
107082023-09-22T23:08:37.599ZINFOcrucible: [2] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) Active Active WaitActive ds_transition to WaitQuorum
107092023-09-22T23:08:37.599ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
107102023-09-22T23:08:37.599ZINFOcrucible: [2] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) Active Active WaitQuorum ds_transition to Active
107112023-09-22T23:08:37.599ZINFOcrucible: [2] Transition from WaitQuorum to Active
107122023-09-22T23:08:37.599ZINFOcrucible: fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 is now active with session: 826d5ec6-dae1-4ee1-a079-b1a0132403d0
107132023-09-22T23:08:37.599ZINFOcrucible: [2] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) Active Active Active ds_transition to Faulted
107142023-09-22T23:08:37.599ZINFOcrucible: [2] Transition from Active to Faulted
107152023-09-22T23:08:37.599ZINFOcrucible: [2] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) Active Active Faulted ds_transition to LiveRepairReady
107162023-09-22T23:08:37.599ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
107172023-09-22T23:08:37.599ZINFOcrucible: [2] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) Active Active LiveRepairReady ds_transition to LiveRepair
107182023-09-22T23:08:37.599ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
107192023-09-22T23:08:37.599ZINFOcrucible: Waiting for Close + ReOpen jobs
107202023-09-22T23:08:37.599ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
107212023-09-22T23:08:37.599ZINFOcrucible: RE:0 close id:1000 queued, notify DS
107222023-09-22T23:08:37.599ZINFOcrucible: RE:0 Wait for result from close command 1000:1
107232023-09-22T23:08:38.600ZINFOcrucible: Waiting for 3 jobs (currently 2)
107242023-09-22T23:08:38.600ZINFOcrucible: No repair needed for extent 0 = downstairs
107252023-09-22T23:08:38.600ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
107262023-09-22T23:08:39.601ZINFOcrucible: Waiting for 4 jobs (currently 3)
107272023-09-22T23:08:39.601ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
107282023-09-22T23:08:40.603ZINFOcrucible: Now move the NoOp job forward
107292023-09-22T23:08:40.603ZERROcrucible: [1] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
107302023-09-22T23:08:40.603ZERROcrucible: [1] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, InProgress, New]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
107312023-09-22T23:08:40.603ZINFOcrucible: [1] client skip 4 in process jobs because fault = downstairs
107322023-09-22T23:08:40.603ZINFOcrucible: [1] changed 1 jobs to fault skipped = downstairs
107332023-09-22T23:08:40.603ZINFOcrucible: [1] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) Active Active LiveRepair ds_transition to Faulted
107342023-09-22T23:08:40.603ZINFOcrucible: [1] Transition from Active to Faulted
107352023-09-22T23:08:40.603ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
107362023-09-22T23:08:40.603ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
107372023-09-22T23:08:40.603ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
107382023-09-22T23:08:40.603ZINFOcrucible: [2] fe831f8d-4eb5-44ca-a62d-c61c3e0540f9 (826d5ec6-dae1-4ee1-a079-b1a0132403d0) Active Faulted LiveRepair ds_transition to Faulted
107392023-09-22T23:08:40.603ZINFOcrucible: [2] Transition from LiveRepair to Faulted
107402023-09-22T23:08:40.603ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
107412023-09-22T23:08:42.430ZINFOcrucible: responded to ping downstairs = 1
107422023-09-22T23:08:42.433ZINFOcrucible: responded to ping downstairs = 1
107432023-09-22T23:08:42.604ZINFOcrucible: Finally, move the ReOpen job forward
107442023-09-22T23:08:42.604ZINFOcrucible: Now ACK the reopen job
107452023-09-22T23:08:42.604ZWARNcrucible: RE:0 Bailing with error
107462023-09-22T23:08:42.604ZINFOcrucible: err:1 or:2
107472023-09-22T23:08:42.605ZINFOcrucible: Crucible stats registered with UUID: 67cad85b-0cae-4869-903a-ed4f571a48bf
107482023-09-22T23:08:42.605ZINFOcrucible: Crucible 67cad85b-0cae-4869-903a-ed4f571a48bf has session id: 8b541110-c8e0-4fbc-af9b-0181a0db9f0d
107492023-09-22T23:08:42.605ZINFOcrucible: [0] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) New New New ds_transition to WaitActive
107502023-09-22T23:08:42.605ZINFOcrucible: [0] Transition from New to WaitActive
107512023-09-22T23:08:42.605ZINFOcrucible: [0] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) WaitActive New New ds_transition to WaitQuorum
107522023-09-22T23:08:42.605ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
107532023-09-22T23:08:42.605ZINFOcrucible: [0] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) WaitQuorum New New ds_transition to Active
107542023-09-22T23:08:42.605ZINFOcrucible: [0] Transition from WaitQuorum to Active
107552023-09-22T23:08:42.605ZINFOcrucible: [1] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) Active New New ds_transition to WaitActive
107562023-09-22T23:08:42.605ZINFOcrucible: [1] Transition from New to WaitActive
107572023-09-22T23:08:42.605ZINFOcrucible: [1] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) Active WaitActive New ds_transition to WaitQuorum
107582023-09-22T23:08:42.605ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
107592023-09-22T23:08:42.605ZINFOcrucible: [1] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) Active WaitQuorum New ds_transition to Active
107602023-09-22T23:08:42.605ZINFOcrucible: [1] Transition from WaitQuorum to Active
107612023-09-22T23:08:42.605ZINFOcrucible: [2] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) Active Active New ds_transition to WaitActive
107622023-09-22T23:08:42.605ZINFOcrucible: [2] Transition from New to WaitActive
107632023-09-22T23:08:42.605ZINFOcrucible: [2] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) Active Active WaitActive ds_transition to WaitQuorum
107642023-09-22T23:08:42.605ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
107652023-09-22T23:08:42.605ZINFOcrucible: [2] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) Active Active WaitQuorum ds_transition to Active
107662023-09-22T23:08:42.605ZINFOcrucible: [2] Transition from WaitQuorum to Active
107672023-09-22T23:08:42.605ZINFOcrucible: 67cad85b-0cae-4869-903a-ed4f571a48bf is now active with session: 33ea95ab-5a5e-42bf-8c5f-88a32110e72d
107682023-09-22T23:08:42.605ZINFOcrucible: [2] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) Active Active Active ds_transition to Faulted
107692023-09-22T23:08:42.605ZINFOcrucible: [2] Transition from Active to Faulted
107702023-09-22T23:08:42.605ZINFOcrucible: [2] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) Active Active Faulted ds_transition to LiveRepairReady
107712023-09-22T23:08:42.605ZINFOcrucible: [2] Transition from Faulted to LiveRepairReady
107722023-09-22T23:08:42.605ZINFOcrucible: [2] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) Active Active LiveRepairReady ds_transition to LiveRepair
107732023-09-22T23:08:42.605ZINFOcrucible: [2] Transition from LiveRepairReady to LiveRepair
107742023-09-22T23:08:42.605ZINFOcrucible: Waiting for Close + ReOpen jobs
107752023-09-22T23:08:42.605ZINFOcrucible: RE:0 repair extent with ids 1000,1001,1002,1003 deps:[]
107762023-09-22T23:08:42.605ZINFOcrucible: RE:0 close id:1000 queued, notify DS
107772023-09-22T23:08:42.605ZINFOcrucible: RE:0 Wait for result from close command 1000:1
107782023-09-22T23:08:43.605ZINFOcrucible: Waiting for 3 jobs (currently 2)
107792023-09-22T23:08:43.605ZINFOcrucible: No repair needed for extent 0 = downstairs
107802023-09-22T23:08:43.605ZINFOcrucible: RE:0 Wait for result from repair command 1001:2
107812023-09-22T23:08:44.607ZINFOcrucible: Waiting for 4 jobs (currently 3)
107822023-09-22T23:08:44.607ZINFOcrucible: RE:0 Wait for result from NoOp command 1002:3
107832023-09-22T23:08:45.608ZINFOcrucible: Now move the NoOp job forward
107842023-09-22T23:08:45.608ZERROcrucible: [2] DS Reports error Err(GenericError("bad")) on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
107852023-09-22T23:08:45.608ZERROcrucible: [2] Reports error GenericError("bad") on job 1002, DownstairsIO { ds_id: JobId(1002), guest_id: 3, work: ExtentLiveNoOp { dependencies: [JobId(1000), JobId(1001)] }, state: ClientData([Done, Done, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 2 }) } = downstairs
107862023-09-22T23:08:45.608ZINFOcrucible: [2] client skip 4 in process jobs because fault = downstairs
107872023-09-22T23:08:45.608ZINFOcrucible: [2] changed 1 jobs to fault skipped = downstairs
107882023-09-22T23:08:45.608ZINFOcrucible: [2] 67cad85b-0cae-4869-903a-ed4f571a48bf (33ea95ab-5a5e-42bf-8c5f-88a32110e72d) Active Active LiveRepair ds_transition to Faulted
107892023-09-22T23:08:45.608ZINFOcrucible: [2] Transition from LiveRepair to Faulted
107902023-09-22T23:08:45.608ZERROcrucible: Extent 0 close id:1002 Failed: Error: bad
107912023-09-22T23:08:45.608ZINFOcrucible: RE:0 Wait for result from reopen command 1003:4
107922023-09-22T23:08:47.609ZINFOcrucible: Finally, move the ReOpen job forward
107932023-09-22T23:08:47.609ZINFOcrucible: Now ACK the reopen job
107942023-09-22T23:08:47.609ZWARNcrucible: RE:0 Bailing with error
10795 test live_repair::repair_test::test_repair_extent_fail_noop_all ... ok
107962023-09-22T23:08:52.433ZINFOcrucible: responded to ping downstairs = 1
107972023-09-22T23:08:52.436ZINFOcrucible: responded to ping downstairs = 1
10798 test dummy_downstairs_tests::protocol_test::test_error_during_live_repair_no_halt has been running for over 60 seconds
10799 test dummy_downstairs_tests::protocol_test::test_successful_live_repair has been running for over 60 seconds
108002023-09-22T23:09:02.438ZINFOcrucible: responded to ping downstairs = 1
108012023-09-22T23:09:02.439ZINFOcrucible: responded to ping downstairs = 1
10802 2023-09-22T23:09:12.440ZINFOcrucible: responded to ping downstairs = 1
10803 2023-09-22T23:09:12.440ZINFOcrucible: responded to ping downstairs = 1
108042023-09-22T23:09:22.441ZINFOcrucible: responded to ping downstairs = 1
108052023-09-22T23:09:22.443ZINFOcrucible: responded to ping downstairs = 1
108062023-09-22T23:09:32.444ZINFOcrucible: responded to ping downstairs = 1
108072023-09-22T23:09:32.449ZINFOcrucible: responded to ping downstairs = 1
108082023-09-22T23:09:42.450ZINFOcrucible: responded to ping downstairs = 1
108092023-09-22T23:09:42.456ZINFOcrucible: responded to ping downstairs = 1
108102023-09-22T23:09:52.453ZINFOcrucible: responded to ping downstairs = 1
108112023-09-22T23:09:52.463ZINFOcrucible: responded to ping downstairs = 1
108122023-09-22T23:10:02.453ZINFOcrucible: responded to ping downstairs = 1
108132023-09-22T23:10:02.470ZINFOcrucible: responded to ping downstairs = 1
108142023-09-22T23:10:12.456ZINFOcrucible: responded to ping downstairs = 1
108152023-09-22T23:10:12.471ZINFOcrucible: responded to ping downstairs = 1
108162023-09-22T23:10:22.457ZINFOcrucible: responded to ping downstairs = 1
108172023-09-22T23:10:22.471ZINFOcrucible: responded to ping downstairs = 1
108182023-09-22T23:10:32.463ZINFOcrucible: responded to ping downstairs = 1
108192023-09-22T23:10:32.477ZINFOcrucible: responded to ping downstairs = 1
108202023-09-22T23:10:42.470ZINFOcrucible: responded to ping downstairs = 1
108212023-09-22T23:10:42.485ZINFOcrucible: responded to ping downstairs = 1
108222023-09-22T23:10:52.477ZINFOcrucible: responded to ping downstairs = 1
108232023-09-22T23:10:52.485ZINFOcrucible: responded to ping downstairs = 1
108242023-09-22T23:11:02.478ZINFOcrucible: responded to ping downstairs = 1
108252023-09-22T23:11:02.495ZINFOcrucible: responded to ping downstairs = 1
108262023-09-22T23:11:12.479ZINFOcrucible: responded to ping downstairs = 1
108272023-09-22T23:11:12.501ZINFOcrucible: responded to ping downstairs = 1
108282023-09-22T23:11:22.487ZINFOcrucible: responded to ping downstairs = 1
108292023-09-22T23:11:22.506ZINFOcrucible: responded to ping downstairs = 1
108302023-09-22T23:11:32.495ZINFOcrucible: responded to ping downstairs = 1
108312023-09-22T23:11:32.506ZINFOcrucible: responded to ping downstairs = 1
108322023-09-22T23:11:42.504ZINFOcrucible: responded to ping downstairs = 1
108332023-09-22T23:11:42.515ZINFOcrucible: responded to ping downstairs = 1
108342023-09-22T23:11:52.515ZINFOcrucible: responded to ping downstairs = 1
108352023-09-22T23:11:52.524ZINFOcrucible: responded to ping downstairs = 1
108362023-09-22T23:12:02.523ZINFOcrucible: responded to ping downstairs = 1
108372023-09-22T23:12:02.534ZINFOcrucible: responded to ping downstairs = 1
108382023-09-22T23:12:12.525ZINFOcrucible: responded to ping downstairs = 1
108392023-09-22T23:12:12.541ZINFOcrucible: responded to ping downstairs = 1
108402023-09-22T23:12:22.525ZINFOcrucible: responded to ping downstairs = 1
108412023-09-22T23:12:22.549ZINFOcrucible: responded to ping downstairs = 1
108422023-09-22T23:12:32.536ZINFOcrucible: responded to ping downstairs = 1
108432023-09-22T23:12:32.555ZINFOcrucible: responded to ping downstairs = 1
108442023-09-22T23:12:42.540ZINFOcrucible: responded to ping downstairs = 1
108452023-09-22T23:12:42.564ZINFOcrucible: responded to ping downstairs = 1
108462023-09-22T23:12:52.552ZINFOcrucible: responded to ping downstairs = 1
108472023-09-22T23:12:52.572ZINFOcrucible: responded to ping downstairs = 1
108482023-09-22T23:13:02.560ZINFOcrucible: responded to ping downstairs = 1
108492023-09-22T23:13:02.582ZINFOcrucible: responded to ping downstairs = 1
108502023-09-22T23:13:12.562ZINFOcrucible: responded to ping downstairs = 1
108512023-09-22T23:13:12.583ZINFOcrucible: responded to ping downstairs = 1
108522023-09-22T23:13:22.565ZINFOcrucible: responded to ping downstairs = 1
108532023-09-22T23:13:22.586ZINFOcrucible: responded to ping downstairs = 1
108542023-09-22T23:13:32.571ZINFOcrucible: responded to ping downstairs = 1
108552023-09-22T23:13:32.592ZINFOcrucible: responded to ping downstairs = 1
108562023-09-22T23:13:42.573ZINFOcrucible: responded to ping downstairs = 1
108572023-09-22T23:13:42.597ZINFOcrucible: responded to ping downstairs = 1
108582023-09-22T23:13:52.586ZINFOcrucible: responded to ping downstairs = 1
108592023-09-22T23:13:52.600ZINFOcrucible: responded to ping downstairs = 1
108602023-09-22T23:14:02.600ZINFOcrucible: responded to ping downstairs = 1
108612023-09-22T23:14:02.602ZINFOcrucible: responded to ping downstairs = 1
108622023-09-22T23:14:03.999ZWARNcrucible: [up] downstairs 0 failed, too many outstanding jobs 57001 upstairs = 1
108632023-09-22T23:14:03.999ZINFOcrucible: [0] client skip 57001 in process jobs because fault = downstairs upstairs = 1
108642023-09-22T23:14:04.069ZINFOcrucible: [0] changed 57001 jobs to fault skipped = downstairs upstairs = 1
108652023-09-22T23:14:04.158ZINFOcrucible: [0] 024c3783-b9b4-4453-823f-769dad90d4f2 (db076280-2529-45a1-a093-a3cd3f0799dc) Active Active Active ds_transition to Faulted upstairs = 1
108662023-09-22T23:14:04.158ZINFOcrucible: [0] Transition from Active to Faulted upstairs = 1
108672023-09-22T23:14:04.253ZWARNcrucible: [0] flow control end upstairs = 1
108682023-09-22T23:14:11.221ZWARNcrucible: [up] downstairs 0 failed, too many outstanding jobs 57001 upstairs = 1
108692023-09-22T23:14:11.221ZINFOcrucible: [0] client skip 57001 in process jobs because fault = downstairs upstairs = 1
108702023-09-22T23:14:11.286ZINFOcrucible: [0] changed 57001 jobs to fault skipped = downstairs upstairs = 1
108712023-09-22T23:14:11.371ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 (b793572f-19dc-423b-877e-0f0cfe9147d9) Active Active Active ds_transition to Faulted upstairs = 1
108722023-09-22T23:14:11.371ZINFOcrucible: [0] Transition from Active to Faulted upstairs = 1
108732023-09-22T23:14:11.372ZWARNcrucible: [0] flow control end upstairs = 1
108742023-09-22T23:14:12.604ZINFOcrucible: responded to ping downstairs = 1
108752023-09-22T23:14:12.604ZWARNcrucible: [0] will exit pm_task, this downstairs Faulted upstairs = 1
108762023-09-22T23:14:12.604ZERROcrucible: 127.0.0.1:39941: proc: [0] client work task ended, Ok(Err([0] This downstairs now in Faulted)), so we end too looper = 0 upstairs = 1
108772023-09-22T23:14:12.604ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 Gone missing, transition from Faulted to Faulted upstairs = 1
108782023-09-22T23:14:12.604ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 connection to 127.0.0.1:39941 closed looper = 0 upstairs = 1
108792023-09-22T23:14:12.604ZINFOcrucible: [0] 127.0.0.1:39941 task reports connection:false upstairs = 1
108802023-09-22T23:14:12.604ZINFOcrucible: df6648fa-e480-4dd9-b7c4-7fec45dbddf8 Faulted Active Active upstairs = 1
108812023-09-22T23:14:12.604ZINFOcrucible: [0] 127.0.0.1:39941 task reports offline upstairs = 1
108822023-09-22T23:14:12.604ZERROcrucible: spawn_message_receiver saw disconnect, bailing downstairs = 1
108832023-09-22T23:14:16.166ZINFOcrucible: responded to ping downstairs = 1
108842023-09-22T23:14:16.166ZERROcrucible: [0] Dropping job 1000, this downstairs is faulted upstairs = 1
108852023-09-22T23:14:16.166ZERROcrucible: process_ds_operation error: No longer active upstairs = 1
108862023-09-22T23:14:16.166ZWARNcrucible: [0] will exit pm_task, this downstairs Faulted upstairs = 1
108872023-09-22T23:14:16.166ZERROcrucible: 127.0.0.1:52643: proc: [0] client work task ended, Ok(Err([0] This downstairs now in Faulted)), so we end too looper = 0 upstairs = 1
108882023-09-22T23:14:16.166ZINFOcrucible: [0] 024c3783-b9b4-4453-823f-769dad90d4f2 Gone missing, transition from Faulted to Faulted upstairs = 1
108892023-09-22T23:14:16.166ZINFOcrucible: [0] 024c3783-b9b4-4453-823f-769dad90d4f2 connection to 127.0.0.1:52643 closed looper = 0 upstairs = 1
108902023-09-22T23:14:16.166ZINFOcrucible: [0] 127.0.0.1:52643 task reports connection:false upstairs = 1
108912023-09-22T23:14:16.166ZINFOcrucible: 024c3783-b9b4-4453-823f-769dad90d4f2 Faulted Active Active upstairs = 1
108922023-09-22T23:14:16.166ZINFOcrucible: [0] 127.0.0.1:52643 task reports offline upstairs = 1
108932023-09-22T23:14:16.166ZERROcrucible: spawn_message_receiver saw disconnect, bailing downstairs = 1
108942023-09-22T23:14:16.166ZERROcrucible: could not send read response for job 3 = 1003: Broken pipe (os error 32)
108952023-09-22T23:14:16.928ZINFOcrucible: responded to ping downstairs = 2
108962023-09-22T23:14:16.928ZINFOcrucible: responded to ping downstairs = 3
108972023-09-22T23:14:17.166ZINFOcrucible: [0] 024c3783-b9b4-4453-823f-769dad90d4f2 looper connected looper = 0 upstairs = 1
108982023-09-22T23:14:17.166ZINFOcrucible: [0] Proc runs for 127.0.0.1:52643 in state Faulted upstairs = 1
108992023-09-22T23:14:17.166ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: 024c3783-b9b4-4453-823f-769dad90d4f2, session_id: db076280-2529-45a1-a093-a3cd3f0799dc, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 1
109002023-09-22T23:14:17.167ZINFOcrucible: [0] upstairs guest_io_ready=TRUE, promote! session db076280-2529-45a1-a093-a3cd3f0799dc upstairs = 1
109012023-09-22T23:14:17.167ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: 024c3783-b9b4-4453-823f-769dad90d4f2, session_id: db076280-2529-45a1-a093-a3cd3f0799dc, gen: 1 } downstairs = 1
109022023-09-22T23:14:17.167ZINFOcrucible: negotiate packet RegionInfoPlease downstairs = 1
109032023-09-22T23:14:17.167ZINFOcrucible: [0] downstairs client at 127.0.0.1:52643 has UUID af1eb908-dd0e-4704-a165-8346bf74d171 upstairs = 1
109042023-09-22T23:14:17.167ZINFOcrucible: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: af1eb908-dd0e-4704-a165-8346bf74d171, encrypted: false, database_read_version: 1, database_write_version: 1 } upstairs = 1
109052023-09-22T23:14:17.167ZINFOcrucible: Returning client:0 UUID:af1eb908-dd0e-4704-a165-8346bf74d171 matches upstairs = 1
109062023-09-22T23:14:17.167ZINFOcrucible: 024c3783-b9b4-4453-823f-769dad90d4f2 Faulted Active Active upstairs = 1
109072023-09-22T23:14:17.167ZINFOcrucible: negotiate packet ExtentVersionsPlease downstairs = 1
109082023-09-22T23:14:17.167ZINFOcrucible: [0] 024c3783-b9b4-4453-823f-769dad90d4f2 (db076280-2529-45a1-a093-a3cd3f0799dc) Faulted Active Active ds_transition to LiveRepairReady upstairs = 1
109092023-09-22T23:14:17.167ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady upstairs = 1
109102023-09-22T23:14:17.167ZWARNcrucible: [0] new RM replaced this: Some(RegionMetadata { generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], dirty: [false, false, false, false, false, false, false, false, false, false] }) upstairs = 1
109112023-09-22T23:14:17.167ZWARNcrucible: [0] 024c3783-b9b4-4453-823f-769dad90d4f2 Enter Ready for LiveRepair mode upstairs = 1
109122023-09-22T23:14:17.167ZINFOcrucible: [0] Starts cmd_loop upstairs = 1
109132023-09-22T23:14:17.167ZINFOcrucible: [0] 127.0.0.1:52643 task reports connection:true upstairs = 1
109142023-09-22T23:14:17.167ZINFOcrucible: 024c3783-b9b4-4453-823f-769dad90d4f2 LiveRepairReady Active Active upstairs = 1
109152023-09-22T23:14:17.167ZINFOcrucible: Set check for repair upstairs = 1
109162023-09-22T23:14:18.169ZINFOcrucible: Checking if live repair is needed upstairs = 1
109172023-09-22T23:14:18.169ZINFOcrucible: [0] 024c3783-b9b4-4453-823f-769dad90d4f2 (db076280-2529-45a1-a093-a3cd3f0799dc) LiveRepairReady Active Active ds_transition to LiveRepair upstairs = 1
109182023-09-22T23:14:18.169ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair upstairs = 1
109192023-09-22T23:14:18.169ZINFOcrucible: Live Repair started upstairs = 1
109202023-09-22T23:14:18.169ZWARNcrucible: Live Repair main task begins. task = repair upstairs = 1
109212023-09-22T23:14:18.169ZINFOcrucible: Start Live Repair of extents 0 to 10 task = repair upstairs = 1
109222023-09-22T23:14:18.169ZINFOcrucible: Start extent 0 repair task = repair upstairs = 1
109232023-09-22T23:14:18.169ZINFOcrucible: RE:0 repair extent with ids 58201,58202,58203,58204 deps:[] upstairs = 1
109242023-09-22T23:14:18.169ZINFOcrucible: RE:0 close id:58201 queued, notify DS upstairs = 1
109252023-09-22T23:14:18.169ZINFOcrucible: RE:0 Wait for result from close command 58201:57202 upstairs = 1
109262023-09-22T23:14:18.169ZINFOcrucible: [0] 58201 final dependency list [] = downstairs upstairs = 1
109272023-09-22T23:14:18.169ZINFOcrucible: [0] 58204 final dependency list [JobId(58201), JobId(58202), JobId(58203)] = downstairs upstairs = 1
109282023-09-22T23:14:18.170ZINFOcrucible: [0] 58205 final dependency list [JobId(58204), JobId(58201)] = downstairs upstairs = 1
109292023-09-22T23:14:18.170ZWARNcrucible: Write to Extent 0:0:9 under repair upstairs = 1
109302023-09-22T23:14:18.170ZINFOcrucible: [0] 58206 final dependency list [JobId(58205), JobId(58204), JobId(58201)] = downstairs upstairs = 1
109312023-09-22T23:14:18.179ZINFOcrucible: Repair for extent 0 s:2 d:[ClientId(0)] = downstairs upstairs = 1
109322023-09-22T23:14:18.179ZINFOcrucible: RE:0 Wait for result from repair command 58202:57203 upstairs = 1
109332023-09-22T23:14:18.179ZINFOcrucible: [0] 58202 final dependency list [JobId(58201)] = downstairs upstairs = 1
109342023-09-22T23:14:18.180ZINFOcrucible: RE:0 Wait for result from NoOp command 58203:57204 upstairs = 1
109352023-09-22T23:14:18.180ZINFOcrucible: [0] 58203 final dependency list [JobId(58201), JobId(58202)] = downstairs upstairs = 1
109362023-09-22T23:14:18.183ZINFOcrucible: RE:0 Wait for result from reopen command 58204:57205 upstairs = 1
109372023-09-22T23:14:18.183ZINFOcrucible: Start extent 1 repair task = repair upstairs = 1
109382023-09-22T23:14:18.183ZINFOcrucible: RE:1 repair extent with ids 58225,58226,58227,58228 deps:[JobId(58208), JobId(58207)] upstairs = 1
109392023-09-22T23:14:18.183ZINFOcrucible: RE:1 close id:58225 queued, notify DS upstairs = 1
109402023-09-22T23:14:18.183ZINFOcrucible: RE:1 Wait for result from close command 58225:57226 upstairs = 1
109412023-09-22T23:14:18.183ZINFOcrucible: [0] 58225 final dependency list [] = downstairs upstairs = 1
109422023-09-22T23:14:18.183ZINFOcrucible: [0] 58228 final dependency list [JobId(58225), JobId(58226), JobId(58227)] = downstairs upstairs = 1
109432023-09-22T23:14:18.184ZINFOcrucible: [0] 58229 final dependency list [JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
109442023-09-22T23:14:18.184ZINFOcrucible: [0] 58230 final dependency list [JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
109452023-09-22T23:14:18.185ZINFOcrucible: [0] 58231 final dependency list [JobId(58228), JobId(58225)] = downstairs upstairs = 1
109462023-09-22T23:14:18.185ZWARNcrucible: Write to Extent 1:0:9 under repair upstairs = 1
109472023-09-22T23:14:18.185ZINFOcrucible: [0] 58232 final dependency list [JobId(58231), JobId(58228), JobId(58225)] = downstairs upstairs = 1
109482023-09-22T23:14:18.193ZINFOcrucible: Repair for extent 1 s:2 d:[ClientId(0)] = downstairs upstairs = 1
109492023-09-22T23:14:18.193ZINFOcrucible: RE:1 Wait for result from repair command 58226:57227 upstairs = 1
109502023-09-22T23:14:18.193ZINFOcrucible: [0] 58226 final dependency list [JobId(58225)] = downstairs upstairs = 1
109512023-09-22T23:14:18.194ZINFOcrucible: RE:1 Wait for result from NoOp command 58227:57228 upstairs = 1
109522023-09-22T23:14:18.194ZINFOcrucible: [0] 58227 final dependency list [JobId(58225), JobId(58226)] = downstairs upstairs = 1
109532023-09-22T23:14:18.197ZINFOcrucible: RE:1 Wait for result from reopen command 58228:57229 upstairs = 1
109542023-09-22T23:14:18.197ZINFOcrucible: Start extent 2 repair task = repair upstairs = 1
109552023-09-22T23:14:18.197ZINFOcrucible: RE:2 repair extent with ids 58249,58250,58251,58252 deps:[JobId(58234), JobId(58233), JobId(58210), JobId(58209)] upstairs = 1
109562023-09-22T23:14:18.197ZINFOcrucible: RE:2 close id:58249 queued, notify DS upstairs = 1
109572023-09-22T23:14:18.197ZINFOcrucible: RE:2 Wait for result from close command 58249:57250 upstairs = 1
109582023-09-22T23:14:18.197ZINFOcrucible: [0] 58249 final dependency list [] = downstairs upstairs = 1
109592023-09-22T23:14:18.197ZINFOcrucible: [0] 58252 final dependency list [JobId(58249), JobId(58250), JobId(58251)] = downstairs upstairs = 1
109602023-09-22T23:14:18.198ZINFOcrucible: [0] 58253 final dependency list [JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
109612023-09-22T23:14:18.198ZINFOcrucible: [0] 58254 final dependency list [JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
109622023-09-22T23:14:18.199ZINFOcrucible: [0] 58255 final dependency list [JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
109632023-09-22T23:14:18.199ZINFOcrucible: [0] 58256 final dependency list [JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
109642023-09-22T23:14:18.200ZINFOcrucible: [0] 58257 final dependency list [JobId(58252), JobId(58249)] = downstairs upstairs = 1
109652023-09-22T23:14:18.200ZWARNcrucible: Write to Extent 2:0:9 under repair upstairs = 1
109662023-09-22T23:14:18.201ZINFOcrucible: [0] 58258 final dependency list [JobId(58257), JobId(58252), JobId(58249)] = downstairs upstairs = 1
109672023-09-22T23:14:18.208ZINFOcrucible: Repair for extent 2 s:2 d:[ClientId(0)] = downstairs upstairs = 1
109682023-09-22T23:14:18.208ZINFOcrucible: RE:2 Wait for result from repair command 58250:57251 upstairs = 1
109692023-09-22T23:14:18.208ZINFOcrucible: [0] 58250 final dependency list [JobId(58249)] = downstairs upstairs = 1
109702023-09-22T23:14:18.209ZINFOcrucible: RE:2 Wait for result from NoOp command 58251:57252 upstairs = 1
109712023-09-22T23:14:18.209ZINFOcrucible: [0] 58251 final dependency list [JobId(58249), JobId(58250)] = downstairs upstairs = 1
109722023-09-22T23:14:18.211ZINFOcrucible: RE:2 Wait for result from reopen command 58252:57253 upstairs = 1
109732023-09-22T23:14:18.211ZINFOcrucible: Start extent 3 repair task = repair upstairs = 1
109742023-09-22T23:14:18.212ZINFOcrucible: RE:3 repair extent with ids 58273,58274,58275,58276 deps:[JobId(58260), JobId(58259), JobId(58236), JobId(58235), JobId(58212), JobId(58211)] upstairs = 1
109752023-09-22T23:14:18.212ZINFOcrucible: RE:3 close id:58273 queued, notify DS upstairs = 1
109762023-09-22T23:14:18.212ZINFOcrucible: RE:3 Wait for result from close command 58273:57274 upstairs = 1
109772023-09-22T23:14:18.212ZINFOcrucible: [0] 58273 final dependency list [] = downstairs upstairs = 1
109782023-09-22T23:14:18.212ZINFOcrucible: [0] 58276 final dependency list [JobId(58273), JobId(58274), JobId(58275)] = downstairs upstairs = 1
109792023-09-22T23:14:18.212ZINFOcrucible: [0] 58277 final dependency list [JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
109802023-09-22T23:14:18.213ZINFOcrucible: [0] 58278 final dependency list [JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
109812023-09-22T23:14:18.214ZINFOcrucible: [0] 58279 final dependency list [JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
109822023-09-22T23:14:18.214ZINFOcrucible: [0] 58280 final dependency list [JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
109832023-09-22T23:14:18.215ZINFOcrucible: [0] 58281 final dependency list [JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
109842023-09-22T23:14:18.215ZINFOcrucible: [0] 58282 final dependency list [JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
109852023-09-22T23:14:18.216ZINFOcrucible: [0] 58283 final dependency list [JobId(58276), JobId(58273)] = downstairs upstairs = 1
109862023-09-22T23:14:18.216ZWARNcrucible: Write to Extent 3:0:9 under repair upstairs = 1
109872023-09-22T23:14:18.217ZINFOcrucible: [0] 58284 final dependency list [JobId(58283), JobId(58276), JobId(58273)] = downstairs upstairs = 1
109882023-09-22T23:14:18.223ZINFOcrucible: Repair for extent 3 s:2 d:[ClientId(0)] = downstairs upstairs = 1
109892023-09-22T23:14:18.223ZINFOcrucible: RE:3 Wait for result from repair command 58274:57275 upstairs = 1
109902023-09-22T23:14:18.223ZINFOcrucible: [0] 58274 final dependency list [JobId(58273)] = downstairs upstairs = 1
109912023-09-22T23:14:18.224ZINFOcrucible: RE:3 Wait for result from NoOp command 58275:57276 upstairs = 1
109922023-09-22T23:14:18.224ZINFOcrucible: [0] 58275 final dependency list [JobId(58273), JobId(58274)] = downstairs upstairs = 1
109932023-09-22T23:14:18.227ZINFOcrucible: RE:3 Wait for result from reopen command 58276:57277 upstairs = 1
109942023-09-22T23:14:18.227ZINFOcrucible: Start extent 4 repair task = repair upstairs = 1
109952023-09-22T23:14:18.227ZINFOcrucible: RE:4 repair extent with ids 58297,58298,58299,58300 deps:[JobId(58286), JobId(58285), JobId(58262), JobId(58261), JobId(58238), JobId(58237), JobId(58214), JobId(58213)] upstairs = 1
109962023-09-22T23:14:18.227ZINFOcrucible: RE:4 close id:58297 queued, notify DS upstairs = 1
109972023-09-22T23:14:18.227ZINFOcrucible: RE:4 Wait for result from close command 58297:57298 upstairs = 1
109982023-09-22T23:14:18.227ZINFOcrucible: [0] 58297 final dependency list [] = downstairs upstairs = 1
109992023-09-22T23:14:18.227ZINFOcrucible: [0] 58300 final dependency list [JobId(58297), JobId(58298), JobId(58299)] = downstairs upstairs = 1
110002023-09-22T23:14:18.227ZINFOcrucible: [0] 58301 final dependency list [JobId(58278), JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
110012023-09-22T23:14:18.228ZINFOcrucible: [0] 58302 final dependency list [JobId(58301), JobId(58278), JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
110022023-09-22T23:14:18.229ZINFOcrucible: [0] 58303 final dependency list [JobId(58280), JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
110032023-09-22T23:14:18.229ZINFOcrucible: [0] 58304 final dependency list [JobId(58303), JobId(58280), JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
110042023-09-22T23:14:18.230ZINFOcrucible: [0] 58305 final dependency list [JobId(58282), JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
110052023-09-22T23:14:18.231ZINFOcrucible: [0] 58306 final dependency list [JobId(58305), JobId(58282), JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
110062023-09-22T23:14:18.231ZINFOcrucible: [0] 58307 final dependency list [JobId(58284), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
110072023-09-22T23:14:18.232ZINFOcrucible: [0] 58308 final dependency list [JobId(58307), JobId(58284), JobId(58283), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
110082023-09-22T23:14:18.232ZINFOcrucible: [0] 58309 final dependency list [JobId(58300), JobId(58297)] = downstairs upstairs = 1
110092023-09-22T23:14:18.233ZWARNcrucible: Write to Extent 4:0:9 under repair upstairs = 1
110102023-09-22T23:14:18.233ZINFOcrucible: [0] 58310 final dependency list [JobId(58309), JobId(58300), JobId(58297)] = downstairs upstairs = 1
110112023-09-22T23:14:18.239ZINFOcrucible: Repair for extent 4 s:2 d:[ClientId(0)] = downstairs upstairs = 1
110122023-09-22T23:14:18.239ZINFOcrucible: RE:4 Wait for result from repair command 58298:57299 upstairs = 1
110132023-09-22T23:14:18.239ZINFOcrucible: [0] 58298 final dependency list [JobId(58297)] = downstairs upstairs = 1
110142023-09-22T23:14:18.239ZINFOcrucible: RE:4 Wait for result from NoOp command 58299:57300 upstairs = 1
110152023-09-22T23:14:18.239ZINFOcrucible: [0] 58299 final dependency list [JobId(58297), JobId(58298)] = downstairs upstairs = 1
110162023-09-22T23:14:18.242ZINFOcrucible: RE:4 Wait for result from reopen command 58300:57301 upstairs = 1
110172023-09-22T23:14:18.242ZINFOcrucible: Start extent 5 repair task = repair upstairs = 1
110182023-09-22T23:14:18.242ZINFOcrucible: RE:5 repair extent with ids 58321,58322,58323,58324 deps:[JobId(58312), JobId(58311), JobId(58288), JobId(58287), JobId(58264), JobId(58263), JobId(58240), JobId(58239), JobId(58216), JobId(58215)] upstairs = 1
110192023-09-22T23:14:18.242ZINFOcrucible: RE:5 close id:58321 queued, notify DS upstairs = 1
110202023-09-22T23:14:18.242ZINFOcrucible: RE:5 Wait for result from close command 58321:57322 upstairs = 1
110212023-09-22T23:14:18.242ZINFOcrucible: [0] 58321 final dependency list [] = downstairs upstairs = 1
110222023-09-22T23:14:18.242ZINFOcrucible: [0] 58324 final dependency list [JobId(58321), JobId(58322), JobId(58323)] = downstairs upstairs = 1
110232023-09-22T23:14:18.243ZINFOcrucible: [0] 58325 final dependency list [JobId(58302), JobId(58278), JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
110242023-09-22T23:14:18.244ZINFOcrucible: [0] 58326 final dependency list [JobId(58325), JobId(58302), JobId(58301), JobId(58278), JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
110252023-09-22T23:14:18.244ZINFOcrucible: [0] 58327 final dependency list [JobId(58304), JobId(58280), JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
110262023-09-22T23:14:18.245ZINFOcrucible: [0] 58328 final dependency list [JobId(58327), JobId(58304), JobId(58303), JobId(58280), JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
110272023-09-22T23:14:18.246ZINFOcrucible: [0] 58329 final dependency list [JobId(58306), JobId(58282), JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
110282023-09-22T23:14:18.246ZINFOcrucible: [0] 58330 final dependency list [JobId(58329), JobId(58306), JobId(58305), JobId(58282), JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
110292023-09-22T23:14:18.247ZINFOcrucible: [0] 58331 final dependency list [JobId(58308), JobId(58284), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
110302023-09-22T23:14:18.247ZINFOcrucible: [0] 58332 final dependency list [JobId(58331), JobId(58308), JobId(58307), JobId(58284), JobId(58283), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
110312023-09-22T23:14:18.248ZINFOcrucible: [0] 58333 final dependency list [JobId(58310), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
110322023-09-22T23:14:18.249ZINFOcrucible: [0] 58334 final dependency list [JobId(58333), JobId(58310), JobId(58309), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
110332023-09-22T23:14:18.249ZINFOcrucible: [0] 58335 final dependency list [JobId(58324), JobId(58321)] = downstairs upstairs = 1
110342023-09-22T23:14:18.250ZWARNcrucible: Write to Extent 5:0:9 under repair upstairs = 1
110352023-09-22T23:14:18.250ZINFOcrucible: [0] 58336 final dependency list [JobId(58335), JobId(58324), JobId(58321)] = downstairs upstairs = 1
110362023-09-22T23:14:18.255ZINFOcrucible: Repair for extent 5 s:2 d:[ClientId(0)] = downstairs upstairs = 1
110372023-09-22T23:14:18.255ZINFOcrucible: RE:5 Wait for result from repair command 58322:57323 upstairs = 1
110382023-09-22T23:14:18.255ZINFOcrucible: [0] 58322 final dependency list [JobId(58321)] = downstairs upstairs = 1
110392023-09-22T23:14:18.255ZINFOcrucible: RE:5 Wait for result from NoOp command 58323:57324 upstairs = 1
110402023-09-22T23:14:18.255ZINFOcrucible: [0] 58323 final dependency list [JobId(58321), JobId(58322)] = downstairs upstairs = 1
110412023-09-22T23:14:18.258ZINFOcrucible: RE:5 Wait for result from reopen command 58324:57325 upstairs = 1
110422023-09-22T23:14:18.258ZINFOcrucible: Start extent 6 repair task = repair upstairs = 1
110432023-09-22T23:14:18.258ZINFOcrucible: RE:6 repair extent with ids 58345,58346,58347,58348 deps:[JobId(58338), JobId(58337), JobId(58314), JobId(58313), JobId(58290), JobId(58289), JobId(58266), JobId(58265), JobId(58242), JobId(58241), JobId(58218), JobId(58217)] upstairs = 1
110442023-09-22T23:14:18.258ZINFOcrucible: RE:6 close id:58345 queued, notify DS upstairs = 1
110452023-09-22T23:14:18.258ZINFOcrucible: RE:6 Wait for result from close command 58345:57346 upstairs = 1
110462023-09-22T23:14:18.259ZINFOcrucible: [0] 58345 final dependency list [] = downstairs upstairs = 1
110472023-09-22T23:14:18.259ZINFOcrucible: [0] 58348 final dependency list [JobId(58345), JobId(58346), JobId(58347)] = downstairs upstairs = 1
110482023-09-22T23:14:18.259ZINFOcrucible: [0] 58349 final dependency list [JobId(58326), JobId(58302), JobId(58278), JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
110492023-09-22T23:14:18.260ZINFOcrucible: [0] 58350 final dependency list [JobId(58349), JobId(58326), JobId(58325), JobId(58302), JobId(58301), JobId(58278), JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
110502023-09-22T23:14:18.260ZINFOcrucible: [0] 58351 final dependency list [JobId(58328), JobId(58304), JobId(58280), JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
110512023-09-22T23:14:18.261ZINFOcrucible: [0] 58352 final dependency list [JobId(58351), JobId(58328), JobId(58327), JobId(58304), JobId(58303), JobId(58280), JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
110522023-09-22T23:14:18.262ZINFOcrucible: [0] 58353 final dependency list [JobId(58330), JobId(58306), JobId(58282), JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
110532023-09-22T23:14:18.262ZINFOcrucible: [0] 58354 final dependency list [JobId(58353), JobId(58330), JobId(58329), JobId(58306), JobId(58305), JobId(58282), JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
110542023-09-22T23:14:18.263ZINFOcrucible: [0] 58355 final dependency list [JobId(58332), JobId(58308), JobId(58284), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
110552023-09-22T23:14:18.264ZINFOcrucible: [0] 58356 final dependency list [JobId(58355), JobId(58332), JobId(58331), JobId(58308), JobId(58307), JobId(58284), JobId(58283), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
110562023-09-22T23:14:18.264ZINFOcrucible: [0] 58357 final dependency list [JobId(58334), JobId(58310), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
110572023-09-22T23:14:18.265ZINFOcrucible: [0] 58358 final dependency list [JobId(58357), JobId(58334), JobId(58333), JobId(58310), JobId(58309), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
110582023-09-22T23:14:18.265ZINFOcrucible: [0] 58359 final dependency list [JobId(58336), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
110592023-09-22T23:14:18.266ZINFOcrucible: [0] 58360 final dependency list [JobId(58359), JobId(58336), JobId(58335), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
110602023-09-22T23:14:18.267ZINFOcrucible: [0] 58361 final dependency list [JobId(58348), JobId(58345)] = downstairs upstairs = 1
110612023-09-22T23:14:18.267ZWARNcrucible: Write to Extent 6:0:9 under repair upstairs = 1
110622023-09-22T23:14:18.267ZINFOcrucible: [0] 58362 final dependency list [JobId(58361), JobId(58348), JobId(58345)] = downstairs upstairs = 1
110632023-09-22T23:14:18.271ZINFOcrucible: Repair for extent 6 s:2 d:[ClientId(0)] = downstairs upstairs = 1
110642023-09-22T23:14:18.271ZINFOcrucible: RE:6 Wait for result from repair command 58346:57347 upstairs = 1
110652023-09-22T23:14:18.271ZINFOcrucible: [0] 58346 final dependency list [JobId(58345)] = downstairs upstairs = 1
110662023-09-22T23:14:18.272ZINFOcrucible: RE:6 Wait for result from NoOp command 58347:57348 upstairs = 1
110672023-09-22T23:14:18.272ZINFOcrucible: [0] 58347 final dependency list [JobId(58345), JobId(58346)] = downstairs upstairs = 1
110682023-09-22T23:14:18.275ZINFOcrucible: RE:6 Wait for result from reopen command 58348:57349 upstairs = 1
110692023-09-22T23:14:18.275ZINFOcrucible: Start extent 7 repair task = repair upstairs = 1
110702023-09-22T23:14:18.275ZINFOcrucible: RE:7 repair extent with ids 58369,58370,58371,58372 deps:[JobId(58364), JobId(58363), JobId(58340), JobId(58339), JobId(58316), JobId(58315), JobId(58292), JobId(58291), JobId(58268), JobId(58267), JobId(58244), JobId(58243), JobId(58220), JobId(58219)] upstairs = 1
110712023-09-22T23:14:18.275ZINFOcrucible: RE:7 close id:58369 queued, notify DS upstairs = 1
110722023-09-22T23:14:18.275ZINFOcrucible: RE:7 Wait for result from close command 58369:57370 upstairs = 1
110732023-09-22T23:14:18.275ZINFOcrucible: [0] 58369 final dependency list [] = downstairs upstairs = 1
110742023-09-22T23:14:18.275ZINFOcrucible: [0] 58372 final dependency list [JobId(58369), JobId(58370), JobId(58371)] = downstairs upstairs = 1
110752023-09-22T23:14:18.276ZINFOcrucible: [0] 58373 final dependency list [JobId(58350), JobId(58326), JobId(58302), JobId(58278), JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
110762023-09-22T23:14:18.276ZINFOcrucible: [0] 58374 final dependency list [JobId(58373), JobId(58350), JobId(58349), JobId(58326), JobId(58325), JobId(58302), JobId(58301), JobId(58278), JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
110772023-09-22T23:14:18.277ZINFOcrucible: [0] 58375 final dependency list [JobId(58352), JobId(58328), JobId(58304), JobId(58280), JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
110782023-09-22T23:14:18.278ZINFOcrucible: [0] 58376 final dependency list [JobId(58375), JobId(58352), JobId(58351), JobId(58328), JobId(58327), JobId(58304), JobId(58303), JobId(58280), JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
110792023-09-22T23:14:18.278ZINFOcrucible: [0] 58377 final dependency list [JobId(58354), JobId(58330), JobId(58306), JobId(58282), JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
110802023-09-22T23:14:18.279ZINFOcrucible: [0] 58378 final dependency list [JobId(58377), JobId(58354), JobId(58353), JobId(58330), JobId(58329), JobId(58306), JobId(58305), JobId(58282), JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
110812023-09-22T23:14:18.280ZINFOcrucible: [0] 58379 final dependency list [JobId(58356), JobId(58332), JobId(58308), JobId(58284), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
110822023-09-22T23:14:18.280ZINFOcrucible: [0] 58380 final dependency list [JobId(58379), JobId(58356), JobId(58355), JobId(58332), JobId(58331), JobId(58308), JobId(58307), JobId(58284), JobId(58283), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
110832023-09-22T23:14:18.281ZINFOcrucible: [0] 58381 final dependency list [JobId(58358), JobId(58334), JobId(58310), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
110842023-09-22T23:14:18.282ZINFOcrucible: [0] 58382 final dependency list [JobId(58381), JobId(58358), JobId(58357), JobId(58334), JobId(58333), JobId(58310), JobId(58309), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
110852023-09-22T23:14:18.282ZINFOcrucible: [0] 58383 final dependency list [JobId(58360), JobId(58336), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
110862023-09-22T23:14:18.283ZINFOcrucible: [0] 58384 final dependency list [JobId(58383), JobId(58360), JobId(58359), JobId(58336), JobId(58335), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
110872023-09-22T23:14:18.283ZINFOcrucible: [0] 58385 final dependency list [JobId(58362), JobId(58348), JobId(58347), JobId(58346), JobId(58345)] = downstairs upstairs = 1
110882023-09-22T23:14:18.284ZINFOcrucible: [0] 58386 final dependency list [JobId(58385), JobId(58362), JobId(58361), JobId(58348), JobId(58347), JobId(58346), JobId(58345)] = downstairs upstairs = 1
110892023-09-22T23:14:18.285ZINFOcrucible: [0] 58387 final dependency list [JobId(58372), JobId(58369)] = downstairs upstairs = 1
110902023-09-22T23:14:18.285ZWARNcrucible: Write to Extent 7:0:9 under repair upstairs = 1
110912023-09-22T23:14:18.285ZINFOcrucible: [0] 58388 final dependency list [JobId(58387), JobId(58372), JobId(58369)] = downstairs upstairs = 1
110922023-09-22T23:14:18.288ZINFOcrucible: Repair for extent 7 s:2 d:[ClientId(0)] = downstairs upstairs = 1
110932023-09-22T23:14:18.288ZINFOcrucible: RE:7 Wait for result from repair command 58370:57371 upstairs = 1
110942023-09-22T23:14:18.288ZINFOcrucible: [0] 58370 final dependency list [JobId(58369)] = downstairs upstairs = 1
110952023-09-22T23:14:18.289ZINFOcrucible: RE:7 Wait for result from NoOp command 58371:57372 upstairs = 1
110962023-09-22T23:14:18.289ZINFOcrucible: [0] 58371 final dependency list [JobId(58369), JobId(58370)] = downstairs upstairs = 1
110972023-09-22T23:14:18.292ZINFOcrucible: RE:7 Wait for result from reopen command 58372:57373 upstairs = 1
110982023-09-22T23:14:18.292ZINFOcrucible: Start extent 8 repair task = repair upstairs = 1
110992023-09-22T23:14:18.292ZINFOcrucible: RE:8 repair extent with ids 58393,58394,58395,58396 deps:[JobId(58390), JobId(58389), JobId(58366), JobId(58365), JobId(58342), JobId(58341), JobId(58318), JobId(58317), JobId(58294), JobId(58293), JobId(58270), JobId(58269), JobId(58246), JobId(58245), JobId(58222), JobId(58221)] upstairs = 1
111002023-09-22T23:14:18.292ZINFOcrucible: RE:8 close id:58393 queued, notify DS upstairs = 1
111012023-09-22T23:14:18.292ZINFOcrucible: RE:8 Wait for result from close command 58393:57394 upstairs = 1
111022023-09-22T23:14:18.292ZINFOcrucible: [0] 58393 final dependency list [] = downstairs upstairs = 1
111032023-09-22T23:14:18.292ZINFOcrucible: [0] 58396 final dependency list [JobId(58393), JobId(58394), JobId(58395)] = downstairs upstairs = 1
111042023-09-22T23:14:18.293ZINFOcrucible: [0] 58397 final dependency list [JobId(58374), JobId(58350), JobId(58326), JobId(58302), JobId(58278), JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111052023-09-22T23:14:18.294ZINFOcrucible: [0] 58398 final dependency list [JobId(58397), JobId(58374), JobId(58373), JobId(58350), JobId(58349), JobId(58326), JobId(58325), JobId(58302), JobId(58301), JobId(58278), JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111062023-09-22T23:14:18.294ZINFOcrucible: [0] 58399 final dependency list [JobId(58376), JobId(58352), JobId(58328), JobId(58304), JobId(58280), JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111072023-09-22T23:14:18.295ZINFOcrucible: [0] 58400 final dependency list [JobId(58399), JobId(58376), JobId(58375), JobId(58352), JobId(58351), JobId(58328), JobId(58327), JobId(58304), JobId(58303), JobId(58280), JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111082023-09-22T23:14:18.296ZINFOcrucible: [0] 58401 final dependency list [JobId(58378), JobId(58354), JobId(58330), JobId(58306), JobId(58282), JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
111092023-09-22T23:14:18.296ZINFOcrucible: [0] 58402 final dependency list [JobId(58401), JobId(58378), JobId(58377), JobId(58354), JobId(58353), JobId(58330), JobId(58329), JobId(58306), JobId(58305), JobId(58282), JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
111102023-09-22T23:14:18.297ZINFOcrucible: [0] 58403 final dependency list [JobId(58380), JobId(58356), JobId(58332), JobId(58308), JobId(58284), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
111112023-09-22T23:14:18.297ZINFOcrucible: [0] 58404 final dependency list [JobId(58403), JobId(58380), JobId(58379), JobId(58356), JobId(58355), JobId(58332), JobId(58331), JobId(58308), JobId(58307), JobId(58284), JobId(58283), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
111122023-09-22T23:14:18.298ZINFOcrucible: [0] 58405 final dependency list [JobId(58382), JobId(58358), JobId(58334), JobId(58310), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
111132023-09-22T23:14:18.299ZINFOcrucible: [0] 58406 final dependency list [JobId(58405), JobId(58382), JobId(58381), JobId(58358), JobId(58357), JobId(58334), JobId(58333), JobId(58310), JobId(58309), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
111142023-09-22T23:14:18.299ZINFOcrucible: [0] 58407 final dependency list [JobId(58384), JobId(58360), JobId(58336), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
111152023-09-22T23:14:18.300ZINFOcrucible: [0] 58408 final dependency list [JobId(58407), JobId(58384), JobId(58383), JobId(58360), JobId(58359), JobId(58336), JobId(58335), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
111162023-09-22T23:14:18.301ZINFOcrucible: [0] 58409 final dependency list [JobId(58386), JobId(58362), JobId(58348), JobId(58347), JobId(58346), JobId(58345)] = downstairs upstairs = 1
111172023-09-22T23:14:18.301ZINFOcrucible: [0] 58410 final dependency list [JobId(58409), JobId(58386), JobId(58385), JobId(58362), JobId(58361), JobId(58348), JobId(58347), JobId(58346), JobId(58345)] = downstairs upstairs = 1
111182023-09-22T23:14:18.302ZINFOcrucible: [0] 58411 final dependency list [JobId(58388), JobId(58372), JobId(58371), JobId(58370), JobId(58369)] = downstairs upstairs = 1
111192023-09-22T23:14:18.303ZINFOcrucible: [0] 58412 final dependency list [JobId(58411), JobId(58388), JobId(58387), JobId(58372), JobId(58371), JobId(58370), JobId(58369)] = downstairs upstairs = 1
111202023-09-22T23:14:18.303ZINFOcrucible: [0] 58413 final dependency list [JobId(58396), JobId(58393)] = downstairs upstairs = 1
111212023-09-22T23:14:18.304ZWARNcrucible: Write to Extent 8:0:9 under repair upstairs = 1
111222023-09-22T23:14:18.304ZINFOcrucible: [0] 58414 final dependency list [JobId(58413), JobId(58396), JobId(58393)] = downstairs upstairs = 1
111232023-09-22T23:14:18.306ZINFOcrucible: Repair for extent 8 s:2 d:[ClientId(0)] = downstairs upstairs = 1
111242023-09-22T23:14:18.306ZINFOcrucible: RE:8 Wait for result from repair command 58394:57395 upstairs = 1
111252023-09-22T23:14:18.306ZINFOcrucible: [0] 58394 final dependency list [JobId(58393)] = downstairs upstairs = 1
111262023-09-22T23:14:18.306ZINFOcrucible: RE:8 Wait for result from NoOp command 58395:57396 upstairs = 1
111272023-09-22T23:14:18.306ZINFOcrucible: [0] 58395 final dependency list [JobId(58393), JobId(58394)] = downstairs upstairs = 1
111282023-09-22T23:14:18.310ZINFOcrucible: RE:8 Wait for result from reopen command 58396:57397 upstairs = 1
111292023-09-22T23:14:18.310ZINFOcrucible: Start extent 9 repair task = repair upstairs = 1
111302023-09-22T23:14:18.310ZINFOcrucible: RE:9 repair extent with ids 58417,58418,58419,58420 deps:[JobId(58416), JobId(58415), JobId(58392), JobId(58391), JobId(58368), JobId(58367), JobId(58344), JobId(58343), JobId(58320), JobId(58319), JobId(58296), JobId(58295), JobId(58272), JobId(58271), JobId(58248), JobId(58247), JobId(58224), JobId(58223)] upstairs = 1
111312023-09-22T23:14:18.310ZINFOcrucible: RE:9 close id:58417 queued, notify DS upstairs = 1
111322023-09-22T23:14:18.310ZINFOcrucible: RE:9 Wait for result from close command 58417:57418 upstairs = 1
111332023-09-22T23:14:18.310ZINFOcrucible: [0] 58417 final dependency list [] = downstairs upstairs = 1
111342023-09-22T23:14:18.310ZINFOcrucible: [0] 58420 final dependency list [JobId(58417), JobId(58418), JobId(58419)] = downstairs upstairs = 1
111352023-09-22T23:14:18.311ZINFOcrucible: [0] 58421 final dependency list [JobId(58398), JobId(58374), JobId(58350), JobId(58326), JobId(58302), JobId(58278), JobId(58254), JobId(58230), JobId(58206), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111362023-09-22T23:14:18.311ZINFOcrucible: [0] 58422 final dependency list [JobId(58421), JobId(58398), JobId(58397), JobId(58374), JobId(58373), JobId(58350), JobId(58349), JobId(58326), JobId(58325), JobId(58302), JobId(58301), JobId(58278), JobId(58277), JobId(58254), JobId(58253), JobId(58230), JobId(58229), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111372023-09-22T23:14:18.312ZINFOcrucible: [0] 58423 final dependency list [JobId(58400), JobId(58376), JobId(58352), JobId(58328), JobId(58304), JobId(58280), JobId(58256), JobId(58232), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111382023-09-22T23:14:18.313ZINFOcrucible: [0] 58424 final dependency list [JobId(58423), JobId(58400), JobId(58399), JobId(58376), JobId(58375), JobId(58352), JobId(58351), JobId(58328), JobId(58327), JobId(58304), JobId(58303), JobId(58280), JobId(58279), JobId(58256), JobId(58255), JobId(58232), JobId(58231), JobId(58228), JobId(58227), JobId(58226), JobId(58225)] = downstairs upstairs = 1
111392023-09-22T23:14:18.313ZINFOcrucible: [0] 58425 final dependency list [JobId(58402), JobId(58378), JobId(58354), JobId(58330), JobId(58306), JobId(58282), JobId(58258), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
111402023-09-22T23:14:18.314ZINFOcrucible: [0] 58426 final dependency list [JobId(58425), JobId(58402), JobId(58401), JobId(58378), JobId(58377), JobId(58354), JobId(58353), JobId(58330), JobId(58329), JobId(58306), JobId(58305), JobId(58282), JobId(58281), JobId(58258), JobId(58257), JobId(58252), JobId(58251), JobId(58250), JobId(58249)] = downstairs upstairs = 1
111412023-09-22T23:14:18.315ZINFOcrucible: [0] 58427 final dependency list [JobId(58404), JobId(58380), JobId(58356), JobId(58332), JobId(58308), JobId(58284), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
111422023-09-22T23:14:18.315ZINFOcrucible: [0] 58428 final dependency list [JobId(58427), JobId(58404), JobId(58403), JobId(58380), JobId(58379), JobId(58356), JobId(58355), JobId(58332), JobId(58331), JobId(58308), JobId(58307), JobId(58284), JobId(58283), JobId(58276), JobId(58275), JobId(58274), JobId(58273)] = downstairs upstairs = 1
111432023-09-22T23:14:18.316ZINFOcrucible: [0] 58429 final dependency list [JobId(58406), JobId(58382), JobId(58358), JobId(58334), JobId(58310), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
111442023-09-22T23:14:18.317ZINFOcrucible: [0] 58430 final dependency list [JobId(58429), JobId(58406), JobId(58405), JobId(58382), JobId(58381), JobId(58358), JobId(58357), JobId(58334), JobId(58333), JobId(58310), JobId(58309), JobId(58300), JobId(58299), JobId(58298), JobId(58297)] = downstairs upstairs = 1
111452023-09-22T23:14:18.317ZINFOcrucible: [0] 58431 final dependency list [JobId(58408), JobId(58384), JobId(58360), JobId(58336), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
111462023-09-22T23:14:18.318ZINFOcrucible: [0] 58432 final dependency list [JobId(58431), JobId(58408), JobId(58407), JobId(58384), JobId(58383), JobId(58360), JobId(58359), JobId(58336), JobId(58335), JobId(58324), JobId(58323), JobId(58322), JobId(58321)] = downstairs upstairs = 1
111472023-09-22T23:14:18.318ZINFOcrucible: [0] 58433 final dependency list [JobId(58410), JobId(58386), JobId(58362), JobId(58348), JobId(58347), JobId(58346), JobId(58345)] = downstairs upstairs = 1
111482023-09-22T23:14:18.319ZINFOcrucible: [0] 58434 final dependency list [JobId(58433), JobId(58410), JobId(58409), JobId(58386), JobId(58385), JobId(58362), JobId(58361), JobId(58348), JobId(58347), JobId(58346), JobId(58345)] = downstairs upstairs = 1
111492023-09-22T23:14:18.320ZINFOcrucible: [0] 58435 final dependency list [JobId(58412), JobId(58388), JobId(58372), JobId(58371), JobId(58370), JobId(58369)] = downstairs upstairs = 1
111502023-09-22T23:14:18.320ZINFOcrucible: [0] 58436 final dependency list [JobId(58435), JobId(58412), JobId(58411), JobId(58388), JobId(58387), JobId(58372), JobId(58371), JobId(58370), JobId(58369)] = downstairs upstairs = 1
111512023-09-22T23:14:18.321ZINFOcrucible: [0] 58437 final dependency list [JobId(58414), JobId(58396), JobId(58395), JobId(58394), JobId(58393)] = downstairs upstairs = 1
111522023-09-22T23:14:18.322ZINFOcrucible: [0] 58438 final dependency list [JobId(58437), JobId(58414), JobId(58413), JobId(58396), JobId(58395), JobId(58394), JobId(58393)] = downstairs upstairs = 1
111532023-09-22T23:14:18.322ZINFOcrucible: [0] 58439 final dependency list [JobId(58420), JobId(58417)] = downstairs upstairs = 1
111542023-09-22T23:14:18.323ZWARNcrucible: Write to Extent 9:0:9 under repair upstairs = 1
111552023-09-22T23:14:18.323ZINFOcrucible: [0] 58440 final dependency list [JobId(58439), JobId(58420), JobId(58417)] = downstairs upstairs = 1
111562023-09-22T23:14:18.324ZINFOcrucible: Repair for extent 9 s:2 d:[ClientId(0)] = downstairs upstairs = 1
111572023-09-22T23:14:18.324ZINFOcrucible: RE:9 Wait for result from repair command 58418:57419 upstairs = 1
111582023-09-22T23:14:18.324ZINFOcrucible: [0] 58418 final dependency list [JobId(58417)] = downstairs upstairs = 1
111592023-09-22T23:14:18.324ZINFOcrucible: RE:9 Wait for result from NoOp command 58419:57420 upstairs = 1
111602023-09-22T23:14:18.324ZINFOcrucible: [0] 58419 final dependency list [JobId(58417), JobId(58418)] = downstairs upstairs = 1
111612023-09-22T23:14:18.328ZINFOcrucible: RE:9 Wait for result from reopen command 58420:57421 upstairs = 1
111622023-09-22T23:14:18.328ZINFOcrucible: LiveRepair final flush submitted upstairs = 1
111632023-09-22T23:14:18.328ZINFOcrucible: [0] 58441 final dependency list [JobId(58440), JobId(58439), JobId(58438), JobId(58437), JobId(58436), JobId(58435), JobId(58434), JobId(58433), JobId(58432), JobId(58431), JobId(58430), JobId(58429), JobId(58428), JobId(58427), JobId(58426), JobId(58425), JobId(58424), JobId(58423), JobId(58422), JobId(58421), JobId(58420), JobId(58419), JobId(58418), JobId(58417), JobId(58414), JobId(58413), JobId(58412), JobId(58411), JobId(58410), JobId(58409), JobId(58408), JobId(58407), JobId(58406), JobId(58405), JobId(58404), JobId(58403), JobId(58402), JobId(58401), JobId(58400), JobId(58399), JobId(58398), JobId(58397), JobId(58396), JobId(58395), JobId(58394), JobId(58393), JobId(58388), JobId(58387), JobId(58386), JobId(58385), JobId(58384), JobId(58383), JobId(58382), JobId(58381), JobId(58380), JobId(58379), JobId(58378), JobId(58377), JobId(58376), JobId(58375), JobId(58374), JobId(58373), JobId(58372), JobId(58371), JobId(58370), JobId(58369), JobId(58362), JobId(58361), JobId(58360), JobId(58359), JobId(58358), JobId(58357), JobId(58356), JobId(58355), JobId(58354), JobId(58353), JobId(58352), JobId(58351), JobId(58350), JobId(58349), JobId(58348), JobId(58347), JobId(58346), JobId(58345), JobId(58336), JobId(58335), JobId(58334), JobId(58333), JobId(58332), JobId(58331), JobId(58330), JobId(58329), JobId(58328), JobId(58327), JobId(58326), JobId(58325), JobId(58324), JobId(58323), JobId(58322), JobId(58321), JobId(58310), JobId(58309), JobId(58308), JobId(58307), JobId(58306), JobId(58305), JobId(58304), JobId(58303), JobId(58302), JobId(58301), JobId(58300), JobId(58299), JobId(58298), JobId(58297), JobId(58284), JobId(58283), JobId(58282), JobId(58281), JobId(58280), JobId(58279), JobId(58278), JobId(58277), JobId(58276), JobId(58275), JobId(58274), JobId(58273), JobId(58258), JobId(58257), JobId(58256), JobId(58255), JobId(58254), JobId(58253), JobId(58252), JobId(58251), JobId(58250), JobId(58249), JobId(58232), 
JobId(58231), JobId(58230), JobId(58229), JobId(58228), JobId(58227), JobId(58226), JobId(58225), JobId(58206), JobId(58205), JobId(58204), JobId(58203), JobId(58202), JobId(58201)] = downstairs upstairs = 1
111642023-09-22T23:14:18.330ZINFOcrucible: LiveRepair final flush completed upstairs = 1
111652023-09-22T23:14:18.330ZINFOcrucible: [0] 024c3783-b9b4-4453-823f-769dad90d4f2 (db076280-2529-45a1-a093-a3cd3f0799dc) LiveRepair Active Active ds_transition to Active upstairs = 1
111662023-09-22T23:14:18.330ZINFOcrucible: [0] Transition from LiveRepair to Active upstairs = 1
111672023-09-22T23:14:18.330ZWARNcrucible: Live Repair returns Ok(()) upstairs = 1
11168 test dummy_downstairs_tests::protocol_test::test_successful_live_repair ... ok
111692023-09-22T23:14:22.825ZINFOcrucible: sent read response for job 0 = 1000
111702023-09-22T23:14:22.825ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 looper connected looper = 0 upstairs = 1
111712023-09-22T23:14:22.825ZINFOcrucible: [0] Proc runs for 127.0.0.1:39941 in state Faulted upstairs = 1
111722023-09-22T23:14:22.825ZERROcrucible: could not send read response for job 1 = 1001: Broken pipe (os error 32)
111732023-09-22T23:14:22.825ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: df6648fa-e480-4dd9-b7c4-7fec45dbddf8, session_id: b793572f-19dc-423b-877e-0f0cfe9147d9, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 1
111742023-09-22T23:14:22.825ZINFOcrucible: [0] upstairs guest_io_ready=TRUE, promote! session b793572f-19dc-423b-877e-0f0cfe9147d9 upstairs = 1
111752023-09-22T23:14:22.825ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: df6648fa-e480-4dd9-b7c4-7fec45dbddf8, session_id: b793572f-19dc-423b-877e-0f0cfe9147d9, gen: 1 } downstairs = 1
111762023-09-22T23:14:22.825ZINFOcrucible: negotiate packet RegionInfoPlease downstairs = 1
111772023-09-22T23:14:22.825ZINFOcrucible: [0] downstairs client at 127.0.0.1:39941 has UUID c7df82f0-e00e-410b-ac45-3ec316aefac5 upstairs = 1
111782023-09-22T23:14:22.825ZINFOcrucible: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: c7df82f0-e00e-410b-ac45-3ec316aefac5, encrypted: false, database_read_version: 1, database_write_version: 1 } upstairs = 1
111792023-09-22T23:14:22.825ZINFOcrucible: Returning client:0 UUID:c7df82f0-e00e-410b-ac45-3ec316aefac5 matches upstairs = 1
111802023-09-22T23:14:22.825ZINFOcrucible: df6648fa-e480-4dd9-b7c4-7fec45dbddf8 Faulted Active Active upstairs = 1
111812023-09-22T23:14:22.825ZINFOcrucible: negotiate packet ExtentVersionsPlease downstairs = 1
111822023-09-22T23:14:22.826ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 (b793572f-19dc-423b-877e-0f0cfe9147d9) Faulted Active Active ds_transition to LiveRepairReady upstairs = 1
111832023-09-22T23:14:22.826ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady upstairs = 1
111842023-09-22T23:14:22.826ZWARNcrucible: [0] new RM replaced this: Some(RegionMetadata { generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], dirty: [false, false, false, false, false, false, false, false, false, false] }) upstairs = 1
111852023-09-22T23:14:22.826ZWARNcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 Enter Ready for LiveRepair mode upstairs = 1
111862023-09-22T23:14:22.826ZINFOcrucible: [0] Starts cmd_loop upstairs = 1
111872023-09-22T23:14:22.826ZINFOcrucible: [0] 127.0.0.1:39941 task reports connection:true upstairs = 1
111882023-09-22T23:14:22.826ZINFOcrucible: df6648fa-e480-4dd9-b7c4-7fec45dbddf8 LiveRepairReady Active Active upstairs = 1
111892023-09-22T23:14:22.826ZINFOcrucible: Set check for repair upstairs = 1
111902023-09-22T23:14:23.587ZINFOcrucible: responded to ping downstairs = 2
111912023-09-22T23:14:23.587ZINFOcrucible: responded to ping downstairs = 3
111922023-09-22T23:14:23.828ZINFOcrucible: Checking if live repair is needed upstairs = 1
111932023-09-22T23:14:23.828ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 (b793572f-19dc-423b-877e-0f0cfe9147d9) LiveRepairReady Active Active ds_transition to LiveRepair upstairs = 1
111942023-09-22T23:14:23.828ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair upstairs = 1
111952023-09-22T23:14:23.828ZINFOcrucible: Live Repair started upstairs = 1
111962023-09-22T23:14:23.828ZWARNcrucible: Live Repair main task begins. task = repair upstairs = 1
111972023-09-22T23:14:23.828ZINFOcrucible: Start Live Repair of extents 0 to 10 task = repair upstairs = 1
111982023-09-22T23:14:23.828ZINFOcrucible: Start extent 0 repair task = repair upstairs = 1
111992023-09-22T23:14:23.828ZINFOcrucible: RE:0 repair extent with ids 58201,58202,58203,58204 deps:[] upstairs = 1
112002023-09-22T23:14:23.828ZINFOcrucible: RE:0 close id:58201 queued, notify DS upstairs = 1
112012023-09-22T23:14:23.828ZINFOcrucible: RE:0 Wait for result from close command 58201:57202 upstairs = 1
112022023-09-22T23:14:23.828ZINFOcrucible: [0] 58201 final dependency list [] = downstairs upstairs = 1
112032023-09-22T23:14:23.828ZINFOcrucible: [0] 58204 final dependency list [JobId(58201), JobId(58202), JobId(58203)] = downstairs upstairs = 1
112042023-09-22T23:14:23.829ZINFOcrucible: Repair for extent 0 s:2 d:[ClientId(0)] = downstairs upstairs = 1
112052023-09-22T23:14:23.829ZINFOcrucible: RE:0 Wait for result from repair command 58202:57203 upstairs = 1
112062023-09-22T23:14:23.829ZINFOcrucible: [0] 58202 final dependency list [JobId(58201)] = downstairs upstairs = 1
112072023-09-22T23:14:23.829ZERROcrucible: dropping ds1 now!
112082023-09-22T23:14:23.829ZERROcrucible: reconnecting ds1 now!
112092023-09-22T23:14:23.829ZERROcrucible: [0] job id 58202 saw error GenericError("bad news, networks are tricky") upstairs = 1
112102023-09-22T23:14:23.829ZERROcrucible: [0] DS Reports error Err(GenericError("bad news, networks are tricky")) on job 58202, DownstairsIO { ds_id: JobId(58202), guest_id: 57203, work: ExtentLiveRepair { dependencies: [JobId(58201)], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:54125, repair_downstairs: [ClientId(0)] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 9 }) } = downstairs upstairs = 1
112112023-09-22T23:14:23.829ZERROcrucible: [0] Reports error GenericError("bad news, networks are tricky") on job 58202, DownstairsIO { ds_id: JobId(58202), guest_id: 57203, work: ExtentLiveRepair { dependencies: [JobId(58201)], extent: 0, source_downstairs: ClientId(2), source_repair_address: 127.0.0.1:54125, repair_downstairs: [ClientId(0)] }, state: ClientData([InProgress, InProgress, InProgress]), ack_status: NotAcked, replay: false, data: None, read_response_hashes: [], impacted_blocks: InclusiveRange(ImpactedAddr { extent_id: 0, block: 0 }, ImpactedAddr { extent_id: 0, block: 9 }) } = downstairs upstairs = 1
112122023-09-22T23:14:23.829ZINFOcrucible: [0] client skip 3 in process jobs because fault = downstairs upstairs = 1
112132023-09-22T23:14:23.829ZINFOcrucible: [0] changed 1 jobs to fault skipped = downstairs upstairs = 1
112142023-09-22T23:14:23.829ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 (b793572f-19dc-423b-877e-0f0cfe9147d9) LiveRepair Active Active ds_transition to Faulted upstairs = 1
112152023-09-22T23:14:23.829ZINFOcrucible: [0] Transition from LiveRepair to Faulted upstairs = 1
112162023-09-22T23:14:23.829ZWARNcrucible: [0] will exit pm_task, this downstairs Faulted upstairs = 1
112172023-09-22T23:14:23.829ZERROcrucible: 127.0.0.1:39941: proc: [0] client work task ended, Ok(Err([0] This downstairs now in Faulted)), so we end too looper = 0 upstairs = 1
112182023-09-22T23:14:23.830ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 Gone missing, transition from Faulted to Faulted upstairs = 1
112192023-09-22T23:14:23.830ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 connection to 127.0.0.1:39941 closed looper = 0 upstairs = 1
112202023-09-22T23:14:23.830ZINFOcrucible: [0] 127.0.0.1:39941 task reports connection:false upstairs = 1
112212023-09-22T23:14:23.830ZINFOcrucible: df6648fa-e480-4dd9-b7c4-7fec45dbddf8 Faulted Active Active upstairs = 1
112222023-09-22T23:14:23.830ZINFOcrucible: [0] 127.0.0.1:39941 task reports offline upstairs = 1
112232023-09-22T23:14:23.830ZERROcrucible: Extent 0 close id:58202 Failed: IO Error: 1 out of 3 downstairs failed to complete this IO upstairs = 1
112242023-09-22T23:14:23.830ZINFOcrucible: RE:0 Wait for result from NoOp command 58203:57204 upstairs = 1
112252023-09-22T23:14:23.830ZERROcrucible: spawn_message_receiver saw disconnect, bailing downstairs = 1
112262023-09-22T23:14:24.831ZERROcrucible: ds1 negotiate start now!
112272023-09-22T23:14:24.831ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 looper connected looper = 0 upstairs = 1
112282023-09-22T23:14:24.831ZINFOcrucible: [0] Proc runs for 127.0.0.1:39941 in state Faulted upstairs = 1
112292023-09-22T23:14:24.831ZINFOcrucible: negotiate packet HereIAm { version: 4, upstairs_id: df6648fa-e480-4dd9-b7c4-7fec45dbddf8, session_id: b793572f-19dc-423b-877e-0f0cfe9147d9, gen: 1, read_only: false, encrypted: false, alternate_versions: [] } downstairs = 1
112302023-09-22T23:14:24.831ZINFOcrucible: [0] upstairs guest_io_ready=TRUE, promote! session b793572f-19dc-423b-877e-0f0cfe9147d9 upstairs = 1
112312023-09-22T23:14:24.831ZINFOcrucible: negotiate packet PromoteToActive { upstairs_id: df6648fa-e480-4dd9-b7c4-7fec45dbddf8, session_id: b793572f-19dc-423b-877e-0f0cfe9147d9, gen: 1 } downstairs = 1
112322023-09-22T23:14:24.831ZINFOcrucible: negotiate packet RegionInfoPlease downstairs = 1
112332023-09-22T23:14:24.831ZERROcrucible: ds1 negotiate extent versions please now!
112342023-09-22T23:14:24.831ZINFOcrucible: [0] downstairs client at 127.0.0.1:39941 has UUID c7df82f0-e00e-410b-ac45-3ec316aefac5 upstairs = 1
112352023-09-22T23:14:24.831ZINFOcrucible: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 10, shift: 9 }, extent_count: 10, uuid: c7df82f0-e00e-410b-ac45-3ec316aefac5, encrypted: false, database_read_version: 1, database_write_version: 1 } upstairs = 1
112362023-09-22T23:14:24.831ZINFOcrucible: Returning client:0 UUID:c7df82f0-e00e-410b-ac45-3ec316aefac5 matches upstairs = 1
112372023-09-22T23:14:24.831ZINFOcrucible: df6648fa-e480-4dd9-b7c4-7fec45dbddf8 Faulted Active Active upstairs = 1
112382023-09-22T23:14:24.831ZINFOcrucible: negotiate packet ExtentVersionsPlease downstairs = 1
112392023-09-22T23:14:24.831ZERROcrucible: ds1 spawn message receiver now!
112402023-09-22T23:14:24.832ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 (b793572f-19dc-423b-877e-0f0cfe9147d9) Faulted Active Active ds_transition to LiveRepairReady upstairs = 1
112412023-09-22T23:14:24.832ZINFOcrucible: [0] Transition from Faulted to LiveRepairReady upstairs = 1
112422023-09-22T23:14:24.832ZWARNcrucible: [0] new RM replaced this: Some(RegionMetadata { generation: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], flush_numbers: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], dirty: [false, false, false, false, false, false, false, false, false, false] }) upstairs = 1
112432023-09-22T23:14:24.832ZWARNcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 Enter Ready for LiveRepair mode upstairs = 1
112442023-09-22T23:14:24.832ZINFOcrucible: [0] Starts cmd_loop upstairs = 1
112452023-09-22T23:14:24.832ZINFOcrucible: [0] 127.0.0.1:39941 task reports connection:true upstairs = 1
112462023-09-22T23:14:24.832ZINFOcrucible: df6648fa-e480-4dd9-b7c4-7fec45dbddf8 LiveRepairReady Active Active upstairs = 1
112472023-09-22T23:14:24.832ZINFOcrucible: Set check for repair upstairs = 1
112482023-09-22T23:14:24.832ZINFOcrucible: RE:0 Wait for result from reopen command 58204:57205 upstairs = 1
112492023-09-22T23:14:24.832ZWARNcrucible: RE:0 Bailing with error upstairs = 1
112502023-09-22T23:14:24.832ZWARNcrucible: Error After extent 0 repair task = repair upstairs = 1
112512023-09-22T23:14:24.832ZINFOcrucible: Start extent 1 repair task = repair upstairs = 1
112522023-09-22T23:14:24.832ZINFOcrucible: extent 1 repair has failed task = repair upstairs = 1
112532023-09-22T23:14:24.832ZWARNcrucible: Exit repair at extent 1 task = repair upstairs = 1
112542023-09-22T23:14:24.832ZINFOcrucible: LiveRepair final flush submitted upstairs = 1
112552023-09-22T23:14:24.834ZINFOcrucible: LiveRepair final flush completed upstairs = 1
112562023-09-22T23:14:24.834ZWARNcrucible: Live Repair returns Ok(()) upstairs = 1
112572023-09-22T23:14:25.833ZINFOcrucible: Checking if live repair is needed upstairs = 1
112582023-09-22T23:14:25.834ZINFOcrucible: [0] df6648fa-e480-4dd9-b7c4-7fec45dbddf8 (b793572f-19dc-423b-877e-0f0cfe9147d9) LiveRepairReady Active Active ds_transition to LiveRepair upstairs = 1
112592023-09-22T23:14:25.834ZINFOcrucible: [0] Transition from LiveRepairReady to LiveRepair upstairs = 1
112602023-09-22T23:14:25.834ZINFOcrucible: Live Repair started upstairs = 1
112612023-09-22T23:14:25.834ZWARNcrucible: Live Repair main task begins. task = repair upstairs = 1
112622023-09-22T23:14:25.834ZINFOcrucible: Start Live Repair of extents 0 to 10 task = repair upstairs = 1
112632023-09-22T23:14:25.834ZINFOcrucible: Start extent 0 repair task = repair upstairs = 1
112642023-09-22T23:14:25.834ZINFOcrucible: RE:0 repair extent with ids 58206,58207,58208,58209 deps:[] upstairs = 1
112652023-09-22T23:14:25.834ZINFOcrucible: RE:0 close id:58206 queued, notify DS upstairs = 1
112662023-09-22T23:14:25.834ZINFOcrucible: RE:0 Wait for result from close command 58206:57207 upstairs = 1
112672023-09-22T23:14:25.834ZINFOcrucible: [0] 58206 final dependency list [] = downstairs upstairs = 1
112682023-09-22T23:14:25.834ZINFOcrucible: [0] 58209 final dependency list [JobId(58206), JobId(58207), JobId(58208)] = downstairs upstairs = 1
11269 test dummy_downstairs_tests::protocol_test::test_error_during_live_repair_no_halt ... ok
11270 
11271 test result: ok. 351 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 383.45s
11272 
11273 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_agent-acbf118b39db417b --nocapture`
11274 
11275 running 11 tests
11276 {"id":"abc","state":"requested","block_size":4096,"extent_size":4096,"extent_count":100,"encrypted":false,"port_number":1701,"cert_pem":null,"key_pem":null,"root_pem":null}
11277 Sep 22 23:14:25.892 INFO region 4d83cb9a-5f4f-448d-919c-4d7ce8fc18b4 state: Requested, component: test_harness_datafile
11278 Sep 22 23:14:25.892 INFO region 6ed761bc-0273-4c2f-b2aa-b5a77c461371 state: Requested, component: test_harness_datafile
11279 Sep 22 23:14:25.892 INFO region cd2faf68-2161-44ae-80cd-734d2ef30327 state: Requested, component: test_harness_datafile
11280 Sep 22 23:14:25.892 INFO region 7c0b8c8e-1d24-4cd7-97b0-47ad837e7f01 state: Requested, component: test_harness_datafile
11281 Sep 22 23:14:25.892 INFO region bd788714-d366-4573-ac0f-64f52c45fa42 state: Requested, component: test_harness_datafile
11282 Sep 22 23:14:25.894 INFO region cd2faf68-2161-44ae-80cd-734d2ef30327 state: Requested -> Tombstoned, component: test_harness_datafile
11283 Sep 22 23:14:25.894 INFO region bd788714-d366-4573-ac0f-64f52c45fa42 state: Requested -> Created, component: test_harness_datafile
11284 Sep 22 23:14:25.894 INFO region 4d83cb9a-5f4f-448d-919c-4d7ce8fc18b4 state: Requested -> Created, component: test_harness_datafile
11285 test model::test::basic ... ok
11286 test test::test_collect_behaviour ... ok
11287 Sep 22 23:14:25.895 INFO region 7c0b8c8e-1d24-4cd7-97b0-47ad837e7f01 state: Requested -> Tombstoned, component: test_harness_datafile
11288 Sep 22 23:14:25.895 INFO region 6ed761bc-0273-4c2f-b2aa-b5a77c461371 state: Requested -> Created, component: test_harness_datafile
11289 Sep 22 23:14:25.895 INFO region c1564dbf-e201-4b70-845d-fcb34017f4f7 state: Requested, component: test_harness_datafile
11290 Sep 22 23:14:25.895 INFO region c1564dbf-e201-4b70-845d-fcb34017f4f7 state: Requested -> Failed, component: test_harness_datafile
11291 Sep 22 23:14:25.895 INFO region b8985630-b020-47f6-828d-9cc22ed12f56 state: Requested, component: test_harness_datafile
11292 Sep 22 23:14:25.895 INFO creating missing downstairs instance downstairs-4d83cb9a-5f4f-448d-919c-4d7ce8fc18b4
11293 Sep 22 23:14:25.895 INFO creating missing downstairs instance downstairs-6ed761bc-0273-4c2f-b2aa-b5a77c461371
11294 Sep 22 23:14:25.895 INFO creating missing downstairs instance downstairs-bd788714-d366-4573-ac0f-64f52c45fa42
11295 Sep 22 23:14:25.895 INFO region b8985630-b020-47f6-828d-9cc22ed12f56 state: Requested -> Created, component: test_harness_datafile
11296 test datafile::test::test_stat_parsing ... ok
11297 Sep 22 23:14:25.896 INFO ok, have oxide/crucible/downstairs:downstairs-6ed761bc-0273-4c2f-b2aa-b5a77c461371
11298 Sep 22 23:14:25.896 INFO creating missing downstairs instance downstairs-b8985630-b020-47f6-828d-9cc22ed12f56
11299 Sep 22 23:14:25.896 INFO ok, have oxide/crucible/downstairs:downstairs-4d83cb9a-5f4f-448d-919c-4d7ce8fc18b4
11300 Sep 22 23:14:25.896 INFO reconfig required, no property group
11301 Sep 22 23:14:25.896 INFO ok, have oxide/crucible/downstairs:downstairs-bd788714-d366-4573-ac0f-64f52c45fa42
11302 Sep 22 23:14:25.896 INFO reconfig required, no property group
11303 Sep 22 23:14:25.896 INFO ok, have oxide/crucible/downstairs:downstairs-b8985630-b020-47f6-828d-9cc22ed12f56
11304 Sep 22 23:14:25.896 INFO reconfig required, no property group
11305 Sep 22 23:14:25.896 INFO reconfig required, no property group
11306 Sep 22 23:14:25.896 INFO creating config property group
11307 Sep 22 23:14:25.896 INFO creating config property group
11308 Sep 22 23:14:25.896 INFO creating config property group
11309 Sep 22 23:14:25.896 INFO reconfiguring oxide/crucible/downstairs:downstairs-bd788714-d366-4573-ac0f-64f52c45fa42
11310 Sep 22 23:14:25.896 INFO reconfiguring oxide/crucible/downstairs:downstairs-6ed761bc-0273-4c2f-b2aa-b5a77c461371
11311 Sep 22 23:14:25.896 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmp95MCpd/regions/bd788714-d366-4573-ac0f-64f52c45fa42
11312 Sep 22 23:14:25.896 INFO reconfiguring oxide/crucible/downstairs:downstairs-b8985630-b020-47f6-828d-9cc22ed12f56
11313 Sep 22 23:14:25.896 INFO ensure port SCF_TYPE_COUNT 1000
11314 Sep 22 23:14:25.896 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmpITQLSK/regions/b8985630-b020-47f6-828d-9cc22ed12f56
11315 Sep 22 23:14:25.896 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmpm5Gb9w/regions/6ed761bc-0273-4c2f-b2aa-b5a77c461371
11316 Sep 22 23:14:25.896 INFO creating config property group
11317 Sep 22 23:14:25.896 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11318 Sep 22 23:14:25.896 INFO commit
11319 Sep 22 23:14:25.896 INFO ensure port SCF_TYPE_COUNT 1000
11320 Sep 22 23:14:25.896 INFO ok!
11321 Sep 22 23:14:25.896 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11322 Sep 22 23:14:25.896 INFO commit
11323 Sep 22 23:14:25.896 INFO ensure port SCF_TYPE_COUNT 1000
11324 Sep 22 23:14:25.896 INFO ok!
11325 Sep 22 23:14:25.896 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11326 Sep 22 23:14:25.896 INFO reconfiguring oxide/crucible/downstairs:downstairs-4d83cb9a-5f4f-448d-919c-4d7ce8fc18b4
11327 Sep 22 23:14:25.896 INFO commit
11328 Sep 22 23:14:25.896 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmpN2BXAr/regions/4d83cb9a-5f4f-448d-919c-4d7ce8fc18b4
11329 Sep 22 23:14:25.896 INFO ok!
11330 Sep 22 23:14:25.896 INFO ensure port SCF_TYPE_COUNT 1000
11331 test test::test_smf_region_create_then_destroy ... ok
11332 Sep 22 23:14:25.896 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11333 Sep 22 23:14:25.896 INFO commit
11334 Sep 22 23:14:25.896 INFO region 6ed761bc-0273-4c2f-b2aa-b5a77c461371 state: Created -> Tombstoned, component: test_harness_datafile
11335 Sep 22 23:14:25.896 INFO ok!
11336 Sep 22 23:14:25.896 INFO creating region b8985630-b020-47f6-828d-9cc22ed12f56 snapshot d7527af5-48af-4f15-92ae-500ac492e369, component: test_snapshot_interface
11337 Sep 22 23:14:25.896 INFO creating region bd788714-d366-4573-ac0f-64f52c45fa42 snapshot 3a99346d-25cc-4829-a850-fc77546058b5, component: test_snapshot_interface
11338 test test::test_smf_region_failed ... ok
11339 Sep 22 23:14:25.896 INFO creating missing downstairs instance downstairs-4d83cb9a-5f4f-448d-919c-4d7ce8fc18b4
11340 Sep 22 23:14:25.896 INFO ok, have oxide/crucible/downstairs:downstairs-4d83cb9a-5f4f-448d-919c-4d7ce8fc18b4
11341 Sep 22 23:14:25.896 INFO reconfig required, no property group
11342 Sep 22 23:14:25.896 INFO creating config property group
11343 Sep 22 23:14:25.896 INFO disabling downstairs instance: downstairs-6ed761bc-0273-4c2f-b2aa-b5a77c461371 (instance states: (Some(Online), None))
11344 Sep 22 23:14:25.896 INFO reconfiguring oxide/crucible/downstairs:downstairs-4d83cb9a-5f4f-448d-919c-4d7ce8fc18b4
11345 Sep 22 23:14:25.896 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmpN2BXAr/regions/4d83cb9a-5f4f-448d-919c-4d7ce8fc18b4
11346 test test::test_smf_datafile_race_region ... ok
11347 Sep 22 23:14:25.896 INFO creating region b8985630-b020-47f6-828d-9cc22ed12f56 snapshot d7527af5-48af-4f15-92ae-500ac492e369 dir "/tmp/.tmpITQLSK/regions/b8985630-b020-47f6-828d-9cc22ed12f56/.zfs/snapshot/d7527af5-48af-4f15-92ae-500ac492e369", component: test_snapshot_interface
11348 Sep 22 23:14:25.896 INFO ensure port SCF_TYPE_COUNT 1000
11349 Sep 22 23:14:25.896 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11350 Sep 22 23:14:25.896 INFO creating region bd788714-d366-4573-ac0f-64f52c45fa42 snapshot 3a99346d-25cc-4829-a850-fc77546058b5 dir "/tmp/.tmp95MCpd/regions/bd788714-d366-4573-ac0f-64f52c45fa42/.zfs/snapshot/3a99346d-25cc-4829-a850-fc77546058b5", component: test_snapshot_interface
11351 Sep 22 23:14:25.896 INFO commit
11352 Sep 22 23:14:25.896 INFO ok!
11353 Sep 22 23:14:25.896 INFO requesting running snapshot b8985630-b020-47f6-828d-9cc22ed12f56-d7527af5-48af-4f15-92ae-500ac492e369 state: Requested, component: test_harness_datafile
11354 Sep 22 23:14:25.896 INFO requesting running snapshot bd788714-d366-4573-ac0f-64f52c45fa42-3a99346d-25cc-4829-a850-fc77546058b5 state: Requested, component: test_harness_datafile
11355 test test::test_smf_region ... ok
11356 Sep 22 23:14:25.897 DEBG found expected downstairs instance: downstairs-b8985630-b020-47f6-828d-9cc22ed12f56
11357 Sep 22 23:14:25.897 DEBG found expected downstairs instance: downstairs-bd788714-d366-4573-ac0f-64f52c45fa42
11358 test test::test_smf_region_bounce_idempotent ... ok
11359 Sep 22 23:14:25.897 DEBG do not need to reconfigure oxide/crucible/downstairs:downstairs-b8985630-b020-47f6-828d-9cc22ed12f56
11360 Sep 22 23:14:25.897 DEBG do not need to reconfigure oxide/crucible/downstairs:downstairs-bd788714-d366-4573-ac0f-64f52c45fa42
11361 Sep 22 23:14:25.897 INFO creating missing snapshot instance snapshot-b8985630-b020-47f6-828d-9cc22ed12f56-d7527af5-48af-4f15-92ae-500ac492e369
11362 Sep 22 23:14:25.897 INFO creating missing snapshot instance snapshot-bd788714-d366-4573-ac0f-64f52c45fa42-3a99346d-25cc-4829-a850-fc77546058b5
11363 Sep 22 23:14:25.897 INFO ok, have oxide/crucible/downstairs:snapshot-b8985630-b020-47f6-828d-9cc22ed12f56-d7527af5-48af-4f15-92ae-500ac492e369
11364 Sep 22 23:14:25.897 INFO ok, have oxide/crucible/downstairs:snapshot-bd788714-d366-4573-ac0f-64f52c45fa42-3a99346d-25cc-4829-a850-fc77546058b5
11365 Sep 22 23:14:25.897 INFO reconfig required, no property group
11366 Sep 22 23:14:25.897 INFO reconfig required, no property group
11367 Sep 22 23:14:25.897 INFO reconfiguring oxide/crucible/downstairs:snapshot-b8985630-b020-47f6-828d-9cc22ed12f56-d7527af5-48af-4f15-92ae-500ac492e369
11368 Sep 22 23:14:25.897 INFO reconfiguring oxide/crucible/downstairs:snapshot-bd788714-d366-4573-ac0f-64f52c45fa42-3a99346d-25cc-4829-a850-fc77546058b5
11369 Sep 22 23:14:25.897 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmpITQLSK/regions/b8985630-b020-47f6-828d-9cc22ed12f56/.zfs/snapshot/d7527af5-48af-4f15-92ae-500ac492e369
11370 Sep 22 23:14:25.897 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmp95MCpd/regions/bd788714-d366-4573-ac0f-64f52c45fa42/.zfs/snapshot/3a99346d-25cc-4829-a850-fc77546058b5
11371 Sep 22 23:14:25.897 INFO ensure port SCF_TYPE_COUNT 1001
11372 Sep 22 23:14:25.897 INFO ensure port SCF_TYPE_COUNT 1001
11373 Sep 22 23:14:25.897 INFO ensure mode SCF_TYPE_ASTRING ro
11374 Sep 22 23:14:25.897 INFO ensure mode SCF_TYPE_ASTRING ro
11375 Sep 22 23:14:25.897 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11376 Sep 22 23:14:25.897 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11377 Sep 22 23:14:25.897 INFO commit
11378 Sep 22 23:14:25.897 INFO commit
11379 Sep 22 23:14:25.897 INFO ok!
11380 Sep 22 23:14:25.897 INFO ok!
11381 Sep 22 23:14:25.897 INFO region b8985630-b020-47f6-828d-9cc22ed12f56 running snapshot d7527af5-48af-4f15-92ae-500ac492e369 state: Requested -> Created, component: test_harness_datafile
11382 Sep 22 23:14:25.897 INFO removing running snapshot bd788714-d366-4573-ac0f-64f52c45fa42-3a99346d-25cc-4829-a850-fc77546058b5, component: test_harness_datafile
11383 Sep 22 23:14:25.897 INFO creating missing downstairs instance downstairs-b8985630-b020-47f6-828d-9cc22ed12f56
11384 Sep 22 23:14:25.897 DEBG found expected downstairs instance: downstairs-bd788714-d366-4573-ac0f-64f52c45fa42
11385 Sep 22 23:14:25.897 INFO ok, have oxide/crucible/downstairs:downstairs-b8985630-b020-47f6-828d-9cc22ed12f56
11386 Sep 22 23:14:25.897 INFO reconfig required, no property group
11387 Sep 22 23:14:25.897 INFO disabling snapshot instance: snapshot-bd788714-d366-4573-ac0f-64f52c45fa42-3a99346d-25cc-4829-a850-fc77546058b5 (instance states: (Some(Online), None))
11388 Sep 22 23:14:25.897 INFO creating config property group
11389 Sep 22 23:14:25.897 INFO reconfiguring oxide/crucible/downstairs:downstairs-b8985630-b020-47f6-828d-9cc22ed12f56
11390 Sep 22 23:14:25.897 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmpITQLSK/regions/b8985630-b020-47f6-828d-9cc22ed12f56
11391 Sep 22 23:14:25.897 DEBG do not need to reconfigure oxide/crucible/downstairs:downstairs-bd788714-d366-4573-ac0f-64f52c45fa42
11392 Sep 22 23:14:25.897 INFO ensure port SCF_TYPE_COUNT 1000
11393 Sep 22 23:14:25.897 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11394 Sep 22 23:14:25.897 INFO commit
11395 Sep 22 23:14:25.897 INFO creating missing downstairs instance downstairs-bd788714-d366-4573-ac0f-64f52c45fa42
11396 Sep 22 23:14:25.897 INFO ok, have oxide/crucible/downstairs:downstairs-bd788714-d366-4573-ac0f-64f52c45fa42
11397 Sep 22 23:14:25.897 INFO ok!
11398 Sep 22 23:14:25.897 INFO reconfig required, no property group
11399 Sep 22 23:14:25.897 INFO creating config property group
11400 Sep 22 23:14:25.897 INFO creating missing snapshot instance snapshot-b8985630-b020-47f6-828d-9cc22ed12f56-d7527af5-48af-4f15-92ae-500ac492e369
11401 Sep 22 23:14:25.897 INFO reconfiguring oxide/crucible/downstairs:downstairs-bd788714-d366-4573-ac0f-64f52c45fa42
11402 Sep 22 23:14:25.897 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmp95MCpd/regions/bd788714-d366-4573-ac0f-64f52c45fa42
11403 Sep 22 23:14:25.897 INFO ok, have oxide/crucible/downstairs:snapshot-b8985630-b020-47f6-828d-9cc22ed12f56-d7527af5-48af-4f15-92ae-500ac492e369
11404 Sep 22 23:14:25.897 INFO reconfig required, no property group
11405 Sep 22 23:14:25.897 INFO ensure port SCF_TYPE_COUNT 1000
11406 Sep 22 23:14:25.897 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11407 Sep 22 23:14:25.897 INFO reconfiguring oxide/crucible/downstairs:snapshot-b8985630-b020-47f6-828d-9cc22ed12f56-d7527af5-48af-4f15-92ae-500ac492e369
11408 Sep 22 23:14:25.897 INFO commit
11409 Sep 22 23:14:25.897 INFO ensure directory SCF_TYPE_ASTRING /tmp/.tmpITQLSK/regions/b8985630-b020-47f6-828d-9cc22ed12f56/.zfs/snapshot/d7527af5-48af-4f15-92ae-500ac492e369
11410 Sep 22 23:14:25.897 INFO ok!
11411 Sep 22 23:14:25.897 INFO ensure port SCF_TYPE_COUNT 1001
11412 Sep 22 23:14:25.897 INFO ensure mode SCF_TYPE_ASTRING ro
11413 Sep 22 23:14:25.897 INFO ensure address SCF_TYPE_ASTRING 127.0.0.1
11414 Sep 22 23:14:25.897 INFO commit
11415 Sep 22 23:14:25.897 INFO ok!
11416 test test::test_smf_datafile_race_running_snapshots ... ok
11417 test test::test_smf_running_snapshot ... ok
11418 test tests::test_crucible_agent_openapi ... ok
11419 
11420 test result: ok. 11 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.02s
11421 
11422 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_agent_client-6f3900e8033b57ec --nocapture`
11423 
11424 running 0 tests
11425 
11426 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
11427 
11428 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_client_types-5ba9f9d411803900 --nocapture`
11429 
11430 running 0 tests
11431 
11432 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
11433 
11434 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_common-9a32809bdbdf85c4 --nocapture`
11435 
11436 running 2 tests
11437 test region::test::test_basic_region ... ok
11438 test region::test::test_region_validate_io ... ok
11439 
11440 test result: ok. 2 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
11441 
11442 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_control_client-d0a58354872d46d9 --nocapture`
11443 
11444 running 0 tests
11445 
11446 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
11447 
11448 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_downstairs-dce67baac661a5f4 --nocapture`
11449 
11450 running 156 tests
11451 test dump::test::color_compare ... ok
11452 test dump::test::color_compare_one ... ok
11453 test dump::test::color_compare_red0 ... ok
11454 test dump::test::color_compare_red01 ... ok
11455 test dump::test::color_compare_red01_2 ... ok
11456 test dump::test::color_compare_red02 ... ok
11457 test dump::test::color_compare_red02_2 ... ok
11458 test dump::test::color_compare_red1 ... ok
11459 test dump::test::color_compare_red12 ... ok
11460 test dump::test::color_compare_red12_2 ... ok
11461 test dump::test::color_compare_red2 ... ok
11462 test dump::test::color_compare_two ... ok
11463 test dump::test::color_compare_two_red0 ... ok
11464 test dump::test::color_compare_two_red1 ... ok
11465 test region::test::copy_path_basic ... ok
11466 thread 'region::test::bad_import_region' panicked at 'called `Result::unwrap()` on an `Err` value: Error open "/tmp/12345678-1111-2222-3333-123456789999/notadir/region.json": file not found opening region config "/tmp/12345678-1111-2222-3333-123456789999/notadir/region.json"', downstairs/src/region.rs:3466:10
11467 note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
11468 {{"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30{"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30{"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30{"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30{"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30,,""time":"time":"2023-09-22T23:14:25.972128532Z"2023-09-22T23:14:25.972083262Z","hostname":","hostname":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4298pid":4298}
11469 }
11470 {{,"time":"""msgmsg"2023-09-22T23:14:25.972249247Z:""":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11471 Created new region file \"/tmp/.tmptl1G8F/region.json\"","{v":0,""name"msg"::""crucible",Created new region file ,\"""level"/tmp/.tmpUgERFY/region.json:\"time"30":","v":02023-09-22T23:14:25.972467365Z,""name":"crucible",",hostname""{,level"":":time"30ip-10-150-1-74.us-west-2.compute.internal"":",msg"2023-09-22T23:14:25.97206699Z",",pid"":time"::"",4298""hostname"current number of open files limit 65536 is already the maximum2023-09-22T23:14:25.972734886Ztime"",:""":hostname""2023-09-22T23:14:25.972749447Z:""ip-10-150-1-74.us-west-2.compute.internal",ip-10-150-1-74.us-west-2.compute.internal""hostname},""
11472 pid:"":,4298ip-10-150-1-74.us-west-2.compute.internal",}"
11473 pid""{:pid"4298:}4298
11474 }"
11475 msg":"Created new region file \"{/tmp/.tmpr5qBKq/region.json\"","v":0",msg"":"name":"crucible"Created new region file \","level":/tmp/.tmp34a3XL/region.json\"30","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:25.972829052Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,4298"time":"}
11476 2023-09-22T23:14:25.972836155Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11477 Created new region file \"/tmp/.tmpEqmWYa/region.json\"",,""v":v0",:"0name":","crucible"name,"":level"":crucible"30,"level":30,"time":"2023-09-22T23:14:25.972915198Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11478 ,,""time"time":":"2023-09-22T23:14:25.972917321Z"2023-09-22T23:14:25.972222273Z","hostname":","hostnameip-10-150-1-74.us-west-2.compute.internal"",":"pid":4298}ip-10-150-1-74.us-west-2.compute.internal"
11479 ,"pid":4298}
11480 {"msg":"Created new region file {\"/tmp/.tmpHNTfYZ/region.json\"","v":0,""name":"msg":"crucible","level":30Created new region file \"/tmp/.tmpbmowMo/region.json\"","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:25.972984691Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11481 ,"time":"2023-09-22T23:14:25.972993254Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
114822023-09-22T23:14:25.976ZINFOcrucible: current number of open files limit 65536 is already the maximum
114832023-09-22T23:14:25.976ZINFOcrucible: Created new region file "/tmp/.tmpuVAiXy/region.json"
11484 test region::test::bad_import_region - should panic ... ok
114852023-09-22T23:14:25.978ZINFOcrucible: current number of open files limit 65536 is already the maximum
114862023-09-22T23:14:25.979ZINFOcrucible: Created new region file "/tmp/.tmpAYVwJc/region.json"
114872023-09-22T23:14:25.990ZINFOcrucible: current number of open files limit 65536 is already the maximum
114882023-09-22T23:14:25.990ZINFOcrucible: Database read version 1
114892023-09-22T23:14:25.990ZINFOcrucible: Database write version 1
11490 test region::test::duplicate_context_insert ... ok
11491 test region::test::encryption_context ... ok
11492 test region::test::extent_dir_max ... ok
11493 test region::test::extent_dir_basic ... ok
11494 test region::test::extent_dir_min ... ok
11495 thread 'region::test::extent_io_invalid_block_buf' panicked at 'called `Result::unwrap()` on an `Err` value: DataLenUnaligned', downstairs/src/region.rs:3516:52
11496 thread 'region::test::extent_io_bad_block' panicked at 'called `Result::unwrap()` on an `Err` value: OffsetInvalid', downstairs/src/region.rs:3506:53
114972023-09-22T23:14:25.994ZINFOcrucible: current number of open files limit 65536 is already the maximum
11498 test region::test::extent_io_invalid_block_buf - should panic ... ok
114992023-09-22T23:14:25.995ZINFOcrucible: Created new region file "/tmp/.tmpzs8jke/region.json"
11500 test region::test::extent_io_bad_block - should panic ... ok
11501 thread 'region::test::extent_io_non_aligned_large' panicked at 'called `Result::unwrap()` on an `Err` value: DataLenUnaligned', downstairs/src/region.rs:3486:51
11502 thread 'region::test::extent_io_invalid_large' panicked at 'called `Result::unwrap()` on an `Err` value: DataLenUnaligned', downstairs/src/region.rs:3526:51
11503 test region::test::extent_io_non_aligned_large - should panic ... ok
11504 test region::test::extent_io_invalid_large - should panic ... ok
11505 thread 'region::test::extent_io_non_aligned_small' panicked at 'called `Result::unwrap()` on an `Err` value: DataLenUnaligned', downstairs/src/region.rs:3496:51
11506 test region::test::copy_extent_dir_twice ... ok
11507 test region::test::extent_name_basic ... ok
11508 test region::test::extent_io_non_aligned_small - should panic ... ok
11509 test region::test::extent_io_valid ... ok
11510 test region::test::copy_extent_dir ... ok
11511 EXT BLOCKS GEN0 test region::test::extent_name_basic_ext ... ok
11512 FL0 D0
11513 0 000-009test region::test::extent_name_basic_ext_shm ... ok
11514  0  0 test region::test::extent_name_basic_ext_wal ... ok
11515 F
11516 1 010-019  0  0 F
11517 Max gen: 0, Max flush: 0
11518 test region::test::close_extent ... ok
11519 test region::test::extent_name_basic_three ... ok
11520 test region::test::extent_name_basic_two ... ok
11521 test region::test::extent_name_max ... ok
11522 test region::test::extent_name_min ... ok
11523 test region::test::extent_path_mid_hi ... ok
11524 test region::test::extent_path_mid_lo ... ok
11525 test region::test::extent_path_max ... ok
11526 test region::test::extent_path_min ... ok
11527 test region::test::extent_path_three ... ok
115282023-09-22T23:14:26.001ZINFOcrucible: current number of open files limit 65536 is already the maximum
115292023-09-22T23:14:26.001ZINFOcrucible: Created new region file "/tmp/.tmpSzJQkz/region.json"
115302023-09-22T23:14:26.002ZINFOcrucible: current number of open files limit 65536 is already the maximum
11531 {"msg":"Created new region file \"/tmp/.tmp3IoV6E/region.json\"","v":0,"name":"crucible","level":30test region::test::dump_a_region ... ,"ok
11532 time":"2023-09-22T23:14:26.002101511Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
115332023-09-22T23:14:26.002ZINFOcrucible: current number of open files limit 65536 is already the maximum
11534 {"msg":"Created new region file \"/tmp/.tmpWC03QK/region.json\"","v":0,"name":"crucible","level":30{"msg",":time"":"2023-09-22T23:14:26.002269491Z"current number of open files limit 65536 is already the maximum","hostname",":v"":0,"ip-10-150-1-74.us-west-2.compute.internalname":"","crucible"pid",:"4298level":}30
11535 ,"time":"2023-09-22T23:14:26.002306475Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
115362023-09-22T23:14:26.002ZINFOcrucible: Opened existing region file "/tmp/.tmpuVaodS/region.json"
115372023-09-22T23:14:26.002ZINFOcrucible: current number of open files limit 65536 is already the maximum
11538 test region::test::new_region ... {ok
11539 "{msg":"Opened existing region file \""/tmp/.tmpBbFuRN/region.json\""msg",:""v":0,"name":"current number of open files limit 65536 is already the maximum"crucible",",level"":v"30:0,"name":"crucible","level":30,"time":"2023-09-22T23:14:26.002507432Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11540 ,"time":"2023-09-22T23:14:26.002513225Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
115412023-09-22T23:14:26.002ZINFOcrucible: Database read version 1
115422023-09-22T23:14:26.002ZINFOcrucible: Database write version 1
11543 {"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30test region::test::region_bad_database_read_version_high ... ok
11544 ,"time":"2023-09-22T23:14:26.002659232Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
115452023-09-22T23:14:26.002ZINFOcrucible: Database read version 1
115462023-09-22T23:14:26.002ZINFOcrucible: Database write version 1
11547 test region::test::region_bad_database_read_version_low ... ok
11548 test region::test::new_existing_region ... ok
115492023-09-22T23:14:26.003ZINFOcrucible: current number of open files limit 65536 is already the maximum
11550 {"{msg":"Opened existing region file \""/tmp/.tmpU8e474/region.json\""msg",:""v":0,"name":"current number of open files limit 65536 is already the maximum"crucible",",level"":v30":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:26.003349992Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11551 ,"time":"{2023-09-22T23:14:26.003356061Z",""hostname"msg":":"Database read version 1ip-10-150-1-74.us-west-2.compute.internal"",","pid"v"::42980,"name}"
11552 :"crucible","level":30{"msg":"Opened existing region file \"/tmp/.tmpc0jGKx/region.json\"","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:26.003404304Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11553 ,"time":"2023-09-22T23:14:26.003417495Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
115542023-09-22T23:14:26.003ZINFOcrucible: Database read version 1
11555 test region::test::region_bad_database_write_version_high ... ok
11556 test region::test::region_bad_database_write_version_low ... ok
115572023-09-22T23:14:26.003ZINFOcrucible: current number of open files limit 65536 is already the maximum
115582023-09-22T23:14:26.004ZINFOcrucible: Created new region file "/tmp/.tmpL9TBhh/region.json"
115592023-09-22T23:14:26.004ZINFOcrucible: current number of open files limit 65536 is already the maximum
11560 {"msg":"{Created new region file \"/tmp/.tmp1rT7EC/region.json\"","v"":0msg,"":name"":"crucible","level":current number of open files limit 65536 is already the maximum30","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:26.004201177Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11561 ,"time":"2023-09-22T23:14:26.004209413Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
115622023-09-22T23:14:26.004ZINFOcrucible: Created new region file "/tmp/.tmpI32qSh/region.json"
115632023-09-22T23:14:26.004ZINFOcrucible: current number of open files limit 65536 is already the maximum
115642023-09-22T23:14:26.004ZINFOcrucible: current number of open files limit 65536 is already the maximum
11565 {{"msg":""msg":"Created new region file \"Created new region file /tmp/.tmpvhyZdk/region.json\"\""/tmp/.tmpfJBJTP/region.json\",""v":,0","v"name:"0:","crucible"name":","crucible"level",":level":3030,"time":"2023-09-22T23:14:26.004661369Z","hostname":",ip-10-150-1-74.us-west-2.compute.internal""time":,""pid":2023-09-22T23:14:26.004661091Z4298","}hostname
11566 ":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
115672023-09-22T23:14:26.005ZINFOcrucible: current number of open files limit 65536 is already the maximum
115682023-09-22T23:14:26.005ZINFOcrucible: Database read version 1
115692023-09-22T23:14:26.005ZINFOcrucible: Database write version 1
115702023-09-22T23:14:26.006ZINFOcrucible: current number of open files limit 65536 is already the maximum
115712023-09-22T23:14:26.006ZINFOcrucible: Database read version 1
115722023-09-22T23:14:26.006ZINFOcrucible: Database write version 1
11573 EXT BLOCKS GEN0 GEN1 FL0 FL1 D0 D1
11574 0 000-009  0  0  0  0 F F
11575 1 010-019  0  0  0  0 F F
11576 Max gen: 0, Max flush: 0
115772023-09-22T23:14:26.013ZINFOcrucible: current number of open files limit 65536 is already the maximum
115782023-09-22T23:14:26.013ZINFOcrucible: Database read version 1
115792023-09-22T23:14:26.013ZINFOcrucible: Database write version 1
11580 test region::test::multiple_context ... ok
115812023-09-22T23:14:26.014ZINFOcrucible: current number of open files limit 65536 is already the maximum
115822023-09-22T23:14:26.014ZINFOcrucible: Created new region file "/tmp/.tmpF7g92b/region.json"
11583 cp "/tmp/.tmpfJBJTP/00/000/000" to "/tmp/.tmpfJBJTP/00/000/001.copy/001"
115842023-09-22T23:14:26.016ZINFOcrucible: current number of open files limit 65536 is already the maximum
115852023-09-22T23:14:26.016ZINFOcrucible: Opened existing region file "/tmp/.tmpL9TBhh/region.json"
11586 {"msg":"Database read version 1","v":0,"name":"crucible","level":30Recreate "/tmp/.tmpfJBJTP/00/000/001.db-shm"
11587 ,"time":"2023-09-22T23:14:26.016776455Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
115882023-09-22T23:14:26.016ZINFOcrucible: Database write version 1
11589 test region::test::dump_two_region ... ok
11590 Recreate "/tmp/.tmpfJBJTP/00/000/001.db-wal"
115912023-09-22T23:14:26.017ZINFOcrucible: Extent 1 found replacement dir, finishing replacement
115922023-09-22T23:14:26.017ZINFOcrucible: Copy files from "/tmp/.tmpfJBJTP/00/000/001.replace" in "/tmp/.tmpfJBJTP/00/000"
11593 {{"msg"":"msg":Remove old file \""/tmp/.tmpfJBJTP/00/000/001.db-shm\"current number of open files limit 65536 is already the maximum" as there is no replacement",",v"":v":00,","name"name"::""crucible"crucible",",level"":level"30:30,"time":"2023-09-22T23:14:26.017510447Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:14:26.017511604Z",",pid"":hostname4298":"}
11594 ip-10-150-1-74.us-west-2.compute.internal","pid":4298{}
11595 "msg":"Remove old file \"{/tmp/.tmpfJBJTP/00/000/001.db-wal\""msg" as there is no replacement":","v"Created new region file \":0/tmp/.tmpbHZr8I/region.json\"",,""vname""::0","name":"cruciblecrucible"","level":,30"level":30,"time":"2023-09-22T23:14:26.017580597Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11596 ,"time":"2023-09-22T23:14:26.017582411Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
115972023-09-22T23:14:26.017ZINFOcrucible: Move directory "/tmp/.tmpfJBJTP/00/000/001.replace" to "/tmp/.tmpfJBJTP/00/000/001.completed"
11598 test region::test::reopen_extent_cleanup_one ... ok
115992023-09-22T23:14:26.018ZINFOcrucible: Extent 1 found replacement dir, finishing replacement
116002023-09-22T23:14:26.018ZINFOcrucible: Copy files from "/tmp/.tmpvhyZdk/00/000/001.replace" in "/tmp/.tmpvhyZdk/00/000"
116012023-09-22T23:14:26.018ZINFOcrucible: Move directory "/tmp/.tmpvhyZdk/00/000/001.replace" to "/tmp/.tmpvhyZdk/00/000/001.completed"
116022023-09-22T23:14:26.019ZINFOcrucible: current number of open files limit 65536 is already the maximum
116032023-09-22T23:14:26.019ZINFOcrucible: Created new region file "/tmp/.tmpMyPH8a/region.json"
11604 test region::test::reopen_extent_cleanup_replay_short ... ok
116052023-09-22T23:14:26.022ZINFOcrucible: current number of open files limit 65536 is already the maximum
116062023-09-22T23:14:26.022ZINFOcrucible: Created new region file "/tmp/.tmpxFGvC6/region.json"
11607 test region::test::reopen_extent_cleanup_replay ... ok
116082023-09-22T23:14:26.023ZINFOcrucible: current number of open files limit 65536 is already the maximum
116092023-09-22T23:14:26.023ZINFOcrucible: Created new region file "/tmp/.tmpROqSgO/region.json"
11610 test region::test::region_create_drop_open ... ok
116112023-09-22T23:14:26.025ZINFOcrucible: current number of open files limit 65536 is already the maximum
116122023-09-22T23:14:26.025ZINFOcrucible: Database read version 1
11613 {"msg":"Database write version 1","v":0,"name":"crucible","level":30{","timemsg"":":"2023-09-22T23:14:26.025488848Z"Failed write hash validation","hostname":,""v":0,"ip-10-150-1-74.us-west-2.compute.internal"name",:""cruciblepid"":,"4298level":}50
11614 ,"time":"2023-09-22T23:14:26.025522244Z"," Extent 2
11615 hostname":GEN "ip-10-150-1-74.us-west-2.compute.internal"," 0 pid": 0 4298
11616 }FLUSH_ID
11617 { 0 0
11618 DIRTY "msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:26.025587684Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
116192023-09-22T23:14:26.025ZINFOcrucible: Created new region file "/tmp/.tmpn46IR3/region.json"
11620 
11621 
11622 BLOCK D0 D1 C0 C1 DIFF
116232023-09-22T23:14:26.025ZINFOcrucible: current number of open files limit 65536 is already the maximum
116242023-09-22T23:14:26.025ZINFOcrucible: Database read version 1
116252023-09-22T23:14:26.025ZINFOcrucible: Database write version 1
11626 test region::test::test_bad_hash_bad ... ok
116272023-09-22T23:14:26.027ZINFOcrucible: current number of open files limit 65536 is already the maximum
116282023-09-22T23:14:26.027ZINFOcrucible: Created new region file "/tmp/.tmpLMYlj2/region.json"
11629 test region::test::reopen_extent_cleanup_two ... ok
11630 {"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30test region::test::reopen_all_extents ... ok
11631 ,"time":"2023-09-22T23:14:26.029723851Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
116322023-09-22T23:14:26.029ZINFOcrucible: Created new region file "/tmp/.tmpV4jWy1/region.json"
116332023-09-22T23:14:26.030ZINFOcrucible: current number of open files limit 65536 is already the maximum
116342023-09-22T23:14:26.030ZINFOcrucible: Created new region file "/tmp/.tmpnFW2oG/region.json"
11635 test region::test::test_blank_block_read_ok ... ok
116362023-09-22T23:14:26.032ZINFOcrucible: current number of open files limit 65536 is already the maximum
116372023-09-22T23:14:26.032ZINFOcrucible: Database read version 1
116382023-09-22T23:14:26.032ZINFOcrucible: Database write version 1
116392023-09-22T23:14:26.032ZINFOcrucible: current number of open files limit 65536 is already the maximum
116402023-09-22T23:14:26.032ZINFOcrucible: Created new region file "/tmp/.tmpYEI8V4/region.json"
11641 test region::test::reopen_extent_no_replay_readonly ... ok
116422023-09-22T23:14:26.033ZINFOcrucible: current number of open files limit 65536 is already the maximum
116432023-09-22T23:14:26.034ZINFOcrucible: Created new region file "/tmp/.tmpPVxrNK/region.json"
11644 test region::test::test_extent_write_flush_close ... ok
11645 020 A A A A
116462023-09-22T23:14:26.037ZINFOcrucible: current number of open files limit 65536 is already the maximum
116472023-09-22T23:14:26.037ZINFOcrucible: current number of open files limit 65536 is already the maximum
116482023-09-22T23:14:26.037ZINFOcrucible: Created new region file "/tmp/.tmpaig8rX/region.json"
116492023-09-22T23:14:26.037ZINFOcrucible: Database read version 1
116502023-09-22T23:14:26.037ZINFOcrucible: Database write version 1
11651 test region::test::test_big_write ... ok
11652 test region::test::test_extent_close_reopen_flush_close ... ok
116532023-09-22T23:14:26.038ZINFOcrucible: current number of open files limit 65536 is already the maximum
116542023-09-22T23:14:26.038ZINFOcrucible: Created new region file "/tmp/.tmpp33hxJ/region.json"
116552023-09-22T23:14:26.039ZINFOcrucible: current number of open files limit 65536 is already the maximum
116562023-09-22T23:14:26.039ZINFOcrucible: Created new region file "/tmp/.tmp0OWlpB/region.json"
11657 test region::test::test_flush_extent_limit_too_large ... ok
116582023-09-22T23:14:26.041ZINFOcrucible: current number of open files limit 65536 is already the maximum
116592023-09-22T23:14:26.041ZINFOcrucible: Created new region file "/tmp/.tmpGCIag7/region.json"
11660 test region::test::test_flush_extent_limit_base ... ok
116612023-09-22T23:14:26.042ZINFOcrucible: current number of open files limit 65536 is already the maximum
116622023-09-22T23:14:26.042ZINFOcrucible: Created new region file "/tmp/.tmpgHBd3w/region.json"
116632023-09-22T23:14:26.043ZINFOcrucible: current number of open files limit 65536 is already the maximum
116642023-09-22T23:14:26.043ZINFOcrucible: Database read version 1
116652023-09-22T23:14:26.043ZINFOcrucible: Database write version 1
11666 test region::test::test_flush_extent_limit_end ... ok
116672023-09-22T23:14:26.045ZINFOcrucible: current number of open files limit 65536 is already the maximum
116682023-09-22T23:14:26.045ZINFOcrucible: Created new region file "/tmp/.tmp09t5Jb/region.json"
11669 test region::test::test_fully_rehash_and_clean_does_not_mark_blocks_as_written ... ok
116702023-09-22T23:14:26.047ZINFOcrucible: current number of open files limit 65536 is already the maximum
116712023-09-22T23:14:26.047ZINFOcrucible: Created new region file "/tmp/.tmp58F936/region.json"
11672 021 A A A A
116732023-09-22T23:14:26.047ZINFOcrucible: current number of open files limit 65536 is already the maximum
116742023-09-22T23:14:26.047ZINFOcrucible: Database read version 1
116752023-09-22T23:14:26.047ZINFOcrucible: Database write version 1
11676 test region::test::test_ok_hash_ok ... ok
11677 test region::test::test_fully_rehash_marks_blocks_unwritten_if_data_never_hit_disk ... ok
116782023-09-22T23:14:26.048ZINFOcrucible: current number of open files limit 65536 is already the maximum
116792023-09-22T23:14:26.049ZINFOcrucible: Created new region file "/tmp/.tmpKdyj6e/region.json"
116802023-09-22T23:14:26.049ZINFOcrucible: current number of open files limit 65536 is already the maximum
116812023-09-22T23:14:26.049ZINFOcrucible: Created new region file "/tmp/.tmprexlHr/region.json"
116822023-09-22T23:14:26.053ZINFOcrucible: current number of open files limit 65536 is already the maximum
116832023-09-22T23:14:26.053ZINFOcrucible: Database read version 1
116842023-09-22T23:14:26.053ZINFOcrucible: Database write version 1
11685 022 A A A A
116862023-09-22T23:14:26.056ZINFOcrucible: current number of open files limit 65536 is already the maximum
116872023-09-22T23:14:26.056ZINFOcrucible: Database read version 1
116882023-09-22T23:14:26.056ZINFOcrucible: Database write version 1
11689 test region::test::test_read_multiple_disjoint_none_contiguous ... ok
116902023-09-22T23:14:26.059ZINFOcrucible: current number of open files limit 65536 is already the maximum
11691 {"msg":"Created new region file \"/tmp/.tmpEB530l/region.json\"","v":0,"name":"crucible","level":30Send flush to extent limit 0
11692 ,"time":"2023-09-22T23:14:26.059611405Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11693 test region::test::test_read_multiple_disjoint_large_contiguous ... ok
11694 extent 0 should not be dirty now
11695 verify 1 still dirty
11696 {"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:26.060279931Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4298"}msg":
11697 "current number of open files limit 65536 is already the maximum"{,"v"":msg":"0,"nameCreated new region file ":\""crucible"/tmp/.tmp0aekN1/region.json\",""level":,"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:26.060345116Z",","time":hostname"":"2023-09-22T23:14:26.06034024Z","ip-10-150-1-74.us-west-2.compute.internal"hostname":,""pid":4298ip-10-150-1-74.us-west-2.compute.internal"},
11698 "pid":4298}
116992023-09-22T23:14:26.060ZINFOcrucible: Database read version 1
117002023-09-22T23:14:26.060ZINFOcrucible: Database write version 1
11701 verify 2 still dirty
11702 verify 3 still dirty
11703 verify 4 still dirty
11704 verify 5 still dirty
11705 test region::test::test_read_single_large_contiguous ... ok
11706 verify 6 still dirty
11707 verify 7 still dirty
11708 verify 8 still dirty
11709 verify 9 still dirty
11710 Send flush to extent limit 1
117112023-09-22T23:14:26.062ZINFOcrucible: current number of open files limit 65536 is already the maximum
11712 extent 1 should not be dirty now
11713 test region::test::test_read_single_large_contiguous_span_extents ... ok
11714 verify 2 still dirty
117152023-09-22T23:14:26.062ZINFOcrucible: Created new region file "/tmp/.tmpYOFJTN/region.json"
11716 verify 3 still dirty
11717 verify 4 still dirty
11718 verify 5 still dirty
11719 verify 6 still dirty
11720 verify 7 still dirty
117212023-09-22T23:14:26.063ZINFOcrucible: current number of open files limit 65536 is already the maximum
117222023-09-22T23:14:26.063ZINFOcrucible: Created new region file "/tmp/.tmp1r2Mld/region.json"
11723 verify 8 still dirty
11724 verify 9 still dirty
11725 Send flush to extent limit 2
11726 extent 2 should not be dirty now
11727 verify 3 still dirty
11728 verify 4 still dirty
11729 verify 5 still dirty
11730 verify 6 still dirty
11731 verify 7 still dirty
11732 verify 8 still dirty
11733 verify 9 still dirty
11734 Send flush to extent limit 3
11735 extent 3 should not be dirty now
11736 verify 4 still dirty
11737 verify 5 still dirty
11738 verify 6 still dirty
11739 verify 7 still dirty
11740 verify 8 still dirty
11741 verify 9 still dirty
11742 Send flush to extent limit 4
11743 023 A A A A
11744 extent 4 should not be dirty now
11745 {"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","verify 5 still dirty
11746 level":30,"time"verify 6 still dirty
11747 :"2023-09-22T23:14:26.065713263Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11748 verify 7 still dirty
11749 verify 8 still dirty
11750 verify 9 still dirty
11751 Send flush to extent limit 5
117522023-09-22T23:14:26.066ZINFOcrucible: Database read version 1
117532023-09-22T23:14:26.066ZINFOcrucible: Database write version 1
11754 extent 5 should not be dirty now
11755 verify 6 still dirty
11756 verify 7 still dirty
11757 verify 8 still dirty
11758 verify 9 still dirty
11759 Send flush to extent limit 6
11760 test region::test::test_region_open_removes_partial_writes ... ok
11761 extent 6 should not be dirty now
11762 verify 7 still dirty
11763 verify 8 still dirty
11764 verify 9 still dirty
11765 Send flush to extent limit 7
11766 extent 7 should not be dirty now
11767 verify 8 still dirty
11768 {verify 9 still dirty
11769 "msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30Send flush to extent limit 8
11770 ,"time":"2023-09-22T23:14:26.067394229Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
117712023-09-22T23:14:26.067ZINFOcrucible: Created new region file "/tmp/.tmpFxX9XQ/region.json"
11772 extent 8 should not be dirty now
11773 verify 9 still dirty
11774 Send flush to extent limit 9
11775 extent 9 should not be dirty now
117762023-09-22T23:14:26.070ZINFOcrucible: current number of open files limit 65536 is already the maximum
117772023-09-22T23:14:26.070ZINFOcrucible: Database read version 1
117782023-09-22T23:14:26.070ZINFOcrucible: Database write version 1
11779 test region::test::test_flush_extent_limit_walk_it_off ... ok
11780 024 A A A A
117812023-09-22T23:14:26.073ZINFOcrucible: current number of open files limit 65536 is already the maximum
117822023-09-22T23:14:26.073ZINFOcrucible: Created new region file "/tmp/.tmpdCShqT/region.json"
117832023-09-22T23:14:26.073ZINFOcrucible: current number of open files limit 65536 is already the maximum
117842023-09-22T23:14:26.073ZINFOcrucible: Database read version 1
117852023-09-22T23:14:26.073ZINFOcrucible: Database write version 1
11786 test region::test::test_write_multiple_disjoint_large_contiguous ... ok
11787 test region::test::test_write_multiple_disjoint_none_contiguous ... ok
11788 {"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30test region::test::test_write_single_large_contiguous ... ok
11789 ,"time":"2023-09-22T23:14:26.076556839Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
117902023-09-22T23:14:26.076ZINFOcrucible: Created new region file "/tmp/.tmpu2Fu9O/region.json"
117912023-09-22T23:14:26.077ZINFOcrucible: current number of open files limit 65536 is already the maximum
117922023-09-22T23:14:26.077ZINFOcrucible: current number of open files limit 65536 is already the maximum
11793 {"msg":"Created new region file \"{/tmp/.tmphRuxRF/region.json\""",msg""v:"":0,"Created new region file name\"":"crucible"/tmp/.tmp2B4zKP/region.json\"",","level"v:":030,"name":"crucible","level":30,,""timetime""::""2023-09-22T23:14:26.077208922Z2023-09-22T23:14:26.077205881Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::42984298}}
11794 
117952023-09-22T23:14:26.078ZINFOcrucible: current number of open files limit 65536 is already the maximum
117962023-09-22T23:14:26.078ZINFOcrucible: Database read version 1
117972023-09-22T23:14:26.078ZINFOcrucible: Database write version 1
11798 test region::test::test_write_single_large_contiguous_span_extents ... ok
11799 Total size: 15360
118002023-09-22T23:14:26.081ZINFOcrucible: current number of open files limit 65536 is already the maximum
11801 025 A A A A
118022023-09-22T23:14:26.081ZINFOcrucible: Created new region file "/tmp/.tmp5OTYZn/region.json"
118032023-09-22T23:14:26.081ZINFOcrucible: current number of open files limit 65536 is already the maximum
118042023-09-22T23:14:26.081ZINFOcrucible: Database read version 1
118052023-09-22T23:14:26.081ZINFOcrucible: Database write version 1
11806 test region::test::test_write_unwritten_big_write ... ok
11807 buffer size:2048
118082023-09-22T23:14:26.084ZINFOcrucible: current number of open files limit 65536 is already the maximum
118092023-09-22T23:14:26.084ZINFOcrucible: Created new region file "/tmp/.tmpACQl0A/region.json"
11810 Read eid: 0, 0 offset: Block { value: 0, shift: 9 }
11811 {Read eid: 0, 1 offset: Block { value: 1, shift: 9 }
11812 Read eid: 0, 2 offset: Block { value: 2, shift: 9 }
11813 "Read eid: 0, 3 offset: Block { value: 3, shift: 9 }
11814 msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:26.084947044Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
118152023-09-22T23:14:26.084ZINFOcrucible: Database read version 1
11816 {"msg":"Database write version 1","v":0,"name":"crucible","level":30,"time":"Read a region, append
11817 2023-09-22T23:14:26.085038808Z"Read a region, append
11818 ,"hostname":Read a region, append
11819 "Read a region, append
11820 ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11821 Total size: 20480
11822 test region::test::test_write_unwritten_big_write_partial_final ... ok
118232023-09-22T23:14:26.087ZINFOcrucible: current number of open files limit 65536 is already the maximum
118242023-09-22T23:14:26.087ZINFOcrucible: Created new region file "/tmp/.tmpguYfqg/region.json"
11825 test region::test::test_write_unwritten_big_write_partial_0 ... ok
11826 test region::test::test_write_unwritten_big_write_partial_1 ... ok
118272023-09-22T23:14:26.089ZINFOcrucible: current number of open files limit 65536 is already the maximum
118282023-09-22T23:14:26.089ZINFOcrucible: Created new region file "/tmp/.tmptWvoUN/region.json"
118292023-09-22T23:14:26.089ZINFOcrucible: current number of open files limit 65536 is already the maximum
118302023-09-22T23:14:26.089ZINFOcrucible: Created new region file "/tmp/.tmpuhsTvB/region.json"
11831 026 A A A A
118322023-09-22T23:14:26.090ZINFOcrucible: current number of open files limit 65536 is already the maximum
118332023-09-22T23:14:26.090ZINFOcrucible: Database read version 1
118342023-09-22T23:14:26.090ZINFOcrucible: Database write version 1
118352023-09-22T23:14:26.093ZINFOcrucible: current number of open files limit 65536 is already the maximum
118362023-09-22T23:14:26.093ZINFOcrucible: Database read version 1
118372023-09-22T23:14:26.093ZINFOcrucible: Database write version 1
11838 test region::test::test_write_unwritten_big_write_partial_sparse ... ok
118392023-09-22T23:14:26.096ZINFOcrucible: current number of open files limit 65536 is already the maximum
118402023-09-22T23:14:26.096ZINFOcrucible: Created new region file "/tmp/.tmpKj0abr/region.json"
11841 test region::test::test_write_unwritten_multiple_disjoint_large_contiguous ... ok
118422023-09-22T23:14:26.097ZINFOcrucible: current number of open files limit 65536 is already the maximum
118432023-09-22T23:14:26.097ZINFOcrucible: Created new region file "/tmp/.tmpsnGu6h/region.json"
11844 test region::test::test_write_unwritten_multiple_disjoint_none_contiguous ... ok
11845 027 A A A A
118462023-09-22T23:14:26.097ZINFOcrucible: current number of open files limit 65536 is already the maximum
118472023-09-22T23:14:26.098ZINFOcrucible: Database read version 1
118482023-09-22T23:14:26.098ZINFOcrucible: Database write version 1
11849 test region::test::test_write_unwritten_single_large_contiguous ... ok
118502023-09-22T23:14:26.098ZINFOcrucible: current number of open files limit 65536 is already the maximum
118512023-09-22T23:14:26.098ZINFOcrucible: Created new region file "/tmp/.tmpNq9xEH/region.json"
11852 test region::test::validate_repair_files_also_good ... ok
11853 test region::test::validate_repair_files_duplicate ... ok
11854 test region::test::validate_repair_files_duplicate_pair ... ok
11855 test region::test::validate_repair_files_empty ... ok
11856 test region::test::validate_repair_files_good ... ok
11857 test region::test::validate_repair_files_not_good_enough ... ok
11858 test region::test::validate_repair_files_offbyon ... ok
11859 test region::test::validate_repair_files_quad_duplicate ... ok
11860 test region::test::validate_repair_files_too_good ... ok
118612023-09-22T23:14:26.101ZINFOcrucible: current number of open files limit 65536 is already the maximum
118622023-09-22T23:14:26.101ZINFOcrucible: Database read version 1
118632023-09-22T23:14:26.101ZINFOcrucible: Database write version 1
11864 test region::test::test_write_unwritten_when_empty ... ok
118652023-09-22T23:14:26.101ZINFOcrucible: current number of open files limit 65536 is already the maximum
118662023-09-22T23:14:26.101ZINFOcrucible: Created new region file "/tmp/.tmpiSwTUM/region.json"
11867 test region::test::test_write_unwritten_single_large_contiguous_span_extents ... ok
118682023-09-22T23:14:26.102ZINFOcrucible: current number of open files limit 65536 is already the maximum
118692023-09-22T23:14:26.102ZINFOcrucible: Created new region file "/tmp/.tmpgZFRJj/region.json"
11870 test region::test::test_write_unwritten_when_written ... ok
118712023-09-22T23:14:26.102ZINFOcrucible: current number of open files limit 65536 is already the maximum
118722023-09-22T23:14:26.102ZINFOcrucible: Created new region file "/tmp/.tmpplfrpC/region.json"
118732023-09-22T23:14:26.103ZINFOcrucible: current number of open files limit 65536 is already the maximum
118742023-09-22T23:14:26.103ZINFOcrucible: Created new region file "/tmp/.tmpuu6Uye/region.json"
11875 test region::test::test_write_unwritten_when_written_flush ... ok
118762023-09-22T23:14:26.105ZINFOcrucible: current number of open files limit 65536 is already the maximum
118772023-09-22T23:14:26.105ZINFOcrucible: Created new region file "/tmp/.tmpChHAJv/region.json"
11878 028 A A A A
118792023-09-22T23:14:26.106ZINFOcrucible: current number of open files limit 65536 is already the maximum
118802023-09-22T23:14:26.106ZINFOcrucible: Database read version 1
118812023-09-22T23:14:26.106ZINFOcrucible: Database write version 1
11882 files: ["001", "001.db", "001.db-shm", "001.db-wal"]
11883 test repair::test::extent_expected_files_fail ... ok
11884 test repair::test::extent_expected_files ... ok
11885 files: ["001", "001.db"]
118862023-09-22T23:14:26.111ZINFOcrucible: current number of open files limit 65536 is already the maximum
118872023-09-22T23:14:26.111ZINFOcrucible: Created new region file "/tmp/.tmp6LDZTO/region.json"
11888 test repair::test::extent_expected_files_fail_two ... ok
11889 files: ["001", "001.db"]
11890 test repair::test::extent_expected_files_short ... ok
118912023-09-22T23:14:26.112ZINFOcrucible: current number of open files limit 65536 is already the maximum
118922023-09-22T23:14:26.113ZINFOcrucible: Database read version 1
118932023-09-22T23:14:26.113ZINFOcrucible: Database write version 1
118942023-09-22T23:14:26.113ZINFOcrucible: current number of open files limit 65536 is already the maximum
118952023-09-22T23:14:26.113ZINFOcrucible: Created new region file "/tmp/.tmp9ZmoLF/region.json"
118962023-09-22T23:14:26.113ZINFOcrucible: current number of open files limit 65536 is already the maximum
118972023-09-22T23:14:26.113ZINFOcrucible: Created new region file "/tmp/.tmp3VBj3m/region.json"
11898 test repair::test::extent_expected_files_short_with_close ... ok
118992023-09-22T23:14:26.114ZINFOcrucible: current number of open files limit 65536 is already the maximum
119002023-09-22T23:14:26.114ZINFOcrucible: Created new region file "/tmp/.tmpl7sOI4/region.json"
11901 029 A A A A
11902 test region::test::dump_extent ... ok
119032023-09-22T23:14:26.122ZWARNcrucible: 1002 job Flush for connection UpstairsConnection { upstairs_id: ef0699b7-a7d2-4976-b7ae-bba467e5372f, session_id: 7bbe9283-f318-4c62-98b0-765ed5da40e5, gen: 0 } waiting on 1 deps
11904 test repair::test::test_crucible_repair_openapi ... ok
11905 test test::job_dep_not_satisfied ... ok
11906 test test::jobs_extent_flush_close ... ok
11907 test test::jobs_extent_close ... ok
11908 test test::jobs_extent_live_noop ... ok
11909 test test::jobs_extent_live_reopen ... ok
11910 test test::jobs_extent_live_repair ... ok
11911 test test::jobs_independent ... ok
11912 test test::jobs_write_unwritten ... ok
119132023-09-22T23:14:26.129ZWARNcrucible: 1003 job Read for connection UpstairsConnection { upstairs_id: 7cc10823-5dc1-45d3-b4e3-caebc1707232, session_id: 09a31877-3cc1-45a5-900a-ac6d12bbfc0d, gen: 0 } waiting on 2 deps
11914 {"msg":"1003 job Read for connection UpstairsConnection { upstairs_id: da404d58-d3b5-45a9-8123-7d7ea17331ee, session_id: 470ece38-bc72-4a95-acd5-ab8a19a83232, gen: 0 } waiting on 2 deps","v":0,"name":"crucible","level":40{"msg",:""time":"2023-09-22T23:14:26.129688383Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","1003 job Read for connection UpstairsConnection { upstairs_id: 7cc10823-5dc1-45d3-b4e3-caebc1707232, session_id: 09a31877-3cc1-45a5-900a-ac6d12bbfc0d, gen: 0 } waiting on 1 depspid"":4298,"v":}
11915 0,"name":"crucible","level":40,"time":"2023-09-22T23:14:26.129738173Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11916 Import file_size: 51200 Extent size: 5120 Needed extents: 10
11917 Region already large enough for image
11918 Importing "/tmp/.tmpU4tedt/random_data" to region
119192023-09-22T23:14:26.130ZWARNcrucible: 1003 job Read for connection UpstairsConnection { upstairs_id: da404d58-d3b5-45a9-8123-7d7ea17331ee, session_id: 470ece38-bc72-4a95-acd5-ab8a19a83232, gen: 0 } waiting on 1 deps
11920 test test::out_of_order_arrives_after_first_do_work ... ok
11921 test test::out_of_order_arrives_after_1001_completes ... ok
119222023-09-22T23:14:26.132ZINFOcrucible: current number of open files limit 65536 is already the maximum
11923 Import file_size: 51200 Extent size: 5120 Needed extents: 10
11924 Region already large enough for image
11925 Importing "/tmp/.tmpYqlDGl/random_data" to region
119262023-09-22T23:14:26.132ZINFOcrucible: Created new region file "/tmp/.tmpFWGEYo/region.json"
11927 Import file_size: 51300 Extent size: 5120 Needed extents: 11
11928 Extending region to fit image
119292023-09-22T23:14:26.134ZWARNcrucible: 1003 job Read for connection UpstairsConnection { upstairs_id: e7023dfa-b4ee-47ff-86db-76391d641315, session_id: cff987a0-69f9-40a4-bed7-56a371434922, gen: 0 } waiting on 2 deps
11930 Import file_size: 51100 Extent size: 5120 Needed extents: 10
11931 Region already large enough for image
11932 Importing "/tmp/.tmpCjoWLo/random_data" to region
119332023-09-22T23:14:26.135ZWARNcrucible: 1003 job Read for connection UpstairsConnection { upstairs_id: e7023dfa-b4ee-47ff-86db-76391d641315, session_id: cff987a0-69f9-40a4-bed7-56a371434922, gen: 0 } waiting on 1 deps
11934 Importing "/tmp/.tmphz84Vr/random_data" to region
11935 test test::out_of_order_arrives_after_first_push_next_jobs ... ok
119362023-09-22T23:14:26.137ZINFOcrucible: current number of open files limit 65536 is already the maximum
119372023-09-22T23:14:26.137ZINFOcrucible: Created new region file "/tmp/.tmpbuYlX2/region.json"
119382023-09-22T23:14:26.139ZINFOcrucible: current number of open files limit 65536 is already the maximum
119392023-09-22T23:14:26.139ZINFOcrucible: Opened existing region file "/tmp/.tmpFWGEYo/region.json"
119402023-09-22T23:14:26.139ZINFOcrucible: Database read version 1
119412023-09-22T23:14:26.139ZINFOcrucible: Database write version 1
119422023-09-22T23:14:26.141ZINFOcrucible: UUID: 450d54ff-1683-4680-b6a5-24cc9ce0daf7
119432023-09-22T23:14:26.142ZINFOcrucible: Blocks per extent:4 Total Extents: 2
119442023-09-22T23:14:26.142ZINFOcrucible: UpstairsConnection { upstairs_id: d057f4b7-1b10-42a3-88cf-51785901d47d, session_id: 9afe45e4-1ca0-4807-99fc-8653b658d629, gen: 10 } is now active (read-write)
119452023-09-22T23:14:26.142ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: d057f4b7-1b10-42a3-88cf-51785901d47d, session_id: 9afe45e4-1ca0-4807-99fc-8653b658d629, gen: 10 } to UpstairsConnection { upstairs_id: d057f4b7-1b10-42a3-88cf-51785901d47d, session_id: 9afe45e4-1ca0-4807-99fc-8653b658d629, gen: 10 }
119462023-09-22T23:14:26.142ZWARNcrucible: Signaling to UpstairsConnection { upstairs_id: d057f4b7-1b10-42a3-88cf-51785901d47d, session_id: 9afe45e4-1ca0-4807-99fc-8653b658d629, gen: 10 } thread that UpstairsConnection { upstairs_id: d057f4b7-1b10-42a3-88cf-51785901d47d, session_id: 9afe45e4-1ca0-4807-99fc-8653b658d629, gen: 10 } is being promoted (read-write)
119472023-09-22T23:14:26.142ZWARNcrucible: Crucible Downstairs promoting UpstairsConnection { upstairs_id: d057f4b7-1b10-42a3-88cf-51785901d47d, session_id: 9afe45e4-1ca0-4807-99fc-8653b658d629, gen: 10 } to active, discarding 1 jobs
119482023-09-22T23:14:26.142ZINFOcrucible: UpstairsConnection { upstairs_id: d057f4b7-1b10-42a3-88cf-51785901d47d, session_id: 9afe45e4-1ca0-4807-99fc-8653b658d629, gen: 10 } is now active (read-write)
119492023-09-22T23:14:26.142ZINFOcrucible: current number of open files limit 65536 is already the maximum
119502023-09-22T23:14:26.143ZINFOcrucible: Opened existing region file "/tmp/.tmpbuYlX2/region.json"
119512023-09-22T23:14:26.143ZINFOcrucible: Database read version 1
119522023-09-22T23:14:26.143ZINFOcrucible: Database write version 1
11953 test test::test_complete_work_can_see_none ... ok
119542023-09-22T23:14:26.144ZINFOcrucible: UUID: 83f98483-6271-40ff-97e5-08827ab14d83
119552023-09-22T23:14:26.144ZINFOcrucible: Blocks per extent:4 Total Extents: 2
119562023-09-22T23:14:26.144ZINFOcrucible: UpstairsConnection { upstairs_id: 2a2fb5a4-15e7-480f-9a28-a07bb7742f73, session_id: e0427225-64a8-4195-a672-02c0760a2888, gen: 10 } is now active (read-write)
119572023-09-22T23:14:26.144ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: 2a2fb5a4-15e7-480f-9a28-a07bb7742f73, session_id: e0427225-64a8-4195-a672-02c0760a2888, gen: 10 } to UpstairsConnection { upstairs_id: a1d15123-7067-4c3f-89e4-378e79794086, session_id: 1d8586e9-d692-46f3-84af-ff5f40a5cfdd, gen: 11 }
119582023-09-22T23:14:26.145ZWARNcrucible: Signaling to UpstairsConnection { upstairs_id: 2a2fb5a4-15e7-480f-9a28-a07bb7742f73, session_id: e0427225-64a8-4195-a672-02c0760a2888, gen: 10 } thread that UpstairsConnection { upstairs_id: a1d15123-7067-4c3f-89e4-378e79794086, session_id: 1d8586e9-d692-46f3-84af-ff5f40a5cfdd, gen: 11 } is being promoted (read-write)
119592023-09-22T23:14:26.145ZWARNcrucible: Crucible Downstairs promoting UpstairsConnection { upstairs_id: a1d15123-7067-4c3f-89e4-378e79794086, session_id: 1d8586e9-d692-46f3-84af-ff5f40a5cfdd, gen: 11 } to active, discarding 1 jobs
119602023-09-22T23:14:26.145ZINFOcrucible: UpstairsConnection { upstairs_id: a1d15123-7067-4c3f-89e4-378e79794086, session_id: 1d8586e9-d692-46f3-84af-ff5f40a5cfdd, gen: 11 } is now active (read-write)
119612023-09-22T23:14:26.145ZWARNcrucible: UpstairsConnection { upstairs_id: 2a2fb5a4-15e7-480f-9a28-a07bb7742f73, session_id: e0427225-64a8-4195-a672-02c0760a2888, gen: 10 } cannot grab work lock, 2a2fb5a4-15e7-480f-9a28-a07bb7742f73 is not active!
119622023-09-22T23:14:26.145ZINFOcrucible: current number of open files limit 65536 is already the maximum
119632023-09-22T23:14:26.145ZINFOcrucible: Created new region file "/tmp/.tmpuhw6gO/region.json"
11964 test test::test_complete_work_cannot_see_none_different_upstairs_id ... ok
119652023-09-22T23:14:26.148ZINFOcrucible: current number of open files limit 65536 is already the maximum
119662023-09-22T23:14:26.148ZINFOcrucible: Created new region file "/tmp/.tmpHEvATG/region.json"
119672023-09-22T23:14:26.149ZINFOcrucible: current number of open files limit 65536 is already the maximum
119682023-09-22T23:14:26.149ZINFOcrucible: Opened existing region file "/tmp/.tmpuhw6gO/region.json"
119692023-09-22T23:14:26.149ZINFOcrucible: Database read version 1
119702023-09-22T23:14:26.149ZINFOcrucible: Database write version 1
119712023-09-22T23:14:26.150ZINFOcrucible: UUID: 72c34938-e54c-40f2-a16e-12d71ec544ba
119722023-09-22T23:14:26.150ZINFOcrucible: Blocks per extent:4 Total Extents: 2
119732023-09-22T23:14:26.150ZINFOcrucible: UpstairsConnection { upstairs_id: 6dc1f99f-e171-42d7-963b-de7a3686232f, session_id: 24b68982-d2b2-4031-8e67-90612b3f5688, gen: 10 } is now active (read-write)
119742023-09-22T23:14:26.150ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: 6dc1f99f-e171-42d7-963b-de7a3686232f, session_id: 24b68982-d2b2-4031-8e67-90612b3f5688, gen: 10 } to UpstairsConnection { upstairs_id: 6dc1f99f-e171-42d7-963b-de7a3686232f, session_id: d8c24f4b-68d5-42a8-9e9e-cc29439323c1, gen: 11 }
119752023-09-22T23:14:26.150ZWARNcrucible: Signaling to UpstairsConnection { upstairs_id: 6dc1f99f-e171-42d7-963b-de7a3686232f, session_id: 24b68982-d2b2-4031-8e67-90612b3f5688, gen: 10 } thread that UpstairsConnection { upstairs_id: 6dc1f99f-e171-42d7-963b-de7a3686232f, session_id: d8c24f4b-68d5-42a8-9e9e-cc29439323c1, gen: 11 } is being promoted (read-write)
119762023-09-22T23:14:26.150ZWARNcrucible: Crucible Downstairs promoting UpstairsConnection { upstairs_id: 6dc1f99f-e171-42d7-963b-de7a3686232f, session_id: d8c24f4b-68d5-42a8-9e9e-cc29439323c1, gen: 11 } to active, discarding 1 jobs
119772023-09-22T23:14:26.150ZINFOcrucible: UpstairsConnection { upstairs_id: 6dc1f99f-e171-42d7-963b-de7a3686232f, session_id: d8c24f4b-68d5-42a8-9e9e-cc29439323c1, gen: 11 } is now active (read-write)
119782023-09-22T23:14:26.150ZWARNcrucible: UpstairsConnection { upstairs_id: 6dc1f99f-e171-42d7-963b-de7a3686232f, session_id: 24b68982-d2b2-4031-8e67-90612b3f5688, gen: 10 } cannot grab lock, does not match UpstairsConnection { upstairs_id: 6dc1f99f-e171-42d7-963b-de7a3686232f, session_id: d8c24f4b-68d5-42a8-9e9e-cc29439323c1, gen: 11 }!
11979 test test::test_complete_work_cannot_see_none_same_upstairs_id ... ok
119802023-09-22T23:14:26.153ZINFOcrucible: current number of open files limit 65536 is already the maximum
119812023-09-22T23:14:26.153ZINFOcrucible: Created new region file "/tmp/.tmp7Bkl4G/region.json"
119822023-09-22T23:14:26.154ZINFOcrucible: current number of open files limit 65536 is already the maximum
119832023-09-22T23:14:26.154ZINFOcrucible: Opened existing region file "/tmp/.tmpHEvATG/region.json"
119842023-09-22T23:14:26.154ZINFOcrucible: Database read version 1
119852023-09-22T23:14:26.154ZINFOcrucible: Database write version 1
119862023-09-22T23:14:26.161ZINFOcrucible: UUID: 90179e5a-16a5-46d5-b74c-1dd8822109a4
119872023-09-22T23:14:26.161ZINFOcrucible: Blocks per extent:4 Total Extents: 5
11988 {"msg":"UpstairsConnection { upstairs_id: 55411ce4-0884-4ba0-b98a-b2cd8a769c36, session_id: c062eae8-f235-4a59-ae71-0f843aaba046, gen: 10 } is now active (read-write)","v":0,"name":"crucible","level":30Active Upstairs connections: [UpstairsConnection { upstairs_id: 55411ce4-0884-4ba0-b98a-b2cd8a769c36, session_id: c062eae8-f235-4a59-ae71-0f843aaba046, gen: 10 }]
11989 ,"time":"2023-09-22T23:14:26.162478477Z","hostname":"Crucible Downstairs work queue:
11990 ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
11991 DSW:[1000] EClose New deps:[]
11992 DSW:[1001] EFClose New deps:[]
11993 DSW:[1002] ReOpen New deps:[JobId(1000)]
11994 DSW:[1003] ReOpen New deps:[JobId(1001)]
11995 Done tasks []
11996 last_flush: JobId(0)
11997 --------------------------------------
11998 Got new work: [JobId(1000), JobId(1001), JobId(1002), JobId(1003)]
119992023-09-22T23:14:26.163ZINFOcrucible: current number of open files limit 65536 is already the maximum
120002023-09-22T23:14:26.163ZINFOcrucible: Opened existing region file "/tmp/.tmp7Bkl4G/region.json"
120012023-09-22T23:14:26.163ZINFOcrucible: Database read version 1
120022023-09-22T23:14:26.163ZINFOcrucible: Database write version 1
120032023-09-22T23:14:26.167ZINFOcrucible: UUID: 84243b75-8316-472c-ba11-8b81676900ba
120042023-09-22T23:14:26.167ZINFOcrucible: Blocks per extent:4 Total Extents: 5
12005 {"msg":"UpstairsConnection { upstairs_id: 0cb5c48e-8289-45ce-9f41-b89f63ddb194, session_id: 27220b61-bec0-40c8-a3f7-47a239b4bea0, gen: 10 } is now active (read-write)","Before doing work we have:
12006 v":0,"name":"crucible"Active Upstairs connections: [UpstairsConnection { upstairs_id: 0cb5c48e-8289-45ce-9f41-b89f63ddb194, session_id: 27220b61-bec0-40c8-a3f7-47a239b4bea0, gen: 10 }]
12007 ,"level":30Crucible Downstairs work queue:
12008 DSW:[1000] EClose New deps:[]
12009 DSW:[1001] EFClose New deps:[]
12010 DSW:[1002] Read New deps:[JobId(1000), JobId(1001)]
12011 ,"DSW:[1003] NoOp New deps:[JobId(1000), JobId(1001), JobId(1002)]
12012 time":"DSW:[1004] ReOpen New deps:[JobId(1000), JobId(1001), JobId(1002), JobId(1003)]
12013 2023-09-22T23:14:26.168476029ZDone tasks []
12014 "last_flush: JobId(0)
12015 --------------------------------------
12016 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
12017 Got new work: [JobId(1000), JobId(1001), JobId(1002), JobId(1003), JobId(1004)]
12018 Do IOop 1000
12019 test test::test_extent_new_close_flush_close ... ok
12020 Got m: ExtentLiveCloseAck { upstairs_id: 0cb5c48e-8289-45ce-9f41-b89f63ddb194, session_id: 27220b61-bec0-40c8-a3f7-47a239b4bea0, job_id: JobId(1000), result: Ok((0, 0, false)) }
12021 Do IOop 1001
12022 Got m: ExtentLiveCloseAck { upstairs_id: 0cb5c48e-8289-45ce-9f41-b89f63ddb194, session_id: 27220b61-bec0-40c8-a3f7-47a239b4bea0, job_id: JobId(1001), result: Ok((0, 0, false)) }
12023 Do IOop 1002
12024 {Got m: ReadResponse { upstairs_id: 0cb5c48e-8289-45ce-9f41-b89f63ddb194, session_id: 27220b61-bec0-40c8-a3f7-47a239b4bea0, job_id: JobId(1002), responses: Ok([ReadResponse { eid: 2, offset: Block { value: 1, shift: 9 }, data: b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", block_contexts: [] }]) }
12025 "msg":"current number of open files limit 65536 is already the maximum","v":Do IOop 1003
12026 0,"name":"crucible","level":30Got m: ExtentLiveAckId { upstairs_id: 0cb5c48e-8289-45ce-9f41-b89f63ddb194, session_id: 27220b61-bec0-40c8-a3f7-47a239b4bea0, job_id: JobId(1003), result: Ok(()) }
12027 ,"time":"2023-09-22T23:14:26.16976289Z","hostname"Do IOop 1004
12028 :"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
120292023-09-22T23:14:26.169ZINFOcrucible: Created new region file "/tmp/.tmpoEhsiG/region.json"
12030 Got m: ExtentLiveAckId { upstairs_id: 0cb5c48e-8289-45ce-9f41-b89f63ddb194, session_id: 27220b61-bec0-40c8-a3f7-47a239b4bea0, job_id: JobId(1004), result: Ok(()) }
12031 test test::test_extent_simple_close_flush_close ... ok
120322023-09-22T23:14:26.173ZINFOcrucible: current number of open files limit 65536 is already the maximum
120332023-09-22T23:14:26.173ZINFOcrucible: Created new region file "/tmp/.tmp2IKeMN/region.json"
120342023-09-22T23:14:26.176ZINFOcrucible: current number of open files limit 65536 is already the maximum
120352023-09-22T23:14:26.176ZINFOcrucible: Opened existing region file "/tmp/.tmpoEhsiG/region.json"
120362023-09-22T23:14:26.176ZINFOcrucible: Database read version 1
120372023-09-22T23:14:26.176ZINFOcrucible: Database write version 1
120382023-09-22T23:14:26.179ZINFOcrucible: UUID: 7b8a2fdd-e7fa-41fb-ba6d-29998f3b93da
120392023-09-22T23:14:26.179ZINFOcrucible: Blocks per extent:4 Total Extents: 5
12040 Active Upstairs connections: [UpstairsConnection { upstairs_id: e9c958d8-148a-44b9-9739-f11441835204, session_id: bf487ac1-bcd6-432a-ae01-13b6988c7e6a, gen: 10 }]
12041 {"Crucible Downstairs work queue:
12042 msg":"current number of open files limit 65536 is already the maximum"DSW:[1000] Write New deps:[]
12043 ,"v":DSW:[1001] EClose New deps:[JobId(1000)]
12044 0Done tasks []
12045 ,"last_flush: JobId(0)
12046 name":--------------------------------------
12047 "crucible","level":30Got new work: [JobId(1000), JobId(1001)]
12048 ,"time":"2023-09-22T23:14:26.180039179Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
120492023-09-22T23:14:26.180ZINFOcrucible: Opened existing region file "/tmp/.tmp2IKeMN/region.json"
120502023-09-22T23:14:26.180ZINFOcrucible: Database read version 1
120512023-09-22T23:14:26.180ZINFOcrucible: Database write version 1
120522023-09-22T23:14:26.180ZINFOcrucible: UpstairsConnection { upstairs_id: e9c958d8-148a-44b9-9739-f11441835204, session_id: bf487ac1-bcd6-432a-ae01-13b6988c7e6a, gen: 10 } is now active (read-write)
12053 test test::test_extent_write_close ... ok
120542023-09-22T23:14:26.183ZINFOcrucible: UUID: b58633dc-e777-4cbe-9ca4-68b1a51741e9
120552023-09-22T23:14:26.183ZINFOcrucible: Blocks per extent:4 Total Extents: 5
12056 {"msg":"Active Upstairs connections: [UpstairsConnection { upstairs_id: d7339ef2-846e-45a9-97f5-26e0bae2d192, session_id: c28c0787-e835-4833-a63b-8e7c4ed91207, gen: 10 }]
12057 UpstairsConnection { upstairs_id: d7339ef2-846e-45a9-97f5-26e0bae2d192, session_id: c28c0787-e835-4833-a63b-8e7c4ed91207, gen: 10 } is now active (read-write)","v":0,"name":"Crucible Downstairs work queue:
12058 crucible","level":30DSW:[1000] Write New deps:[]
12059 DSW:[1001] EFClose New deps:[JobId(1000)]
12060 Done tasks []
12061 last_flush: JobId(0)
12062 --------------------------------------
12063 ,"time":"2023-09-22T23:14:26.184537609Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pidGot new work: [JobId(1000), JobId(1001)]
12064 ":4298}
120652023-09-22T23:14:26.184ZINFOcrucible: current number of open files limit 65536 is already the maximum
120662023-09-22T23:14:26.184ZINFOcrucible: Created new region file "/tmp/.tmpxzz7iE/region.json"
12067 test test::test_extent_write_flush_close ... ok
120682023-09-22T23:14:26.188ZINFOcrucible: current number of open files limit 65536 is already the maximum
120692023-09-22T23:14:26.188ZINFOcrucible: Created new region file "/tmp/.tmpse7v29/region.json"
120702023-09-22T23:14:26.191ZINFOcrucible: current number of open files limit 65536 is already the maximum
120712023-09-22T23:14:26.191ZINFOcrucible: Opened existing region file "/tmp/.tmpxzz7iE/region.json"
120722023-09-22T23:14:26.191ZINFOcrucible: Database read version 1
120732023-09-22T23:14:26.191ZINFOcrucible: Database write version 1
120742023-09-22T23:14:26.193ZINFOcrucible: UUID: 28bb9eac-6aa0-42de-b72b-6604429b5dd2
120752023-09-22T23:14:26.193ZINFOcrucible: Blocks per extent:4 Total Extents: 5
12076 {Active Upstairs connections: [UpstairsConnection { upstairs_id: 77cb7963-731b-490c-9ce7-14fe9ddb89d1, session_id: c86ee0fc-1408-4020-bc81-8ca87eab267c, gen: 10 }]
12077 "msg":"Crucible Downstairs work queue:
12078 UpstairsConnection { upstairs_id: 77cb7963-731b-490c-9ce7-14fe9ddb89d1, session_id: c86ee0fc-1408-4020-bc81-8ca87eab267c, gen: 10 } is now active (read-write)","v":0,"name":"DSW:[1000] Write New deps:[]
12079 crucible"DSW:[1001] Flush New deps:[]
12080 ,"level"DSW:[1002] Write New deps:[JobId(1000), JobId(1001)]
12081 :30DSW:[1003] EClose New deps:[JobId(1000), JobId(1001), JobId(1002)]
12082 Done tasks []
12083 last_flush: JobId(0)
12084 --------------------------------------
12085 Got new work: [JobId(1000), JobId(1001), JobId(1002), JobId(1003)]
12086 ,"time":"2023-09-22T23:14:26.194515469Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
120872023-09-22T23:14:26.194ZINFOcrucible: current number of open files limit 65536 is already the maximum
120882023-09-22T23:14:26.194ZINFOcrucible: Opened existing region file "/tmp/.tmpse7v29/region.json"
120892023-09-22T23:14:26.194ZINFOcrucible: Database read version 1
120902023-09-22T23:14:26.195ZINFOcrucible: Database write version 1
12091 {"msg":"UUID: 808c8e16-3443-43e9-bf16-febf4fb4541f","v":0,"name":"crucible","level":30test test::test_extent_write_flush_write_close ... ok
12092 ,"time":"2023-09-22T23:14:26.198199678Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
120932023-09-22T23:14:26.198ZINFOcrucible: Blocks per extent:4 Total Extents: 5
120942023-09-22T23:14:26.198ZINFOcrucible: current number of open files limit 65536 is already the maximum
12095 Active Upstairs connections: [UpstairsConnection { upstairs_id: 3037f5d2-30e9-4f18-b4e1-7161671a03e0, session_id: 1988a056-8d38-43bd-b8fa-9545cfa696cc, gen: 10 }]
12096 Crucible Downstairs work queue:
12097 DSW:[1000] Write New deps:[]
12098 DSW:[1001] Write New deps:[]
12099 DSW:[1002] EFClose New deps:[JobId(1000)]
12100 DSW:[1003] EClose New deps:[JobId(1001)]
12101 Done tasks []
12102 last_flush: JobId(0)
12103 --------------------------------------
12104 Got new work: [JobId(1000), JobId(1001), JobId(1002), JobId(1003)]
121052023-09-22T23:14:26.198ZINFOcrucible: Created new region file "/tmp/.tmpVilIEa/region.json"
121062023-09-22T23:14:26.199ZINFOcrucible: UpstairsConnection { upstairs_id: 3037f5d2-30e9-4f18-b4e1-7161671a03e0, session_id: 1988a056-8d38-43bd-b8fa-9545cfa696cc, gen: 10 } is now active (read-write)
12107 test test::test_extent_write_write_flush_close ... ok
121082023-09-22T23:14:26.203ZINFOcrucible: current number of open files limit 65536 is already the maximum
121092023-09-22T23:14:26.203ZINFOcrucible: Created new region file "/tmp/.tmpxQvbkS/region.json"
121102023-09-22T23:14:26.204ZINFOcrucible: current number of open files limit 65536 is already the maximum
121112023-09-22T23:14:26.204ZINFOcrucible: Opened existing region file "/tmp/.tmpVilIEa/region.json"
121122023-09-22T23:14:26.204ZINFOcrucible: Database read version 1
121132023-09-22T23:14:26.204ZINFOcrucible: Database write version 1
121142023-09-22T23:14:26.204ZINFOcrucible: UUID: b66f61a5-b14a-4ce4-ba9b-6a4c98e2915e
121152023-09-22T23:14:26.204ZINFOcrucible: Blocks per extent:4 Total Extents: 2
12116 test test::test_multiple_read_only_no_job_id_collision ... ok
121172023-09-22T23:14:26.207ZINFOcrucible: current number of open files limit 65536 is already the maximum
121182023-09-22T23:14:26.207ZINFOcrucible: Created new region file "/tmp/.tmpQXuMCi/region.json"
121192023-09-22T23:14:26.207ZINFOcrucible: current number of open files limit 65536 is already the maximum
121202023-09-22T23:14:26.208ZINFOcrucible: Opened existing region file "/tmp/.tmpxQvbkS/region.json"
121212023-09-22T23:14:26.208ZINFOcrucible: Database read version 1
121222023-09-22T23:14:26.208ZINFOcrucible: Database write version 1
121232023-09-22T23:14:26.208ZINFOcrucible: UUID: bcd6b1a8-f754-4069-b16b-256b9067e2d1
121242023-09-22T23:14:26.208ZINFOcrucible: Blocks per extent:4 Total Extents: 2
12125 test test::test_promote_to_active_multi_read_only_different_uuid ... ok
121262023-09-22T23:14:26.211ZINFOcrucible: current number of open files limit 65536 is already the maximum
121272023-09-22T23:14:26.211ZINFOcrucible: Created new region file "/tmp/.tmpIHYD46/region.json"
121282023-09-22T23:14:26.211ZINFOcrucible: current number of open files limit 65536 is already the maximum
121292023-09-22T23:14:26.211ZINFOcrucible: Opened existing region file "/tmp/.tmpQXuMCi/region.json"
121302023-09-22T23:14:26.211ZINFOcrucible: Database read version 1
121312023-09-22T23:14:26.211ZINFOcrucible: Database write version 1
121322023-09-22T23:14:26.212ZINFOcrucible: UUID: 3adb2941-a761-4ec0-9c56-75f1fee90eab
121332023-09-22T23:14:26.212ZINFOcrucible: Blocks per extent:4 Total Extents: 2
121342023-09-22T23:14:26.213ZINFOcrucible: Signaling to UpstairsConnection { upstairs_id: 9ceb7609-1b50-43ef-b9d8-5723c7697fa1, session_id: d28eb097-f28a-423a-8e1e-3fae5a9e9ada, gen: 1 } thread that UpstairsConnection { upstairs_id: 9ceb7609-1b50-43ef-b9d8-5723c7697fa1, session_id: 9a2048e7-004c-4c26-8dd7-e00cb0be5dd2, gen: 1 } is being promoted (read-only)
12135 test test::test_promote_to_active_multi_read_only_same_uuid ... ok
121362023-09-22T23:14:26.215ZINFOcrucible: current number of open files limit 65536 is already the maximum
12137 {"msg":"Created new region file \"/tmp/.tmpDI35aR/region.json\"","v":0,"name":"crucible","level":30{"msg":","timecurrent number of open files limit 65536 is already the maximum"":","v2023-09-22T23:14:26.215563331Z"":,"0hostname":","name":"crucibleip-10-150-1-74.us-west-2.compute.internal"",","pid"level"::429830}
12138 ,"time":"2023-09-22T23:14:26.21559619Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
121392023-09-22T23:14:26.215ZINFOcrucible: Opened existing region file "/tmp/.tmpIHYD46/region.json"
121402023-09-22T23:14:26.215ZINFOcrucible: Database read version 1
121412023-09-22T23:14:26.215ZINFOcrucible: Database write version 1
121422023-09-22T23:14:26.216ZINFOcrucible: UUID: de931ed0-806c-4581-98d6-a0ba1b93aff8
121432023-09-22T23:14:26.216ZINFOcrucible: Blocks per extent:4 Total Extents: 2
12144 ds1: MutexGuard { value: Downstairs { region: Region { dir: "/tmp/.tmpIHYD46", def: RegionDefinition { block_size: 512, extent_size: Block { value: 4, shift: 9 }, extent_count: 2, uuid: de931ed0-806c-4581-98d6-a0ba1b93aff8, encrypted: false, database_read_version: 1, database_write_version: 1 }, extents: [Mutex { data: Opened(Extent { number: 0, read_only: false, block_size: 512, extent_size: Block { value: 4, shift: 9 }, iov_max: 1024, inner: Mutex { data: Inner { file: File { fd: 234 }, metadb: Connection { path: Some("/tmp/.tmpIHYD46/00/000/000.db") }, dirty_blocks: {} } } }) }, Mutex { data: Opened(Extent { number: 1, read_only: false, block_size: 512, extent_size: Block { value: 4, shift: 9 }, iov_max: 1024, inner: Mutex { data: Inner { file: File { fd: 238 }, metadb: Connection { path: Some("/tmp/.tmpIHYD46/00/000/001.db") }, dirty_blocks: {} } } }) }], dirty_extents: {}, read_only: false, log: Logger() }, lossy: false, read_errors: false, write_errors: false, flush_errors: false, active_upstairs: {}, dss: DsStatOuter { ds_stat_wrap: Mutex { is_locked: false, has_waiters: false } }, read_only: false, encrypted: false, address: None, repair_address: None, log: Logger() }, mutex: Mutex { is_locked: true, has_waiters: false } }
12145 
12146 ds2: MutexGuard { value: Downstairs { region: Region { dir: "/tmp/.tmpIHYD46", def: RegionDefinition { block_size: 512, extent_size: Block { value: 4, shift: 9 }, extent_count: 2, uuid: de931ed0-806c-4581-98d6-a0ba1b93aff8, encrypted: false, database_read_version: 1, database_write_version: 1 }, extents: [Mutex { data: Opened(Extent { number: 0, read_only: false, block_size: 512, extent_size: Block { value: 4, shift: 9 }, iov_max: 1024, inner: Mutex { data: Inner { file: File { fd: 234 }, metadb: Connection { path: Some("/tmp/.tmpIHYD46/00/000/000.db") }, dirty_blocks: {} } } }) }, Mutex { data: Opened(Extent { number: 1, read_only: false, block_size: 512, extent_size: Block { value: 4, shift: 9 }, iov_max: 1024, inner: Mutex { data: Inner { file: File { fd: 238 }, metadb: Connection { path: Some("/tmp/.tmpIHYD46/00/000/001.db") }, dirty_blocks: {} } } }) }], dirty_extents: {}, read_only: false, log: Logger() }, lossy: false, read_errors: false, write_errors: false, flush_errors: false, active_upstairs: {19701549-8c58-40ed-bb17-7c74ded37f68: ActiveUpstairs { upstairs_connection: UpstairsConnection { upstairs_id: 19701549-8c58-40ed-bb17-7c74ded37f68, session_id: 9db573ae-8d0b-42da-a5b8-c3f109f81491, gen: 2 }, work: Mutex { is_locked: false, has_waiters: false }, terminate_sender: Sender { chan: Tx { inner: Chan { tx: Tx { block_tail: 0x57d0050, tail_position: 0 }, semaphore: Semaphore { semaphore: Semaphore { permits: 1 }, bound: 1 }, rx_waker: AtomicWaker, tx_count: 1, rx_fields: "..." } } } }}, dss: DsStatOuter { ds_stat_wrap: Mutex { is_locked: false, has_waiters: false } }, read_only: false, encrypted: false, address: None, repair_address: None, log: Logger() }, mutex: Mutex { is_locked: true, has_waiters: false } }
12147 
121482023-09-22T23:14:26.217ZINFOcrucible: UpstairsConnection { upstairs_id: 19701549-8c58-40ed-bb17-7c74ded37f68, session_id: 9db573ae-8d0b-42da-a5b8-c3f109f81491, gen: 2 } is now active (read-write)
121492023-09-22T23:14:26.217ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: 19701549-8c58-40ed-bb17-7c74ded37f68, session_id: 9db573ae-8d0b-42da-a5b8-c3f109f81491, gen: 2 } to UpstairsConnection { upstairs_id: 5e379a22-3c2f-4551-9fc9-ad3870755d7a, session_id: 97bba431-3401-427f-a7bd-ebb3b3269626, gen: 1 }
12150 test test::test_promote_to_active_multi_read_write_different_uuid_lower_gen ... ok
121512023-09-22T23:14:26.218ZINFOcrucible: current number of open files limit 65536 is already the maximum
121522023-09-22T23:14:26.219ZINFOcrucible: Opened existing region file "/tmp/.tmpDI35aR/region.json"
121532023-09-22T23:14:26.219ZINFOcrucible: Database read version 1
121542023-09-22T23:14:26.219ZINFOcrucible: Database write version 1
121552023-09-22T23:14:26.219ZINFOcrucible: current number of open files limit 65536 is already the maximum
121562023-09-22T23:14:26.219ZINFOcrucible: Created new region file "/tmp/.tmpfNTSSm/region.json"
121572023-09-22T23:14:26.220ZINFOcrucible: UUID: 52f7f69b-43c7-4f37-9439-1be23add535f
121582023-09-22T23:14:26.220ZINFOcrucible: Blocks per extent:4 Total Extents: 2
121592023-09-22T23:14:26.220ZINFOcrucible: UpstairsConnection { upstairs_id: e908b04f-d364-4854-bb49-6f0d5be2289f, session_id: 3f4bb7da-417a-4c11-9b21-bcda36aee8d2, gen: 1 } is now active (read-write)
121602023-09-22T23:14:26.220ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: e908b04f-d364-4854-bb49-6f0d5be2289f, session_id: 3f4bb7da-417a-4c11-9b21-bcda36aee8d2, gen: 1 } to UpstairsConnection { upstairs_id: 398953cd-7d91-4ad5-9bbc-a75405eababb, session_id: 81c7c98c-90e2-4ce3-a767-d7ce7df9ca4e, gen: 1 }
12161 Populated 11 extents by copying 51712 bytes (101 blocks)
12162 test test::test_promote_to_active_multi_read_write_different_uuid_same_gen ... ok
121632023-09-22T23:14:26.223ZINFOcrucible: current number of open files limit 65536 is already the maximum
121642023-09-22T23:14:26.223ZINFOcrucible: Created new region file "/tmp/.tmpcxko0X/region.json"
121652023-09-22T23:14:26.224ZINFOcrucible: current number of open files limit 65536 is already the maximum
121662023-09-22T23:14:26.224ZINFOcrucible: Opened existing region file "/tmp/.tmpfNTSSm/region.json"
121672023-09-22T23:14:26.224ZINFOcrucible: Database read version 1
121682023-09-22T23:14:26.224ZINFOcrucible: Database write version 1
121692023-09-22T23:14:26.225ZINFOcrucible: UUID: 803d56cc-604b-4a1e-914d-3094ef59ab32
12170 {"msg":"Blocks per extent:4 Total Extents: 2Populated 10 extents by copying 51200 bytes (100 blocks)
12171 ","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:26.225438036Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
12172 Export total_size: 56320 Extent size:5120 Total Extents:11
12173 Exporting from start_block: 0 count:101
121742023-09-22T23:14:26.225ZINFOcrucible: current number of open files limit 65536 is already the maximum
121752023-09-22T23:14:26.226ZINFOcrucible: Opened existing region file "/tmp/.tmpcxko0X/region.json"
121762023-09-22T23:14:26.226ZINFOcrucible: Database read version 1
121772023-09-22T23:14:26.226ZINFOcrucible: Database write version 1
12178 Populated 10 extents by copying 51200 bytes (100 blocks)
121792023-09-22T23:14:26.226ZINFOcrucible: UpstairsConnection { upstairs_id: 8b47ade1-a217-4690-9bae-35fa541c9998, session_id: 49b5d662-290a-41d8-a1f6-e053df67278f, gen: 1 } is now active (read-write)
121802023-09-22T23:14:26.226ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: 8b47ade1-a217-4690-9bae-35fa541c9998, session_id: 49b5d662-290a-41d8-a1f6-e053df67278f, gen: 1 } to UpstairsConnection { upstairs_id: 8b47ade1-a217-4690-9bae-35fa541c9998, session_id: 42e0ccac-f200-45c4-84e8-d7825764fe9a, gen: 2 }
121812023-09-22T23:14:26.226ZWARNcrucible: Signaling to UpstairsConnection { upstairs_id: 8b47ade1-a217-4690-9bae-35fa541c9998, session_id: 49b5d662-290a-41d8-a1f6-e053df67278f, gen: 1 } thread that UpstairsConnection { upstairs_id: 8b47ade1-a217-4690-9bae-35fa541c9998, session_id: 42e0ccac-f200-45c4-84e8-d7825764fe9a, gen: 2 } is being promoted (read-write)
121822023-09-22T23:14:26.226ZINFOcrucible: UpstairsConnection { upstairs_id: 8b47ade1-a217-4690-9bae-35fa541c9998, session_id: 42e0ccac-f200-45c4-84e8-d7825764fe9a, gen: 2 } is now active (read-write)
12183 test test::test_promote_to_active_multi_read_write_same_uuid_larger_gen ... {ok
12184 "msg":"UUID: 4892109d-aab0-4aea-b071-15726cbd6530","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:26.227387872Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
121852023-09-22T23:14:26.227ZINFOcrucible: Blocks per extent:4 Total Extents: 2
121862023-09-22T23:14:26.227ZINFOcrucible: UpstairsConnection { upstairs_id: 73dae26e-1e29-42b5-9ed1-ce87c9338289, session_id: 782ed9a4-b5c3-4a6a-9a4e-ac45001d1939, gen: 1 } is now active (read-write)
121872023-09-22T23:14:26.227ZWARNcrucible: Attempting RW takeover from UpstairsConnection { upstairs_id: 73dae26e-1e29-42b5-9ed1-ce87c9338289, session_id: 782ed9a4-b5c3-4a6a-9a4e-ac45001d1939, gen: 1 } to UpstairsConnection { upstairs_id: 73dae26e-1e29-42b5-9ed1-ce87c9338289, session_id: 804a6b78-70a2-40e8-a86f-1ff5c54737d2, gen: 1 }
121882023-09-22T23:14:26.228ZINFOcrucible: current number of open files limit 65536 is already the maximum
121892023-09-22T23:14:26.228ZINFOcrucible: Created new region file "/tmp/.tmpar7POn/region.json"
12190 test test::test_promote_to_active_multi_read_write_same_uuid_same_gen ... ok
12191 Export total_size: 51200 Extent size:5120 Total Extents:10
12192 Exporting from start_block: 0 count:100
121932023-09-22T23:14:26.229ZINFOcrucible: current number of open files limit 65536 is already the maximum
121942023-09-22T23:14:26.229ZINFOcrucible: Created new region file "/tmp/.tmp24NmSA/region.json"
12195 {"msg":"current number of open files limit 65536 is already the maximum","v":0,"name":"crucible","level":30Populated 10 extents by copying 51200 bytes (100 blocks)
12196 ,"time":"2023-09-22T23:14:26.231319321Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
121972023-09-22T23:14:26.231ZINFOcrucible: Opened existing region file "/tmp/.tmpar7POn/region.json"
121982023-09-22T23:14:26.231ZINFOcrucible: Database read version 1
121992023-09-22T23:14:26.231ZINFOcrucible: Database write version 1
122002023-09-22T23:14:26.232ZINFOcrucible: UUID: 8a20b68b-256d-4dc5-a918-562bc0077aac
122012023-09-22T23:14:26.232ZINFOcrucible: Blocks per extent:4 Total Extents: 2
122022023-09-22T23:14:26.232ZINFOcrucible: current number of open files limit 65536 is already the maximum
122032023-09-22T23:14:26.232ZINFOcrucible: Opened existing region file "/tmp/.tmp24NmSA/region.json"
122042023-09-22T23:14:26.232ZINFOcrucible: Database read version 1
122052023-09-22T23:14:26.233ZINFOcrucible: Database write version 1
12206 test test::test_promote_to_active_one_read_only ... ok
122072023-09-22T23:14:26.234ZINFOcrucible: UUID: 54bbf592-cabd-40af-9fe7-2224ae97382e
122082023-09-22T23:14:26.234ZINFOcrucible: Blocks per extent:4 Total Extents: 2
12209 Export total_size: 51200 Extent size:5120 Total Extents:10
12210 Exporting from start_block: 0 count:100
122112023-09-22T23:14:26.234ZINFOcrucible: current number of open files limit 65536 is already the maximum
122122023-09-22T23:14:26.234ZINFOcrucible: Created new region file "/tmp/.tmpZVPXLF/region.json"
122132023-09-22T23:14:26.234ZINFOcrucible: UpstairsConnection { upstairs_id: 6a624d39-d4f6-4cc6-9590-d28983cf34a0, session_id: 22876128-f147-4ab2-bf2d-9b227d57490d, gen: 1 } is now active (read-write)
12214 test test::test_promote_to_active_one_read_write ... ok
12215 Read and wrote out 101 blocks
122162023-09-22T23:14:26.236ZINFOcrucible: current number of open files limit 65536 is already the maximum
122172023-09-22T23:14:26.236ZINFOcrucible: Created new region file "/tmp/.tmp31s0dl/region.json"
122182023-09-22T23:14:26.238ZINFOcrucible: current number of open files limit 65536 is already the maximum
122192023-09-22T23:14:26.239ZINFOcrucible: Opened existing region file "/tmp/.tmpZVPXLF/region.json"
122202023-09-22T23:14:26.239ZINFOcrucible: Database read version 1
122212023-09-22T23:14:26.239ZINFOcrucible: Database write version 1
122222023-09-22T23:14:26.241ZINFOcrucible: UUID: 74f443cd-4ab5-4618-8512-dc2ae037a0c8
12223 {"msg":"Blocks per extent:4 Total Extents: 2","v":0,"name":"crucible","level":30Active Upstairs connections: [UpstairsConnection { upstairs_id: b2f7a8d9-e891-4b8a-9d71-3b7f09ce387c, session_id: 7c0d32a6-2649-495c-bbdb-ce1c249a507e, gen: 10 }]
12224 ,"time":"2023-09-22T23:14:26.241274163Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"Crucible Downstairs work queue:
12225 :4298}
12226 {DSW:[1000] Read New deps:[]
12227 "msg":"DSW:[1001] Read New deps:[JobId(1000)]
12228 Done tasks []
12229 UpstairsConnection { upstairs_id: b2f7a8d9-e891-4b8a-9d71-3b7f09ce387c, session_id: 7c0d32a6-2649-495c-bbdb-ce1c249a507e, gen: 10 } is now active (read-write)"last_flush: JobId(0)
12230 ,--------------------------------------
12231 "v":0,"name":"crucible","level":30Got new work: [JobId(1000), JobId(1001)]
12232 ,"time":"2023-09-22T23:14:26.241328529Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",Do IOop 1000
12233 "pid":4298}
12234 Got m: ReadResponse { upstairs_id: b2f7a8d9-e891-4b8a-9d71-3b7f09ce387c, session_id: 7c0d32a6-2649-495c-bbdb-ce1c249a507e, job_id: JobId(1000), responses: Ok([ReadResponse { eid: 0, offset: Block { value: 1, shift: 9 }, data: b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", block_contexts: [] }]) }
12235 Read and wrote out 100 blocks
12236 Do IOop 1001
12237 Got m: ReadResponse { upstairs_id: b2f7a8d9-e891-4b8a-9d71-3b7f09ce387c, session_id: 7c0d32a6-2649-495c-bbdb-ce1c249a507e, job_id: JobId(1001), responses: Ok([ReadResponse { eid: 1, offset: Block { value: 1, shift: 9 }, data: b"\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", block_contexts: [] }]) }
12238 Active Upstairs connections: [UpstairsConnection { upstairs_id: b2f7a8d9-e891-4b8a-9d71-3b7f09ce387c, session_id: 7c0d32a6-2649-495c-bbdb-ce1c249a507e, gen: 10 }]
12239 Crucible Downstairs work queue: Empty
12240 Done tasks [JobId(1000), JobId(1001)]
12241 last_flush: JobId(0)
12242 --------------------------------------
12243 test test::import_test_too_large ... ok
122442023-09-22T23:14:26.242ZINFOcrucible: current number of open files limit 65536 is already the maximum
122452023-09-22T23:14:26.242ZINFOcrucible: Created new region file "/tmp/.tmpZP1sMk/region.json"
12246 test test::test_simple_read ... ok
122472023-09-22T23:14:26.244ZINFOcrucible: current number of open files limit 65536 is already the maximum
122482023-09-22T23:14:26.244ZINFOcrucible: Created new region file "/tmp/.tmpqubsKp/region.json"
122492023-09-22T23:14:26.245ZINFOcrucible: current number of open files limit 65536 is already the maximum
122502023-09-22T23:14:26.245ZINFOcrucible: Opened existing region file "/tmp/.tmp31s0dl/region.json"
122512023-09-22T23:14:26.245ZINFOcrucible: Database read version 1
122522023-09-22T23:14:26.245ZINFOcrucible: Database write version 1
12253 test test::import_test_basic ... ok
122542023-09-22T23:14:26.248ZINFOcrucible: current number of open files limit 65536 is already the maximum
122552023-09-22T23:14:26.248ZINFOcrucible: Created new region file "/tmp/.tmpFvOuef/region.json"
122562023-09-22T23:14:26.250ZINFOcrucible: UUID: 1607a550-5075-492a-b42f-150b765059f9
122572023-09-22T23:14:26.250ZINFOcrucible: Blocks per extent:4 Total Extents: 5
12258 test test::import_test_basic_read_blocks ... ok
122592023-09-22T23:14:26.251ZINFOcrucible: Crucible Version: Crucible Version: 0.0.1 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main rustc: 1.70.0 stable x86_64-unknown-illumos Cargo: x86_64-unknown-illumos Debug: true Opt level: 0 task = main
122602023-09-22T23:14:26.251ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4 task = main
122612023-09-22T23:14:26.251ZINFOcrucible: Using address: 127.0.0.1:5557 task = main
122622023-09-22T23:14:26.251ZINFOcrucible: current number of open files limit 65536 is already the maximum
122632023-09-22T23:14:26.251ZINFOcrucible: Created new region file "/tmp/.tmpFAaLjw/region.json"
122642023-09-22T23:14:26.251ZINFOcrucible: Repair listens on 127.0.0.1:5558 task = repair
122652023-09-22T23:14:26.252ZINFOcrucible: listening local_addr = 127.0.0.1:5558 task = repair
122662023-09-22T23:14:26.252ZINFOcrucible: Using repair address: 127.0.0.1:5558 task = main
122672023-09-22T23:14:26.252ZINFOcrucible: No SSL acceptor configured task = main
122682023-09-22T23:14:26.252ZINFOcrucible: current number of open files limit 65536 is already the maximum
122692023-09-22T23:14:26.252ZINFOcrucible: Opened existing region file "/tmp/.tmpZP1sMk/region.json"
122702023-09-22T23:14:26.252ZINFOcrucible: Database read version 1
122712023-09-22T23:14:26.252ZINFOcrucible: Database write version 1
122722023-09-22T23:14:26.253ZINFOcrucible: current number of open files limit 65536 is already the maximum
12273 {"msg":"Opened existing region file \"/tmp/.tmpqubsKp/region.json\"","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:26.253501449Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":{4298}
12274 "msg":"{listening on 127.0.0.1:5557"",msg":""v":Database read version 1"0,",v"":name0":,""crucible"name":","crucible"level",":level":3030,"time":"2023-09-22T23:14:26.253551767Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""pid"time:"4298:"}
12275 2023-09-22T23:14:26.253551923Z","hostname"{:""msg"ip-10-150-1-74.us-west-2.compute.internal":","pid":Database write version 1"4298,"v":,0","task"name":":"crucible"main","level"}:
12276 30,"time":"2023-09-22T23:14:26.253605261Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
122772023-09-22T23:14:26.254ZINFOcrucible: accepted connection from 127.0.0.1:49975 task = main
122782023-09-22T23:14:26.254ZINFOcrucible: Connection request from 935b1a66-7a6c-4fa1-8abe-908a1d7bfaac with version 3 task = proc
122792023-09-22T23:14:26.254ZERROcrucible: connection (127.0.0.1:49975) Exits with error: Required version 4, Or [3] got 3
122802023-09-22T23:14:26.254ZINFOcrucible: current number of open files limit 65536 is already the maximum
122812023-09-22T23:14:26.254ZINFOcrucible: Opened existing region file "/tmp/.tmpFvOuef/region.json"
122822023-09-22T23:14:26.254ZINFOcrucible: Database read version 1
122832023-09-22T23:14:26.254ZINFOcrucible: Database write version 1
12284 Read and wrote out 100 blocks
12285 test test::test_version_downrev ... ok
122862023-09-22T23:14:26.257ZINFOcrucible: UUID: e85e0099-05b3-4835-89a6-80c1c5211ec4
122872023-09-22T23:14:26.257ZINFOcrucible: Blocks per extent:4 Total Extents: 5
122882023-09-22T23:14:26.257ZINFOcrucible: current number of open files limit 65536 is already the maximum
122892023-09-22T23:14:26.257ZINFOcrucible: Opened existing region file "/tmp/.tmpFAaLjw/region.json"
122902023-09-22T23:14:26.257ZINFOcrucible: Database read version 1
122912023-09-22T23:14:26.257ZINFOcrucible: Database write version 1
122922023-09-22T23:14:26.258ZINFOcrucible: UUID: d7e10717-6bc5-4ee2-8774-7145f63de841
122932023-09-22T23:14:26.258ZINFOcrucible: Blocks per extent:4 Total Extents: 5
122942023-09-22T23:14:26.258ZINFOcrucible: Crucible Version: Crucible Version: 0.0.1 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main rustc: 1.70.0 stable x86_64-unknown-illumos Cargo: x86_64-unknown-illumos Debug: true Opt level: 0 task = main
122952023-09-22T23:14:26.259ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4 task = main
122962023-09-22T23:14:26.259ZINFOcrucible: Using address: 127.0.0.1:5555 task = main
122972023-09-22T23:14:26.259ZINFOcrucible: Repair listens on 127.0.0.1:5556 task = repair
122982023-09-22T23:14:26.259ZINFOcrucible: listening local_addr = 127.0.0.1:5556 task = repair
122992023-09-22T23:14:26.259ZINFOcrucible: Using repair address: 127.0.0.1:5556 task = main
123002023-09-22T23:14:26.259ZINFOcrucible: No SSL acceptor configured task = main
123012023-09-22T23:14:26.259ZINFOcrucible: listening on 127.0.0.1:5555 task = main
123022023-09-22T23:14:26.260ZINFOcrucible: accepted connection from 127.0.0.1:40010 task = main
123032023-09-22T23:14:26.260ZINFOcrucible: Connection request from f5694906-48f3-4395-8bea-4227cf12f387 with version 4 task = proc
12304 {{"msg":""msg":upstairs UpstairsConnection { upstairs_id: f5694906-48f3-4395-8bea-4227cf12f387, session_id: 406accdd-25fa-4bb6-b7ec-24d088ab7351, gen: 1 } connected, version 4"","v":0UUID: db42931f-715c-43d7-abac-4e896436b1b0","name":",crucible"",v"":level":300,"name":"crucible","level":30,"time":"2023-09-22T23:14:26.260362385Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298,"task":"proc"}
12305 ,"time":"2023-09-22T23:14:26.260368346Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298}
123062023-09-22T23:14:26.260ZINFOcrucible: Blocks per extent:4 Total Extents: 5
123072023-09-22T23:14:26.260ZWARNcrucible: 1002 job Read for connection UpstairsConnection { upstairs_id: 4d29a5db-9e90-4c9c-922a-08d1aebbce46, session_id: b4ff52ee-dd5c-407f-acf1-59120c5095cb, gen: 0 } waiting on 1 deps
123082023-09-22T23:14:26.261ZINFOcrucible: Crucible Version: Crucible Version: 0.0.1 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main rustc: 1.70.0 stable x86_64-unknown-illumos Cargo: x86_64-unknown-illumos Debug: true Opt level: 0 task = main
123092023-09-22T23:14:26.261ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4 task = main
123102023-09-22T23:14:26.261ZINFOcrucible: Using address: 127.0.0.1:5561 task = main
123112023-09-22T23:14:26.261ZINFOcrucible: Repair listens on 127.0.0.1:5562 task = repair
123122023-09-22T23:14:26.261ZINFOcrucible: listening local_addr = 127.0.0.1:5562 task = repair
12313 test test::import_test_too_small ... ok
123142023-09-22T23:14:26.261ZINFOcrucible: Using repair address: 127.0.0.1:5562 task = main
123152023-09-22T23:14:26.261ZINFOcrucible: No SSL acceptor configured task = main
12316 {"msg":"listening on 127.0.0.1:5561","v":0,"name":"crucible","level":{30"{msg":""msg":"Crucible Version: Crucible Version: 0.0.1\n,"2002 job Flush for connection UpstairsConnection { upstairs_id: 4d29a5db-9e90-4c9c-922a-08d1aebbce46, session_id: b4ff52ee-dd5c-407f-acf1-59120c5095cb, gen: 0 } waiting on 1 depstimeCommit SHA: ed48f294784d46ea7d4bb99336918b74358eca46"\n:""2023-09-22T23:14:26.262001644Z",","v"hostname"::"0,"name":ip-10-150-1-74.us-west-2.compute.internal"Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main,"\npid"":4298rustc: 1.70.0 stable x86_64-unknown-illumos\ncrucible",","taskCargo: x86_64-unknown-illumos Debug: true Opt level: 0"":level"","mainv""}:
12317 0,"name":"crucible","level":30:40,"time":"2023-09-22T23:14:26.262048284Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298,"task":"main","}
12318 time":"2023-09-22T23:14:26.262053724Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","Upstairs <-> Downstairs Message Version: 4"pid,""v"::0,"4298name":"crucible","level":}30
12319 ,"time":"2023-09-22T23:14:26.262099812Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298,"task":"main"}
123202023-09-22T23:14:26.262ZINFOcrucible: Using address: 127.0.0.1:5563 task = main
123212023-09-22T23:14:26.262ZINFOcrucible: Repair listens on 127.0.0.1:5564 task = repair
123222023-09-22T23:14:26.262ZINFOcrucible: listening local_addr = 127.0.0.1:5564 task = repair
12323 {"msg":"accepted connection from 127.0.0.1:36611","v":0,"name":"crucible","level":30{"msg":"Using repair address: 127.0.0.1:5564,""time":","v":2023-09-22T23:14:26.262570016Z"0,",name":""crucible"hostname":,""level":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":4298,"task":"main"}
12324 ,"time":"2023-09-22T23:14:26.262603753Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4298,"task":"main"}
123252023-09-22T23:14:26.262ZINFOcrucible: No SSL acceptor configured task = main
123262023-09-22T23:14:26.262ZINFOcrucible: listening on 127.0.0.1:5563 task = main
123272023-09-22T23:14:26.262ZINFOcrucible: Connection request from 8917f57d-2ffe-4231-a9c4-639afb7355f1 with version 5 task = proc
123282023-09-22T23:14:26.263ZWARNcrucible: downstairs and upstairs using different but compatible versions, Upstairs is 5, but supports [4, 5], downstairs is 4 task = {proc
12329 "msg":"{accepted connection from 127.0.0.1:34699""msg":","v":0,"name":"crucible","level":30upstairs UpstairsConnection { upstairs_id: 8917f57d-2ffe-4231-a9c4-639afb7355f1, session_id: d4ae3f49-545e-4dc9-92b9-296413a08ae6, gen: 1 } connected, version 4","v":0,"name":"crucible","level":30,","time":time"":"2023-09-22T23:14:26.263067864Z"2023-09-22T23:14:26.263063015Z",","hostname":hostname"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal,"","pid"pid:":42984298,",task"":"task":proc""main"}
12330 }
123312023-09-22T23:14:26.263ZINFOcrucible: Connection request from ab4ca4d2-5662-4514-b278-a597af86ed19 with version 8 task = proc
123322023-09-22T23:14:26.263ZWARNcrucible: downstairs and upstairs using different but compatible versions, Upstairs is 8, but supports [3, 4, 5], downstairs is 4 task = proc
123332023-09-22T23:14:26.263ZINFOcrucible: upstairs UpstairsConnection { upstairs_id: ab4ca4d2-5662-4514-b278-a597af86ed19, session_id: 023d6dc1-cb61-433b-973a-d4952af26fd5, gen: 1 } connected, version 4 task = proc
12334 test test::test_version_match ... ok
12335 test test::two_job_chains ... ok
12336 test test::unblock_job ... ok
123372023-09-22T23:14:26.264ZINFOcrucible: UUID: c9c30354-b358-4f32-b67e-4473d8e10b4f
123382023-09-22T23:14:26.264ZINFOcrucible: Blocks per extent:4 Total Extents: 5
123392023-09-22T23:14:26.266ZINFOcrucible: Crucible Version: Crucible Version: 0.0.1 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main rustc: 1.70.0 stable x86_64-unknown-illumos Cargo: x86_64-unknown-illumos Debug: true Opt level: 0 task = main
123402023-09-22T23:14:26.266ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4 task = main
123412023-09-22T23:14:26.266ZINFOcrucible: Using address: 127.0.0.1:5579 task = main
123422023-09-22T23:14:26.266ZINFOcrucible: Repair listens on 127.0.0.1:5560 task = repair
123432023-09-22T23:14:26.266ZINFOcrucible: listening local_addr = 127.0.0.1:5560 task = repair
123442023-09-22T23:14:26.267ZINFOcrucible: Using repair address: 127.0.0.1:5560 task = main
123452023-09-22T23:14:26.267ZINFOcrucible: No SSL acceptor configured task = main
12346 test test::test_version_uprev_compatable ... ok
123472023-09-22T23:14:26.267ZINFOcrucible: listening on 127.0.0.1:5579 task = main
123482023-09-22T23:14:26.267ZWARNcrucible: 1002 job Read for connection UpstairsConnection { upstairs_id: 22fba83c-ad11-4df7-879f-4755fbab107b, session_id: 09611d86-6d2c-4de4-a752-1ff1b5a5a940, gen: 0 } waiting on 1 deps
123492023-09-22T23:14:26.267ZWARNcrucible: 1002 job Read for connection UpstairsConnection { upstairs_id: 2b7ff4d2-06f8-47d4-9f52-149c4f0909af, session_id: cf5e0281-b491-4321-b274-e3a7d839621b, gen: 0 } waiting on 1 deps
123502023-09-22T23:14:26.267ZWARNcrucible: 1002 job Read for connection UpstairsConnection { upstairs_id: b8c2874c-4b76-449d-92ad-d2d522190d37, session_id: 724a3ccc-41c5-4120-9882-ab3be9385edc, gen: 0 } waiting on 1 deps
123512023-09-22T23:14:26.267ZINFOcrucible: accepted connection from 127.0.0.1:65411 task = main
123522023-09-22T23:14:26.267ZINFOcrucible: Connection request from 859b4fe6-13ef-4e36-b8ad-25ce7fbfbb3e with version 5 task = proc
123532023-09-22T23:14:26.268ZERROcrucible: connection (127.0.0.1:65411) Exits with error: Required version 4, Or [5] got 5
12354 test test::test_version_uprev_list ... ok
12355 test test::unblock_job_chain ... ok
12356 test test::unblock_job_chain_first_is_flush ... ok
12357 test test::unblock_job_chain_second_is_flush ... ok
12358 test test::test_version_uprev_only ... ok
12359 test test::you_had_one_job ... ok
123602023-09-22T23:14:26.270ZWARNcrucible: 1002 job Flush for connection UpstairsConnection { upstairs_id: 3cd5bddc-7b50-446f-b9b5-925e8378021b, session_id: 58226fae-67bc-48ad-ad12-e9aee903862e, gen: 0 } waiting on 1 deps
12361 test test::unblock_job_upstairs_sends_big_deps ... ok
12362 test region::test::test_flush_after_multiple_disjoint_writes ... ok
12363 test region::test::test_big_extent_full_write_and_flush ... ok
12364 
12365 test result: ok. 156 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 1.50s
12366 
12367 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_downstairs-3ed9735920c1592d --nocapture`
12368 
12369 running 0 tests
12370 
12371 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
12372 
12373 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_hammer-9622fb9be260fb45 --nocapture`
12374 
12375 running 0 tests
12376 
12377 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
12378 
12379 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_integration_tests-8902d603847d3610 --nocapture`
12380 
12381 running 57 tests
12382 Sep 22 23:14:27.540 INFO current number of open files limit 65536 is already the maximum
12383 Sep 22 23:14:27.540 INFO current number of open files limit 65536 is already the maximum
12384 Sep 22 23:14:27.540 INFO current number of open files limit 65536 is already the maximum
12385 Sep 22 23:14:27.540 INFO current number of open files limit 65536 is already the maximum
12386 Sep 22 23:14:27.540 INFO current number of open files limit 65536 is already the maximum
12387 Sep 22 23:14:27.540 INFO current number of open files limit 65536 is already the maximum
12388 Sep 22 23:14:27.540 INFO current number of open files limit 65536 is already the maximum
12389 Sep 22 23:14:27.540 INFO current number of open files limit 65536 is already the maximum
12390 Sep 22 23:14:27.544 INFO Created new region file "/tmp/downstairs-4I4s6nIW/region.json"
12391 Sep 22 23:14:27.544 INFO Created new region file "/tmp/downstairs-ibnrqPCO/region.json"
12392 Sep 22 23:14:27.544 INFO Created new region file "/tmp/downstairs-oCVv3Vu2/region.json"
12393 Sep 22 23:14:27.544 INFO Created new region file "/tmp/downstairs-mVeBUKlC/region.json"
12394 Sep 22 23:14:27.544 INFO Created new region file "/tmp/downstairs-7lqwXVde/region.json"
12395 Sep 22 23:14:27.544 INFO Created new region file "/tmp/downstairs-ouxUBI5R/region.json"
12396 Sep 22 23:14:27.544 INFO Created new region file "/tmp/downstairs-uVlKmCnF/region.json"
12397 Sep 22 23:14:27.544 INFO Created new region file "/tmp/downstairs-iZvqbKa5/region.json"
12398 Sep 22 23:14:27.556 INFO current number of open files limit 65536 is already the maximum
12399 Sep 22 23:14:27.556 INFO Opened existing region file "/tmp/downstairs-ouxUBI5R/region.json"
12400 Sep 22 23:14:27.556 INFO Database read version 1
12401 Sep 22 23:14:27.556 INFO Database write version 1
12402 Sep 22 23:14:27.558 INFO current number of open files limit 65536 is already the maximum
12403 Sep 22 23:14:27.558 INFO Opened existing region file "/tmp/downstairs-ibnrqPCO/region.json"
12404 Sep 22 23:14:27.558 INFO Database read version 1
12405 Sep 22 23:14:27.558 INFO Database write version 1
12406 Sep 22 23:14:27.558 INFO current number of open files limit 65536 is already the maximum
12407 Sep 22 23:14:27.558 INFO Opened existing region file "/tmp/downstairs-uVlKmCnF/region.json"
12408 Sep 22 23:14:27.558 INFO Database read version 1
12409 Sep 22 23:14:27.558 INFO Database write version 1
12410 Sep 22 23:14:27.558 INFO current number of open files limit 65536 is already the maximum
12411 Sep 22 23:14:27.558 INFO Opened existing region file "/tmp/downstairs-7lqwXVde/region.json"
12412 Sep 22 23:14:27.558 INFO Database read version 1
12413 Sep 22 23:14:27.558 INFO Database write version 1
12414 Sep 22 23:14:27.559 INFO current number of open files limit 65536 is already the maximum
12415 Sep 22 23:14:27.559 INFO current number of open files limit 65536 is already the maximum
12416 Sep 22 23:14:27.559 INFO Opened existing region file "/tmp/downstairs-iZvqbKa5/region.json"
12417 Sep 22 23:14:27.559 INFO Opened existing region file "/tmp/downstairs-mVeBUKlC/region.json"
12418 Sep 22 23:14:27.559 INFO Database read version 1
12419 Sep 22 23:14:27.559 INFO Database read version 1
12420 Sep 22 23:14:27.559 INFO Database write version 1
12421 Sep 22 23:14:27.559 INFO Database write version 1
12422 Sep 22 23:14:27.560 INFO current number of open files limit 65536 is already the maximum
12423 Sep 22 23:14:27.560 INFO Opened existing region file "/tmp/downstairs-oCVv3Vu2/region.json"
12424 Sep 22 23:14:27.560 INFO Database read version 1
12425 Sep 22 23:14:27.560 INFO Database write version 1
12426 Sep 22 23:14:27.563 INFO current number of open files limit 65536 is already the maximum
12427 Sep 22 23:14:27.563 INFO Opened existing region file "/tmp/downstairs-4I4s6nIW/region.json"
12428 Sep 22 23:14:27.563 INFO Database read version 1
12429 Sep 22 23:14:27.563 INFO Database write version 1
12430 Sep 22 23:14:27.563 INFO UUID: d2285a39-bf6a-4487-9493-8622023b1e4b
12431 Sep 22 23:14:27.563 INFO Blocks per extent:5 Total Extents: 2
12432 Sep 22 23:14:27.564 INFO Crucible Version: Crucible Version: 0.0.1
12433 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12434 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12435 rustc: 1.70.0 stable x86_64-unknown-illumos
12436 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12437 Sep 22 23:14:27.564 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12438 Sep 22 23:14:27.564 INFO Using address: 127.0.0.1:32776, task: main
12439 Sep 22 23:14:27.564 INFO UUID: b63c1426-990b-43e7-baa9-a926ab8df052
12440 Sep 22 23:14:27.564 INFO Blocks per extent:5 Total Extents: 2
12441 Sep 22 23:14:27.564 INFO Crucible Version: Crucible Version: 0.0.1
12442 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12443 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12444 rustc: 1.70.0 stable x86_64-unknown-illumos
12445 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12446 Sep 22 23:14:27.564 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12447 Sep 22 23:14:27.564 INFO Using address: 127.0.0.1:45897, task: main
12448 Sep 22 23:14:27.565 INFO UUID: 6b339ff0-fd62-4d3d-96a2-00a3cd97fb5c
12449 Sep 22 23:14:27.565 INFO Blocks per extent:5 Total Extents: 2
12450 Sep 22 23:14:27.565 INFO Crucible Version: Crucible Version: 0.0.1
12451 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12452 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12453 rustc: 1.70.0 stable x86_64-unknown-illumos
12454 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12455 Sep 22 23:14:27.565 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12456 Sep 22 23:14:27.565 INFO Using address: 127.0.0.1:43186, task: main
12457 Sep 22 23:14:27.566 INFO UUID: daca29a6-1615-4360-b286-c9a82fbfcd3d
12458 Sep 22 23:14:27.567 INFO Blocks per extent:5 Total Extents: 2
12459 Sep 22 23:14:27.567 INFO UUID: 97377133-8bb1-47f5-a2f0-3b28d74593db
12460 Sep 22 23:14:27.567 INFO Blocks per extent:5 Total Extents: 2
12461 Sep 22 23:14:27.567 INFO Crucible Version: Crucible Version: 0.0.1
12462 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12463 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12464 rustc: 1.70.0 stable x86_64-unknown-illumos
12465 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12466 Sep 22 23:14:27.567 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12467 Sep 22 23:14:27.567 INFO Using address: 127.0.0.1:33768, task: main
12468 Sep 22 23:14:27.567 INFO Crucible Version: Crucible Version: 0.0.1
12469 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12470 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12471 rustc: 1.70.0 stable x86_64-unknown-illumos
12472 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12473 Sep 22 23:14:27.567 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12474 Sep 22 23:14:27.567 INFO Using address: 127.0.0.1:58086, task: main
12475 Sep 22 23:14:27.567 INFO UUID: 9008985f-4d59-43db-b559-a48d3978dfda
12476 Sep 22 23:14:27.567 INFO Blocks per extent:5 Total Extents: 2
12477 Sep 22 23:14:27.567 INFO Crucible Version: Crucible Version: 0.0.1
12478 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12479 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12480 rustc: 1.70.0 stable x86_64-unknown-illumos
12481 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12482 Sep 22 23:14:27.567 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12483 Sep 22 23:14:27.567 INFO Using address: 127.0.0.1:36835, task: main
12484 Sep 22 23:14:27.567 INFO UUID: 122e249d-ed33-49c6-bb2e-b79a59ebf9a6
12485 Sep 22 23:14:27.567 INFO Blocks per extent:5 Total Extents: 2
12486 Sep 22 23:14:27.567 INFO Crucible Version: Crucible Version: 0.0.1
12487 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12488 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12489 rustc: 1.70.0 stable x86_64-unknown-illumos
12490 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12491 Sep 22 23:14:27.567 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12492 Sep 22 23:14:27.567 INFO Using address: 127.0.0.1:42970, task: main
12493 Sep 22 23:14:27.567 INFO UUID: 35ccb45b-0c22-4a14-bf83-50fbfaffa7a6
12494 Sep 22 23:14:27.568 INFO Blocks per extent:5 Total Extents: 2
12495 Sep 22 23:14:27.568 INFO Crucible Version: Crucible Version: 0.0.1
12496 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12497 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12498 rustc: 1.70.0 stable x86_64-unknown-illumos
12499 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12500 Sep 22 23:14:27.568 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12501 Sep 22 23:14:27.568 INFO Using address: 127.0.0.1:56524, task: main
12502 Sep 22 23:14:27.568 INFO Repair listens on 127.0.0.1:0, task: repair
12503 Sep 22 23:14:27.568 INFO Repair listens on 127.0.0.1:0, task: repair
12504 Sep 22 23:14:27.568 INFO Repair listens on 127.0.0.1:0, task: repair
12505 Sep 22 23:14:27.568 INFO Repair listens on 127.0.0.1:0, task: repair
12506 Sep 22 23:14:27.568 INFO Repair listens on 127.0.0.1:0, task: repair
12507 Sep 22 23:14:27.568 INFO Repair listens on 127.0.0.1:0, task: repair
12508 Sep 22 23:14:27.568 INFO Repair listens on 127.0.0.1:0, task: repair
12509 Sep 22 23:14:27.568 INFO Repair listens on 127.0.0.1:0, task: repair
12510 Sep 22 23:14:27.570 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:48710, task: repair
12511 Sep 22 23:14:27.570 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:37143, task: repair
12512 Sep 22 23:14:27.570 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38525, task: repair
12513 Sep 22 23:14:27.570 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:42417, task: repair
12514 Sep 22 23:14:27.570 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50356, task: repair
12515 Sep 22 23:14:27.570 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:55024, task: repair
12516 Sep 22 23:14:27.570 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:39068, task: repair
12517 Sep 22 23:14:27.571 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44953, task: repair
12518 Sep 22 23:14:27.571 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50356, task: repair
12519 Sep 22 23:14:27.571 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:37143, task: repair
12520 Sep 22 23:14:27.571 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:48710, task: repair
12521 Sep 22 23:14:27.571 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38525, task: repair
12522 Sep 22 23:14:27.571 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:42417, task: repair
12523 Sep 22 23:14:27.571 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:55024, task: repair
12524 Sep 22 23:14:27.571 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:39068, task: repair
12525 Sep 22 23:14:27.571 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44953, task: repair
12526 Sep 22 23:14:27.571 INFO listening, local_addr: 127.0.0.1:44953, task: repair
12527 Sep 22 23:14:27.571 INFO listening, local_addr: 127.0.0.1:37143, task: repair
12528 Sep 22 23:14:27.572 INFO listening, local_addr: 127.0.0.1:42417, task: repair
12529 Sep 22 23:14:27.572 INFO listening, local_addr: 127.0.0.1:50356, task: repair
12530 Sep 22 23:14:27.572 INFO listening, local_addr: 127.0.0.1:48710, task: repair
12531 Sep 22 23:14:27.572 INFO listening, local_addr: 127.0.0.1:39068, task: repair
12532 Sep 22 23:14:27.572 INFO listening, local_addr: 127.0.0.1:55024, task: repair
12533 Sep 22 23:14:27.572 INFO listening, local_addr: 127.0.0.1:38525, task: repair
12534 Sep 22 23:14:27.577 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:39068, task: repair
12535 Sep 22 23:14:27.577 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38525, task: repair
12536 Sep 22 23:14:27.577 INFO Using repair address: 127.0.0.1:39068, task: main
12537 Sep 22 23:14:27.577 INFO No SSL acceptor configured, task: main
12538 Sep 22 23:14:27.577 INFO Using repair address: 127.0.0.1:38525, task: main
12539 Sep 22 23:14:27.577 INFO No SSL acceptor configured, task: main
12540 Sep 22 23:14:27.577 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:48710, task: repair
12541 Sep 22 23:14:27.577 INFO Using repair address: 127.0.0.1:48710, task: main
12542 Sep 22 23:14:27.577 INFO No SSL acceptor configured, task: main
12543 Sep 22 23:14:27.577 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:55024, task: repair
12544 Sep 22 23:14:27.577 INFO Using repair address: 127.0.0.1:55024, task: main
12545 Sep 22 23:14:27.577 INFO No SSL acceptor configured, task: main
12546 Sep 22 23:14:27.577 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:42417, task: repair
12547 Sep 22 23:14:27.577 INFO Using repair address: 127.0.0.1:42417, task: main
12548 Sep 22 23:14:27.577 INFO No SSL acceptor configured, task: main
12549 Sep 22 23:14:27.577 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50356, task: repair
12550 Sep 22 23:14:27.577 INFO Using repair address: 127.0.0.1:50356, task: main
12551 Sep 22 23:14:27.577 INFO No SSL acceptor configured, task: main
12552 Sep 22 23:14:27.577 INFO current number of open files limit 65536 is already the maximum
12553 Sep 22 23:14:27.577 INFO current number of open files limit 65536 is already the maximum
12554 Sep 22 23:14:27.577 INFO current number of open files limit 65536 is already the maximum
12555 Sep 22 23:14:27.577 INFO Created new region file "/tmp/downstairs-i6X8vigb/region.json"
12556 Sep 22 23:14:27.577 INFO current number of open files limit 65536 is already the maximum
12557 Sep 22 23:14:27.577 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:37143, task: repair
12558 Sep 22 23:14:27.577 INFO Created new region file "/tmp/downstairs-Zh66PBhN/region.json"
12559 Sep 22 23:14:27.577 INFO Created new region file "/tmp/downstairs-WtiCXF2Q/region.json"
12560 Sep 22 23:14:27.577 INFO Using repair address: 127.0.0.1:37143, task: main
12561 Sep 22 23:14:27.577 INFO No SSL acceptor configured, task: main
12562 Sep 22 23:14:27.577 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44953, task: repair
12563 Sep 22 23:14:27.577 INFO current number of open files limit 65536 is already the maximum
12564 Sep 22 23:14:27.577 INFO Created new region file "/tmp/downstairs-G9ZJdv0G/region.json"
12565 Sep 22 23:14:27.577 INFO Using repair address: 127.0.0.1:44953, task: main
12566 Sep 22 23:14:27.577 INFO current number of open files limit 65536 is already the maximum
12567 Sep 22 23:14:27.577 INFO No SSL acceptor configured, task: main
12568 Sep 22 23:14:27.577 INFO Created new region file "/tmp/downstairs-oHRV06JF/region.json"
12569 Sep 22 23:14:27.578 INFO Created new region file "/tmp/downstairs-iuS0VtCf/region.json"
12570 Sep 22 23:14:27.578 INFO current number of open files limit 65536 is already the maximum
12571 Sep 22 23:14:27.578 INFO Created new region file "/tmp/downstairs-FuV2Esuv/region.json"
12572 Sep 22 23:14:27.578 INFO current number of open files limit 65536 is already the maximum
12573 Sep 22 23:14:27.578 INFO Created new region file "/tmp/downstairs-PnHwI8Ne/region.json"
12574 Sep 22 23:14:27.581 INFO current number of open files limit 65536 is already the maximum
12575 Sep 22 23:14:27.581 INFO Opened existing region file "/tmp/downstairs-i6X8vigb/region.json"
12576 Sep 22 23:14:27.581 INFO Database read version 1
12577 Sep 22 23:14:27.581 INFO Database write version 1
12578 Sep 22 23:14:27.582 INFO current number of open files limit 65536 is already the maximum
12579 Sep 22 23:14:27.582 INFO Opened existing region file "/tmp/downstairs-WtiCXF2Q/region.json"
12580 Sep 22 23:14:27.582 INFO Database read version 1
12581 Sep 22 23:14:27.582 INFO Database write version 1
12582 Sep 22 23:14:27.582 INFO current number of open files limit 65536 is already the maximum
12583 Sep 22 23:14:27.582 INFO Opened existing region file "/tmp/downstairs-Zh66PBhN/region.json"
12584 Sep 22 23:14:27.582 INFO Database read version 1
12585 Sep 22 23:14:27.582 INFO Database write version 1
12586 Sep 22 23:14:27.583 INFO current number of open files limit 65536 is already the maximum
12587 Sep 22 23:14:27.583 INFO Opened existing region file "/tmp/downstairs-G9ZJdv0G/region.json"
12588 Sep 22 23:14:27.583 INFO Database read version 1
12589 Sep 22 23:14:27.583 INFO Database write version 1
12590 Sep 22 23:14:27.583 INFO current number of open files limit 65536 is already the maximum
12591 Sep 22 23:14:27.583 INFO Opened existing region file "/tmp/downstairs-oHRV06JF/region.json"
12592 Sep 22 23:14:27.583 INFO Database read version 1
12593 Sep 22 23:14:27.583 INFO Database write version 1
12594 Sep 22 23:14:27.584 INFO current number of open files limit 65536 is already the maximum
12595 Sep 22 23:14:27.584 INFO Opened existing region file "/tmp/downstairs-iuS0VtCf/region.json"
12596 Sep 22 23:14:27.584 INFO Database read version 1
12597 Sep 22 23:14:27.584 INFO Database write version 1
12598 Sep 22 23:14:27.585 INFO UUID: fe394f4d-b22e-45c5-ad25-c835a8779c91
12599 Sep 22 23:14:27.585 INFO Blocks per extent:5 Total Extents: 2
12600 Sep 22 23:14:27.585 INFO Crucible Version: Crucible Version: 0.0.1
12601 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12602 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12603 rustc: 1.70.0 stable x86_64-unknown-illumos
12604 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12605 Sep 22 23:14:27.585 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12606 Sep 22 23:14:27.585 INFO Using address: 127.0.0.1:33784, task: main
12607 Sep 22 23:14:27.586 INFO Repair listens on 127.0.0.1:0, task: repair
12608 Sep 22 23:14:27.586 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58243, task: repair
12609 Sep 22 23:14:27.586 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58243, task: repair
12610 Sep 22 23:14:27.586 INFO listening, local_addr: 127.0.0.1:58243, task: repair
12611 Sep 22 23:14:27.586 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58243, task: repair
12612 Sep 22 23:14:27.586 INFO current number of open files limit 65536 is already the maximum
12613 Sep 22 23:14:27.586 INFO Opened existing region file "/tmp/downstairs-FuV2Esuv/region.json"
12614 Sep 22 23:14:27.586 INFO Using repair address: 127.0.0.1:58243, task: main
12615 Sep 22 23:14:27.586 INFO Database read version 1
12616 Sep 22 23:14:27.586 INFO No SSL acceptor configured, task: main
12617 Sep 22 23:14:27.586 INFO Database write version 1
12618 Sep 22 23:14:27.586 INFO UUID: 54deed7d-517b-47d9-a428-2bbdc0729fba
12619 Sep 22 23:14:27.586 INFO Blocks per extent:5 Total Extents: 2
12620 Sep 22 23:14:27.586 INFO Crucible Version: Crucible Version: 0.0.1
12621 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12622 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12623 rustc: 1.70.0 stable x86_64-unknown-illumos
12624 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12625 Sep 22 23:14:27.586 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12626 Sep 22 23:14:27.586 INFO Using address: 127.0.0.1:52116, task: main
12627 Sep 22 23:14:27.587 INFO UUID: 80932238-df83-4f6e-96fc-9d0aac0f8ced
12628 Sep 22 23:14:27.587 INFO Blocks per extent:5 Total Extents: 2
12629 Sep 22 23:14:27.587 INFO Crucible Version: Crucible Version: 0.0.1
12630 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12631 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12632 rustc: 1.70.0 stable x86_64-unknown-illumos
12633 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12634 Sep 22 23:14:27.587 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12635 Sep 22 23:14:27.587 INFO Using address: 127.0.0.1:54745, task: main
12636 Sep 22 23:14:27.587 INFO Repair listens on 127.0.0.1:0, task: repair
12637 Sep 22 23:14:27.587 INFO UUID: 594f2637-eae4-459c-bc2e-92b501adec4b
12638 Sep 22 23:14:27.587 INFO Blocks per extent:5 Total Extents: 2
12639 Sep 22 23:14:27.587 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:47921, task: repair
12640 Sep 22 23:14:27.587 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:47921, task: repair
12641 Sep 22 23:14:27.587 INFO Crucible Version: Crucible Version: 0.0.1
12642 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12643 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12644 rustc: 1.70.0 stable x86_64-unknown-illumos
12645 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12646 Sep 22 23:14:27.587 INFO listening, local_addr: 127.0.0.1:47921, task: repair
12647 Sep 22 23:14:27.587 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12648 Sep 22 23:14:27.587 INFO Repair listens on 127.0.0.1:0, task: repair
12649 Sep 22 23:14:27.587 INFO Using address: 127.0.0.1:50563, task: main
12650 Sep 22 23:14:27.587 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53000, task: repair
12651 Sep 22 23:14:27.587 INFO current number of open files limit 65536 is already the maximum
12652 Sep 22 23:14:27.587 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53000, task: repair
12653 Sep 22 23:14:27.587 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:47921, task: repair
12654 Sep 22 23:14:27.587 INFO UUID: 1a8f9cb4-4a64-4013-8234-292244fb0112
12655 Sep 22 23:14:27.587 INFO Blocks per extent:5 Total Extents: 2
12656 Sep 22 23:14:27.587 INFO Using repair address: 127.0.0.1:47921, task: main
12657 Sep 22 23:14:27.587 INFO listening, local_addr: 127.0.0.1:53000, task: repair
12658 Sep 22 23:14:27.587 INFO No SSL acceptor configured, task: main
12659 Sep 22 23:14:27.587 INFO Created new region file "/tmp/downstairs-LpKqZye7/region.json"
12660 Sep 22 23:14:27.587 INFO Crucible Version: Crucible Version: 0.0.1
12661 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12662 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12663 rustc: 1.70.0 stable x86_64-unknown-illumos
12664 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12665 Sep 22 23:14:27.587 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12666 Sep 22 23:14:27.587 INFO Using address: 127.0.0.1:43400, task: main
12667 Sep 22 23:14:27.587 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53000, task: repair
12668 Sep 22 23:14:27.587 INFO Repair listens on 127.0.0.1:0, task: repair
12669 Sep 22 23:14:27.587 INFO Using repair address: 127.0.0.1:53000, task: main
12670 Sep 22 23:14:27.588 INFO No SSL acceptor configured, task: main
12671 Sep 22 23:14:27.588 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:62168, task: repair
12672 Sep 22 23:14:27.588 INFO current number of open files limit 65536 is already the maximum
12673 Sep 22 23:14:27.588 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:62168, task: repair
12674 Sep 22 23:14:27.588 INFO listening, local_addr: 127.0.0.1:62168, task: repair
12675 Sep 22 23:14:27.588 INFO Created new region file "/tmp/downstairs-U8m0aCFa/region.json"
12676 Sep 22 23:14:27.588 INFO Repair listens on 127.0.0.1:0, task: repair
12677 Sep 22 23:14:27.588 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:62168, task: repair
12678 Sep 22 23:14:27.588 INFO Using repair address: 127.0.0.1:62168, task: main
12679 Sep 22 23:14:27.588 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60847, task: repair
12680 Sep 22 23:14:27.588 INFO No SSL acceptor configured, task: main
12681 Sep 22 23:14:27.588 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60847, task: repair
12682 Sep 22 23:14:27.588 INFO listening, local_addr: 127.0.0.1:60847, task: repair
12683 Sep 22 23:14:27.588 INFO UUID: 9191f668-24b7-4176-b853-43cf34e8ccd3
12684 Sep 22 23:14:27.588 INFO Blocks per extent:5 Total Extents: 2
12685 Sep 22 23:14:27.588 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60847, task: repair
12686 Sep 22 23:14:27.588 INFO Using repair address: 127.0.0.1:60847, task: main
12687 Sep 22 23:14:27.588 INFO No SSL acceptor configured, task: main
12688 Sep 22 23:14:27.588 INFO Crucible Version: Crucible Version: 0.0.1
12689 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12690 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12691 rustc: 1.70.0 stable x86_64-unknown-illumos
12692 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12693 Sep 22 23:14:27.588 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12694 Sep 22 23:14:27.588 INFO Using address: 127.0.0.1:62060, task: main
12695 Sep 22 23:14:27.588 INFO current number of open files limit 65536 is already the maximum
12696 Sep 22 23:14:27.588 INFO Created new region file "/tmp/downstairs-WYIwOmtG/region.json"
12697 Sep 22 23:14:27.588 INFO UUID: 45f3b954-a0cc-44a1-92c8-91ac1da9b5f8
12698 Sep 22 23:14:27.589 INFO current number of open files limit 65536 is already the maximum
12699 Sep 22 23:14:27.589 INFO Blocks per extent:5 Total Extents: 2
12700 Sep 22 23:14:27.589 INFO Repair listens on 127.0.0.1:0, task: repair
12701 Sep 22 23:14:27.589 INFO current number of open files limit 65536 is already the maximum
12702 Sep 22 23:14:27.589 INFO Created new region file "/tmp/downstairs-JIyoAurl/region.json"
12703 Sep 22 23:14:27.589 INFO current number of open files limit 65536 is already the maximum
12704 Sep 22 23:14:27.589 INFO Crucible Version: Crucible Version: 0.0.1
12705 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12706 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12707 rustc: 1.70.0 stable x86_64-unknown-illumos
12708 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12709 Sep 22 23:14:27.589 INFO Opened existing region file "/tmp/downstairs-PnHwI8Ne/region.json"
12710 Sep 22 23:14:27.589 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12711 Sep 22 23:14:27.589 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:55098, task: repair
12712 Sep 22 23:14:27.589 INFO Database read version 1
12713 Sep 22 23:14:27.589 INFO Using address: 127.0.0.1:46381, task: main
12714 Sep 22 23:14:27.589 INFO Database write version 1
12715 Sep 22 23:14:27.589 INFO Created new region file "/tmp/downstairs-wiRB3PPR/region.json"
12716 Sep 22 23:14:27.589 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:55098, task: repair
12717 Sep 22 23:14:27.589 INFO listening, local_addr: 127.0.0.1:55098, task: repair
12718 Sep 22 23:14:27.589 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:55098, task: repair
12719 Sep 22 23:14:27.589 INFO Using repair address: 127.0.0.1:55098, task: main
12720 Sep 22 23:14:27.589 INFO Repair listens on 127.0.0.1:0, task: repair
12721 Sep 22 23:14:27.589 INFO No SSL acceptor configured, task: main
12722 Sep 22 23:14:27.589 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:65411, task: repair
12723 Sep 22 23:14:27.589 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:65411, task: repair
12724 Sep 22 23:14:27.589 INFO listening, local_addr: 127.0.0.1:65411, task: repair
12725 Sep 22 23:14:27.589 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:65411, task: repair
12726 Sep 22 23:14:27.589 INFO Using repair address: 127.0.0.1:65411, task: main
12727 Sep 22 23:14:27.589 INFO No SSL acceptor configured, task: main
12728 Sep 22 23:14:27.590 INFO current number of open files limit 65536 is already the maximum
12729 Sep 22 23:14:27.590 INFO Created new region file "/tmp/downstairs-VWuvNJ4J/region.json"
12730 Sep 22 23:14:27.590 INFO current number of open files limit 65536 is already the maximum
12731 Sep 22 23:14:27.590 INFO Created new region file "/tmp/downstairs-LdYRdLcv/region.json"
12732 Sep 22 23:14:27.592 INFO current number of open files limit 65536 is already the maximum
12733 Sep 22 23:14:27.592 INFO Opened existing region file "/tmp/downstairs-LpKqZye7/region.json"
12734 Sep 22 23:14:27.592 INFO Database read version 1
12735 Sep 22 23:14:27.592 INFO Database write version 1
12736 Sep 22 23:14:27.592 INFO UUID: 53cd4d63-78de-43d1-b2db-bd20d98528e7
12737 Sep 22 23:14:27.592 INFO current number of open files limit 65536 is already the maximum
12738 Sep 22 23:14:27.592 INFO Blocks per extent:5 Total Extents: 2
12739 Sep 22 23:14:27.592 INFO Opened existing region file "/tmp/downstairs-U8m0aCFa/region.json"
12740 Sep 22 23:14:27.592 INFO Database read version 1
12741 Sep 22 23:14:27.592 INFO Database write version 1
12742 Sep 22 23:14:27.592 INFO Crucible Version: Crucible Version: 0.0.1
12743 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12744 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12745 rustc: 1.70.0 stable x86_64-unknown-illumos
12746 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12747 Sep 22 23:14:27.592 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12748 Sep 22 23:14:27.592 INFO Using address: 127.0.0.1:50371, task: main
12749 Sep 22 23:14:27.593 INFO Repair listens on 127.0.0.1:0, task: repair
12750 Sep 22 23:14:27.593 INFO current number of open files limit 65536 is already the maximum
12751 Sep 22 23:14:27.593 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53985, task: repair
12752 Sep 22 23:14:27.593 INFO Opened existing region file "/tmp/downstairs-WYIwOmtG/region.json"
12753 Sep 22 23:14:27.593 INFO Database read version 1
12754 Sep 22 23:14:27.593 INFO Database write version 1
12755 Sep 22 23:14:27.593 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53985, task: repair
12756 Sep 22 23:14:27.593 INFO listening, local_addr: 127.0.0.1:53985, task: repair
12757 Sep 22 23:14:27.593 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53985, task: repair
12758 Sep 22 23:14:27.593 INFO Using repair address: 127.0.0.1:53985, task: main
12759 Sep 22 23:14:27.593 INFO No SSL acceptor configured, task: main
12760 Sep 22 23:14:27.594 INFO current number of open files limit 65536 is already the maximum
12761 Sep 22 23:14:27.594 INFO Opened existing region file "/tmp/downstairs-wiRB3PPR/region.json"
12762 Sep 22 23:14:27.594 INFO Database read version 1
12763 Sep 22 23:14:27.594 INFO current number of open files limit 65536 is already the maximum
12764 Sep 22 23:14:27.594 INFO Database write version 1
12765 Sep 22 23:14:27.594 INFO Opened existing region file "/tmp/downstairs-JIyoAurl/region.json"
12766 Sep 22 23:14:27.594 INFO Database read version 1
12767 Sep 22 23:14:27.594 INFO Database write version 1
12768 Sep 22 23:14:27.597 INFO UUID: 4bb2c32c-5001-4960-9cd0-22694b9c6a8c
12769 Sep 22 23:14:27.597 INFO Blocks per extent:5 Total Extents: 2
12770 Sep 22 23:14:27.597 INFO Crucible Version: Crucible Version: 0.0.1
12771 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12772 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12773 rustc: 1.70.0 stable x86_64-unknown-illumos
12774 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12775 Sep 22 23:14:27.597 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12776 Sep 22 23:14:27.597 INFO Using address: 127.0.0.1:39493, task: main
12777 Sep 22 23:14:27.597 INFO current number of open files limit 65536 is already the maximum
12778 Sep 22 23:14:27.597 INFO UUID: 67caa349-8834-4aba-8026-7fdaaae2f119
12779 Sep 22 23:14:27.597 INFO Blocks per extent:5 Total Extents: 2
12780 Sep 22 23:14:27.597 INFO Created new region file "/tmp/downstairs-N9ygFYLS/region.json"
12781 Sep 22 23:14:27.597 INFO Crucible Version: Crucible Version: 0.0.1
12782 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12783 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12784 rustc: 1.70.0 stable x86_64-unknown-illumos
12785 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12786 Sep 22 23:14:27.597 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12787 Sep 22 23:14:27.597 INFO Using address: 127.0.0.1:56258, task: main
12788 Sep 22 23:14:27.598 INFO Repair listens on 127.0.0.1:0, task: repair
12789 Sep 22 23:14:27.598 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51273, task: repair
12790 Sep 22 23:14:27.598 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51273, task: repair
12791 Sep 22 23:14:27.598 INFO listening, local_addr: 127.0.0.1:51273, task: repair
12792 Sep 22 23:14:27.598 INFO current number of open files limit 65536 is already the maximum
12793 Sep 22 23:14:27.598 INFO Opened existing region file "/tmp/downstairs-VWuvNJ4J/region.json"
12794 Sep 22 23:14:27.598 INFO Database read version 1
12795 Sep 22 23:14:27.598 INFO Database write version 1
12796 Sep 22 23:14:27.598 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51273, task: repair
12797 Sep 22 23:14:27.598 INFO Using repair address: 127.0.0.1:51273, task: main
12798 Sep 22 23:14:27.598 INFO No SSL acceptor configured, task: main
12799 Sep 22 23:14:27.598 INFO Repair listens on 127.0.0.1:0, task: repair
12800 Sep 22 23:14:27.598 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44814, task: repair
12801 Sep 22 23:14:27.598 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44814, task: repair
12802 Sep 22 23:14:27.598 INFO listening, local_addr: 127.0.0.1:44814, task: repair
12803 Sep 22 23:14:27.598 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44814, task: repair
12804 Sep 22 23:14:27.598 INFO Using repair address: 127.0.0.1:44814, task: main
12805 Sep 22 23:14:27.598 INFO No SSL acceptor configured, task: main
12806 Sep 22 23:14:27.598 INFO current number of open files limit 65536 is already the maximum
12807 Sep 22 23:14:27.598 INFO Opened existing region file "/tmp/downstairs-LdYRdLcv/region.json"
12808 Sep 22 23:14:27.598 INFO Database read version 1
12809 Sep 22 23:14:27.598 INFO Database write version 1
12810 Sep 22 23:14:27.598 INFO UUID: bb12d53e-6697-4186-ab01-3149bf11c327
12811 Sep 22 23:14:27.599 INFO Blocks per extent:5 Total Extents: 2
12812 Sep 22 23:14:27.599 INFO UUID: 7f911e5b-f4b1-49f1-9a5d-fe84ec8226ed
12813 Sep 22 23:14:27.599 INFO Blocks per extent:5 Total Extents: 2
12814 Sep 22 23:14:27.599 INFO Crucible Version: Crucible Version: 0.0.1
12815 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12816 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12817 rustc: 1.70.0 stable x86_64-unknown-illumos
12818 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12819 Sep 22 23:14:27.599 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12820 Sep 22 23:14:27.599 INFO Using address: 127.0.0.1:39722, task: main
12821 Sep 22 23:14:27.599 INFO Crucible Version: Crucible Version: 0.0.1
12822 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12823 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12824 rustc: 1.70.0 stable x86_64-unknown-illumos
12825 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12826 Sep 22 23:14:27.599 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12827 Sep 22 23:14:27.599 INFO Using address: 127.0.0.1:60582, task: main
12828 Sep 22 23:14:27.599 INFO Repair listens on 127.0.0.1:0, task: repair
12829 Sep 22 23:14:27.599 INFO Repair listens on 127.0.0.1:0, task: repair
12830 Sep 22 23:14:27.599 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60166, task: repair
12831 Sep 22 23:14:27.599 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:40192, task: repair
12832 Sep 22 23:14:27.599 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60166, task: repair
12833 Sep 22 23:14:27.599 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:40192, task: repair
12834 Sep 22 23:14:27.599 INFO listening, local_addr: 127.0.0.1:60166, task: repair
12835 Sep 22 23:14:27.599 INFO listening, local_addr: 127.0.0.1:40192, task: repair
12836 Sep 22 23:14:27.599 INFO UUID: 922c2bc8-c976-47a3-990e-e4ec311f6c5b
12837 Sep 22 23:14:27.599 INFO Blocks per extent:5 Total Extents: 2
12838 Sep 22 23:14:27.599 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60166, task: repair
12839 Sep 22 23:14:27.599 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:40192, task: repair
12840 Sep 22 23:14:27.599 INFO Using repair address: 127.0.0.1:60166, task: main
12841 Sep 22 23:14:27.599 INFO Crucible Version: Crucible Version: 0.0.1
12842 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12843 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12844 rustc: 1.70.0 stable x86_64-unknown-illumos
12845 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12846 Sep 22 23:14:27.599 INFO No SSL acceptor configured, task: main
12847 Sep 22 23:14:27.599 INFO Using repair address: 127.0.0.1:40192, task: main
12848 Sep 22 23:14:27.599 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12849 Sep 22 23:14:27.599 INFO No SSL acceptor configured, task: main
12850 Sep 22 23:14:27.599 INFO Using address: 127.0.0.1:33349, task: main
12851 Sep 22 23:14:27.600 INFO Repair listens on 127.0.0.1:0, task: repair
12852 Sep 22 23:14:27.600 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:46884, task: repair
12853 Sep 22 23:14:27.600 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:46884, task: repair
12854 Sep 22 23:14:27.600 INFO listening, local_addr: 127.0.0.1:46884, task: repair
12855 Sep 22 23:14:27.600 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:46884, task: repair
12856 Sep 22 23:14:27.600 INFO Using repair address: 127.0.0.1:46884, task: main
12857 Sep 22 23:14:27.600 INFO No SSL acceptor configured, task: main
12858 {"msg":"Upstairs starts","v":0,"name":"crucible"{"msg":"Upstairs starts","v":0,"name":"crucible","level":30,"level":30{"msg":"Upstairs starts","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.601346175Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
12859 ,"time":"2023-09-22T23:14:27.601282745Z{","hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg",:""pid":4301Crucible Version: BuildInfo {\n} version:
12860 \"0.0.1\",\n git_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",{\n git_commit_timestamp: \","2023-09-22T22:51:18.000000000Z\"time",:"\n git_branch: \"2023-09-22T23:14:27.601850906Z"main\",\n," rustc_semver: \"hostname"1.70.0\":,\n" rustc_channel: \"stable\",ip-10-150-1-74.us-west-2.compute.internal"\n,"pid": rustc_host_triple: \"4301"msg":x86_64-unknown-illumos\"}"
12861 Crucible Version: BuildInfo {,\n\n version: \" rustc_commit_sha: 0.0.1{\"\",\n git_sha: \"90c541806f23a127002de5b4038be731ba1458ca\"",\nmsged48f294784d46ea7d4bb99336918b74358eca46"\":,"\n cargo_triple: \"Crucible Version: BuildInfo { git_commit_timestamp: \n\"x86_64-unknown-illumos version: \"\"2023-09-22T22:51:18.000000000Z0.0.1\"\",,\n\n,\n git_branch: git_sha: \"\" debug: true,main\n\"ed48f294784d46ea7d4bb99336918b74358eca46,\"\n,\n opt_level: 0,\n rustc_semver: git_commit_timestamp: \"\"}"1.70.0\"2023-09-22T22:51:18.000000000Z,\"\n,\n rustc_channel: ,"\" git_branch: stable\"\",v":main\n\"0, rustc_host_triple: \n\","name" rustc_semver: x86_64-unknown-illumos\"\"1.70.0,\"\n,\n:" rustc_commit_sha: crucible\" rustc_channel: \""stable,90c541806f23a127002de5b4038be731ba1458ca\"\",\n,\n" rustc_host_triple: cargo_triple: \"\"level":x86_64-unknown-illumos30\"x86_64-unknown-illumos,\"\n,\n rustc_commit_sha: debug: true,\"\n opt_level: 0,\n}"90c541806f23a127002de5b4038be731ba1458ca\",",\nv": cargo_triple: 0\","namex86_64-unknown-illumos"\":",\ncrucible" debug: true,\n,"level" opt_level: 0,:\n}"30,"v":,0","name":"crucible"time",:""level":302023-09-22T23:14:27.602253889Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"}time"
12862 ,:""{time"2023-09-22T23:14:27.602267561Z:"","hostname2023-09-22T23:14:27.602277069Z"":{",""msg"hostnameip-10-150-1-74.us-west-2.compute.internal":msg""",:""pid""::4301ip-10-150-1-74.us-west-2.compute.internal"}"
12863 ,"Upstairs <-> Downstairs Message Version: 4Upstairs startspid""{:",4301""}msg
12864 ",:"v"{"v":"Upstairs <-> Downstairs Message Version: 40:msg"",:,"""v"Upstairs <-> Downstairs Message Version: 4:"name",0",v""name:"0:,""0namecrucible"":,"":levelcrucible"":,30",level"""namecrucible""::,"30"level"crucible,":time"30:"","level":2023-09-22T23:14:27.602351704Z"30,,""hostnametime""::""2023-09-22T23:14:27.602360327Z"ip-10-150-1-74.us-west-2.compute.internal",,""pidhostname""::"4301}
12865 ip-10-150-1-74.us-west-2.compute.internal","pid":4301{}
12866 ",msg{":"""time"msg":":Crucible stats registered with UUID: a697641e-15fb-475d-87b4-6f9c9b92b978"",2023-09-22T23:14:27.602365964Z""v"Crucible stats registered with UUID: 66688a47-67b1-4e47-8e76-e0e145532b3a:"0,,""namev""::",crucible0",""name,"":hostname"level""::crucible,30""","time":level":"30ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:14:27.602370327Z","pid":,4301,""}hostname":"
12867 ,time""ip-10-150-1-74.us-west-2.compute.internal":time,"":"{2023-09-22T23:14:27.602407209Z""pid",2023-09-22T23:14:27.602413581Z":"hostname"msg"",:""hostname":":"4301ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pid"pid:"4301:Crucible stats registered with UUID: 1f7159d2-76c3-4968-a4c3-9cc9b7758094"}}4301
12868 },"
12869 {
12870 {"vmsg"":msg""{:"":0,"name"Crucible 66688a47-67b1-4e47-8e76-e0e145532b3a has session id: 74575cda-5ddb-4e62-8d17-231d991256f7:"",Crucible a697641e-15fb-475d-87b4-6f9c9b92b978 has session id: c1dd7802-7a2b-465d-9b93-1deda33a45f5""vmsg":""",:crucible"Crucible Version: BuildInfo {,0",v""":level\n0" version: :,name""name:"":\"300.0.1"cruciblecrucible"",,""\"level,"level:"30:\n30 git_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",\n git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\n," git_branch: \"time"main,:",\"time""time:"":"2023-09-22T23:14:27.60249869Z"",2023-09-22T23:14:27.602497473Z"",,hostname""\n2023-09-22T23:14:27.602490949Z":hostname,"""hostname":" rustc_semver: \"ip-10-150-1-74.us-west-2.compute.internal:"",1.70.0\""ip-10-150-1-74.us-west-2.compute.internalpid"":,4301,"}ip-10-150-1-74.us-west-2.compute.internal"pid
12871 ":,"4301pid"}:
12872 4301}
12873 {"msg":"\nCrucible 1f7159d2-76c3-4968-a4c3-9cc9b7758094 has session id: 9808e339-fe8d-4257-bb78-48915f23db00", rustc_channel: \""stable\"v":,\n0,"name":" rustc_host_triple: \"crucible","x86_64-unknown-illumoslevel"\":,30\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: \"x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\n}","v":,"0time",:""name":"2023-09-22T23:14:27.602582533Z"crucible",","hostname"level":":30ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
12874 ,"time":"2023-09-22T23:14:27.60260638Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
12875 {"msg":"Upstairs <-> Downstairs Message Version: 4","v":0,"name":"crucible","level":30The guest has requested activation
12876 ,"time":"2023-09-22T23:14:27.602838226Z"The guest has requested activation
12877 ,"hostname":"The guest has requested activation
12878 ip-10-150-1-74.us-west-2.compute.internal","The guest has requested activation
12879 pid":4301}
12880 Sep 22 23:14:27.602 INFO UUID: b37a15c7-b55a-4f3b-ab04-a618bf111318
12881 {"msg":"Crucible stats registered with UUID: 9ed4d6bc-e430-4200-9ad2-68905b0ff40c","v":0,"name":"crucible","Sep 22 23:14:27.602 INFO Blocks per extent:5 Total Extents: 2
12882 level":30,"time":"2023-09-22T23:14:27.602914349Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
128832023-09-22T23:14:27.602ZINFOcrucible: Crucible 9ed4d6bc-e430-4200-9ad2-68905b0ff40c has session id: 6a66447a-3d57-4eb2-927b-5246d1b47acc
12884 {"msg":"Upstairs starts","v":0,"name":"crucible","level":30Sep 22 23:14:27.603 INFO Crucible Version: Crucible Version: 0.0.1
12885 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12886 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12887 rustc: 1.70.0 stable x86_64-unknown-illumos
12888 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12889 Sep 22 23:14:27.603 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12890 Sep 22 23:14:27.603 INFO Using address: 127.0.0.1:42513, task: main
12891 ,"time":"2023-09-22T23:14:27.603091445Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
128922023-09-22T23:14:27.603ZINFOcrucible: Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
12893 {"msg":"Upstairs <-> Downstairs Message Version: 4","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.603395093Z","hostname":"Sep 22 23:14:27.603 INFO UUID: ed248c78-99b2-4b20-8580-19eb24b6c3f5
12894 ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
12895 {"msg":"Crucible stats registered with UUID: 64be6ec7-0ac5-4896-883a-00ec34140b84"Sep 22 23:14:27.603 INFO Blocks per extent:5 Total Extents: 2
12896 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.603446539Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
128972023-09-22T23:14:27.603ZINFOcrucible: Crucible 64be6ec7-0ac5-4896-883a-00ec34140b84 has session id: b259cf28-eda8-4b6d-9178-3013a298e183
12898 Sep 22 23:14:27.603 INFO Crucible Version: Crucible Version: 0.0.1
12899 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
12900 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
12901 rustc: 1.70.0 stable x86_64-unknown-illumos
12902 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
12903 Sep 22 23:14:27.603 INFO Upstairs <-> Downstairs Message Version: 4, task: main
12904 Sep 22 23:14:27.603 INFO Using address: 127.0.0.1:49774, task: main
12905 The guest has requested activation
12906 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12907 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12908 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12909 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12910 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12911 Sep 22 23:14:27.604 INFO Repair listens on 127.0.0.1:0, task: repair
12912 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12913 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12914 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12915 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12916 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12917 Sep 22 23:14:27.604 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:42268, task: repair
12918 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12919 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12920 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12921 Sep 22 23:14:27.604 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:42268, task: repair
12922 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12923 Sep 22 23:14:27.604 INFO Repair listens on 127.0.0.1:0, task: repair
12924 Sep 22 23:14:27.604 INFO listening on 127.0.0.1:0, task: main
12925 Sep 22 23:14:27.604 INFO listening, local_addr: 127.0.0.1:42268, task: repair
12926 Sep 22 23:14:27.604 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44564, task: repair
12927 Sep 22 23:14:27.604 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44564, task: repair
12928 Sep 22 23:14:27.604 INFO current number of open files limit 65536 is already the maximum
12929 Sep 22 23:14:27.604 INFO Opened existing region file "/tmp/downstairs-N9ygFYLS/region.json"
12930 Sep 22 23:14:27.604 INFO listening, local_addr: 127.0.0.1:44564, task: repair
12931 Sep 22 23:14:27.604 INFO Database read version 1
12932 Sep 22 23:14:27.604 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:42268, task: repair
12933 Sep 22 23:14:27.604 INFO Database write version 1
12934 Sep 22 23:14:27.604 INFO Using repair address: 127.0.0.1:42268, task: main
12935 Sep 22 23:14:27.604 INFO No SSL acceptor configured, task: main
12936 Sep 22 23:14:27.604 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44564, task: repair
12937 Sep 22 23:14:27.604 INFO Using repair address: 127.0.0.1:44564, task: main
12938 Sep 22 23:14:27.604 INFO No SSL acceptor configured, task: main
12939 {"msg":"[0] connecting to 127.0.0.1:32776","v":0,"name":"crucible","level"{:30"msg":"{[0] connecting to 127.0.0.1:45897","v":"0msg",":name"":"crucible"[0] connecting to 127.0.0.1:36835","level,"":v":300,"name":"{crucible","level":30,"time"":"msg":"2023-09-22T23:14:27.605218154Z","[0] connecting to 127.0.0.1:56524"hostname":","v":ip-10-150-1-74.us-west-2.compute.internal"{,0","pid"name:""4301msg:"":"crucible,",[0] connecting to 127.0.0.1:33768"",",,""v"level:time""time"looper::"""0:2023-09-22T23:14:27.605238214Z2023-09-22T23:14:27.605234223Z"""30,:,,""hostname"""name"0":hostname""}:"ip-10-150-1-74.us-west-2.compute.internal"
12940 ip-10-150-1-74.us-west-2.compute.internal,"",pid""pid:"4301:,:""time,crucible"":4301","looper,"":"looper0"":2023-09-22T23:14:27.605269529Z""level"}"
12941 0":,"}30hostname":"
12942 ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"0"}
12943 ,"time":"2023-09-22T23:14:27.605310738Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"0"}
12944 {"msg":"[1] connecting to 127.0.0.1:54745","v":0,"name":"crucible","level":30,"{time":"2023-09-22T23:14:27.605546571Z","hostname":""msgip-10-150-1-74.us-west-2.compute.internal"":,""pid":4301[1] connecting to 127.0.0.1:50563","looper",:"Sep 22 23:14:27.605 INFO Upstairs starts
12945 "v"1":0},
12946 "name":"crucible","level":30,"time{":"2023-09-22T23:14:27.605609847Z",""hostname":"msg":"ip-10-150-1-74.us-west-2.compute.internal","[1] connecting to 127.0.0.1:43400pid"Sep 22 23:14:27.605 INFO Crucible Version: BuildInfo {
12947 version: "0.0.1",
12948 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
12949 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
12950 git_branch: "main",
12951 rustc_semver: "1.70.0",
12952 rustc_channel: "stable",
12953 rustc_host_triple: "x86_64-unknown-illumos",
12954 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
12955 cargo_triple: "x86_64-unknown-illumos",
12956 debug: true,
12957 opt_level: 0,
12958 }
12959 ",":v"4301:0,","looper"name":":1""crucible"},
12960 {{"Sep 22 23:14:27.605 INFO Upstairs <-> Downstairs Message Version: 4
12961 "msg":"[2] connecting to 127.0.0.1:39722","v":0,"name":"crucible","level":30,"Sep 22 23:14:27.605 INFO Crucible stats registered with UUID: a50fc3fb-3de2-4743-9c50-cc80cfba77db
12962 time":"2023-09-22T23:14:27.605823038Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"2"}
12963 msg":"[1] connecting to 127.0.0.1:52116"","v":0level,""name":"Sep 22 23:14:27.605 INFO Crucible a50fc3fb-3de2-4743-9c50-cc80cfba77db has session id: b014a775-2625-4651-87a0-a19fedb96440
12964 crucible:30","level":30,","timetime"":":"2023-09-22T23:14:27.605884947Z2023-09-22T23:14:27.605887144Z"",",hostname"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"1"}
12965 ip-10-150-1-74.us-west-2.compute.internal","{pid":4301,"looper":"1""}
12966 msg":"[1] connecting to 127.0.0.1:33784"{,"v":0,""msgname":":""crucible"[2] connecting to 127.0.0.1:60582,""level":,"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.605963816Z",,""time"hostname:":""2023-09-22T23:14:27.605969179Z"ip-10-150-1-74.us-west-2.compute.internal",","hostnamepid""::"4301,"ip-10-150-1-74.us-west-2.compute.internallooper"":,""pid"1":4301}
12967 ,"looper":"2"}
12968 {"{msg":"[2] connecting to 127.0.0.1:39493""msg":","vup_listen starts"":,0","v"name"::"0crucible,"",name"":level"":crucible30","level":30The guest has requested activation
12969 ,"time":","time":2023-09-22T23:14:27.60604096Z"","2023-09-22T23:14:27.606043396Z"hostname":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal"4301,"pid":,"4301looper":"2","}task
12970 ":"up_listen"}
12971 {"msg":"{up_listen starts","v":"0msg":","name":"crucibleWait for all three downstairs to come online"",","vlevel""::300,"name":"crucible","level":30,"time":"Sep 22 23:14:27.606 INFO listening on 127.0.0.1:0, task: main
12972 2023-09-22T23:14:27.606098935Z,"",time"":"hostname":"2023-09-22T23:14:27.606103539Z","ip-10-150-1-74.us-west-2.compute.internal"hostname",:""pid":4301ip-10-150-1-74.us-west-2.compute.internal",,""pid"task"::"4301up_listen"{}
12973 "}msg"
12974 {:""msg"[2] connecting to 127.0.0.1:56258:""{,"v":Wait for all three downstairs to come online0",,""namev""::0","crucible""name",":level""msg"crucible:":,30""level":30Flush timeout: 0.5","v":0,"name":"crucible","level":30Sep 22 23:14:27.606 INFO listening on 127.0.0.1:0, task: main
12975 ,,""timetime""::""2023-09-22T23:14:27.606167938Z2023-09-22T23:14:27.606166662Z"",,""hostnamehostname""::"","time":ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",""pid,"":pid4301"2023-09-22T23:14:27.606174432Z"}:
12976 4301,"hostname"{,:"""msglooper""::""ip-10-150-1-74.us-west-2.compute.internal"Flush timeout: 0.52"",,}""
12977 v"pid"::04301,"name":}"
12978 crucible","level":30{Sep 22 23:14:27.606 INFO listening on 127.0.0.1:0, task: main
12979 {""msg"msg,"":time"":up_listen starts"":","v":02023-09-22T23:14:27.60622089Z"64be6ec7-0ac5-4896-883a-00ec34140b84 active request set",,"",hostnamename""::"""v"crucibleip-10-150-1-74.us-west-2.compute.internal"",,""pid"level"::304301:0},"
12980 name":"crucible","level":30,"time":"2023-09-22T23:14:27.606247706Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",{"pid":4301"msg,"":"task":"up_listen"}
12981 a697641e-15fb-475d-87b4-6f9c9b92b978 active request set",","time"{v"::"0",msg"":name""2023-09-22T23:14:27.606256787Z":",crucibleWait for all three downstairs to come online"",",""hostname"levelv""::300:","name":"crucible"ip-10-150-1-74.us-west-2.compute.internal",","level":pid"30:4301}
12982 ,"time":","2023-09-22T23:14:27.606292501Z"time":","hostname":"2023-09-22T23:14:27.606300362Z","Sep 22 23:14:27.606 INFO [0] connecting to 127.0.0.1:42970, looper: 0
12983 ip-10-150-1-74.us-west-2.compute.internalhostname"",:""pid"ip-10-150-1-74.us-west-2.compute.internal:"4301,"pid"}:
12984 4301}
12985 {{""msg":"msg":"Flush timeout: 0.5"up_listen starts,""v":,0",v"":name"0:","cruciblename"":,""level"crucible":,"30level":30,"time":"2023-09-22T23:14:27.606352442Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal,""time,"":pid"":43012023-09-22T23:14:27.606353878Z"}
12986 ,"hostname":"{ip-10-150-1-74.us-west-2.compute.internal"",msg":""pidSep 22 23:14:27.606 INFO accepted connection from 127.0.0.1:47572, task: main
12987 Sep 22 23:14:27.606 INFO accepted connection from 127.0.0.1:43136, task: main
12988 Sep 22 23:14:27.606 INFO [1] connecting to 127.0.0.1:62060, looper: 1
12989 ":4301,"task":"up_listen"}
12990 {"msg":"Wait for all three downstairs to come online","1f7159d2-76c3-4968-a4c3-9cc9b7758094 active request setv"":0,"name",:""crucible"v,"":level"0:,30"Sep 22 23:14:27.606 INFO accepted connection from 127.0.0.1:53904, task: main
12991 name":"crucible","level":30,"time":"2023-09-22T23:14:27.606511701Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
12992 {"msg":"Flush timeout: 0.5","v":0,"name":"crucible","level":30Sep 22 23:14:27.606 INFO accepted connection from 127.0.0.1:50653, task: main
12993 ,"time":","time"2023-09-22T23:14:27.606521997Z":"2023-09-22T23:14:27.606612421Z,""hostname":,""hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal"4301,"pid":}4301
12994 }
12995 {"msg":"66688a47-67b1-4e47-8e76-e0e145532b3a active request set","v":0,"name":"crucible","level":30Sep 22 23:14:27.606 INFO [2] connecting to 127.0.0.1:42513, looper: 2
12996 ,"time":"2023-09-22T23:14:27.606664215Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
12997 {"msg":"[2] connecting to 127.0.0.1:33349",Sep 22 23:14:27.606 INFO accepted connection from 127.0.0.1:33852, task: main
12998 {"v":0,""name":"msgcrucible"":","level":30[0] 64be6ec7-0ac5-4896-883a-00ec34140b84 looper connected"Sep 22 23:14:27.606 INFO accepted connection from 127.0.0.1:52656, task: main
12999 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.606760183Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"2"}
13000 ,"time":"{2023-09-22T23:14:27.606775017Z",""msg":"hostname":"up_listen starts","v"ip-10-150-1-74.us-west-2.compute.internal":,"0pid",:"4301name":",crucible"","looper":level"":0"30}
13001 {"msg":","time":"[0] Proc runs for 127.0.0.1:45897 in state New"2023-09-22T23:14:27.606816076Z,""v,"":hostname":0","name":"ip-10-150-1-74.us-west-2.compute.internalcrucible"",","level"pid:"30:4301,"task":"up_listen"}
13002 {,"time":""msg":"2023-09-22T23:14:27.606845572Z","hostname"Wait for all three downstairs to come online":","v":0ip-10-150-1-74.us-west-2.compute.internal",",name"":"pid"crucible:"4301,"level"}:
13003 30{,"time":"{2023-09-22T23:14:27.606878016Z"",""msghostname"msg""::"":"ip-10-150-1-74.us-west-2.compute.internal[0] 1f7159d2-76c3-4968-a4c3-9cc9b7758094 looper connected[0] 66688a47-67b1-4e47-8e76-e0e145532b3a looper connected""",",,""vv""::pid"00,,""name:"4301:name"":"crucible"},crucible""level,"":level"30:
13004 30{"msg":"Flush timeout: 0.5","v":0,"name":"crucible","level":30,,""timetime""::""2023-09-22T23:14:27.606925106Z2023-09-22T23:14:27.606920964Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::4301,"4301,"timelooper,"":"looper""0:"":}0"
13005 }{"
13006 2023-09-22T23:14:27.606937217Z"","msg":hostname"":"Upstairs starts",ip-10-150-1-74.us-west-2.compute.internal""v",:"pid"0:4301,"name"}:"
13007 crucible","level":30{"msg":"9ed4d6bc-e430-4200-9ad2-68905b0ff40c active request set","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.606991722Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,4301"time"}:"
13008 2023-09-22T23:14:27.606982458Z","hostname":"{"ip-10-150-1-74.us-west-2.compute.internal"msg",:""pid":4301}
13009 [0] 9ed4d6bc-e430-4200-9ad2-68905b0ff40c looper connected","v":0,"name":"{crucible","level":30"msg":"Crucible Version: BuildInfo {\n version: \"0.0.1\",\n git_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",\n git_commit_timestamp: \","2023-09-22T22:51:18.000000000Z\"time,"\n:" git_branch: \"main2023-09-22T23:14:27.607035815Z\"",\n,"hostname rustc_semver: "\":"1.70.0\",\nip-10-150-1-74.us-west-2.compute.internal rustc_channel: "\","stable\"pid",:\n4301, rustc_host_triple: "\"looper":"x86_64-unknown-illumos0\"",\n}
13010 rustc_commit_sha: \"{90c541806f23a127002de5b4038be731ba1458ca\",\n"msg": cargo_triple: "\"x86_64-unknown-illumos\",\n[0] Proc runs for 127.0.0.1:36835 in state New" debug: true,,\n"v": opt_level: 0,0\n,}""name":"crucible",,""vlevel""::030,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.607075029Z",","hostname":time"":"ip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:14:27.607078469Z"",","pid":hostname"4301:"}
13011 ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
130122023-09-22T23:14:27.607ZINFOcrucible: Upstairs <-> Downstairs Message Version: 4
130132023-09-22T23:14:27.607ZINFOcrucible: Crucible stats registered with UUID: 5b98377f-d3ca-45bf-893d-9aae2fd5a48e
130142023-09-22T23:14:27.607ZINFOcrucible: Crucible 5b98377f-d3ca-45bf-893d-9aae2fd5a48e has session id: 00bca160-fbd9-4074-8c7d-5e932e8b713d
130152023-09-22T23:14:27.607ZINFOcrucible: [0] Proc runs for 127.0.0.1:32776 in state New
13016 Sep 22 23:14:27.607 INFO UUID: bb88743f-4e54-484f-8e97-8fa2d4bde987
13017 Sep 22 23:14:27.607 INFO Blocks per extent:5 Total Extents: 2
13018 Sep 22 23:14:27.607 INFO Crucible Version: Crucible Version: 0.0.1
13019 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13020 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13021 rustc: 1.70.0 stable x86_64-unknown-illumos
13022 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13023 Sep 22 23:14:27.607 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13024 Sep 22 23:14:27.607 INFO accepted connection from 127.0.0.1:45775, task: main
13025 Sep 22 23:14:27.607 INFO accepted connection from 127.0.0.1:65350, task: main
13026 Sep 22 23:14:27.607 INFO accepted connection from 127.0.0.1:58018, task: main
13027 Sep 22 23:14:27.607 INFO Using address: 127.0.0.1:54983, task: main
13028 Sep 22 23:14:27.607 INFO up_listen starts, task: up_listen
13029 Sep 22 23:14:27.607 INFO Wait for all three downstairs to come online
13030 The guest has requested activation
13031 Sep 22 23:14:27.607 INFO accepted connection from 127.0.0.1:34921, task: main
13032 Sep 22 23:14:27.607 INFO accepted connection from 127.0.0.1:59139, task: main
13033 Sep 22 23:14:27.607 INFO Flush timeout: 0.5
13034 {"msg":"[0] Proc runs for 127.0.0.1:33768 in state New","v":0,"name":"crucible","level":30Sep 22 23:14:27.607 INFO listening on 127.0.0.1:0, task: main
13035 ,"time":"2023-09-22T23:14:27.607654183Z","hostname":"Sep 22 23:14:27.607 INFO accepted connection from 127.0.0.1:52230, task: main
13036 ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13037 Sep 22 23:14:27.607 INFO accepted connection from 127.0.0.1:45705, task: main
13038 Sep 22 23:14:27.607 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db active request set
13039 Sep 22 23:14:27.607 INFO accepted connection from 127.0.0.1:56039, task: main
13040 Sep 22 23:14:27.607 INFO listening on 127.0.0.1:0, task: main
13041 Sep 22 23:14:27.607 INFO listening on 127.0.0.1:0, task: main
13042 {"msg":"[1] 1f7159d2-76c3-4968-a4c3-9cc9b7758094 looper connected","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.607847431Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"1"}{
13043 "Sep 22 23:14:27.607 INFO accepted connection from 127.0.0.1:55691, task: main
13044 msg":"[0] connecting to 127.0.0.1:43186","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.607899715Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"0"}
13045 Sep 22 23:14:27.607 INFO Repair listens on 127.0.0.1:0, task: repair
13046 Sep 22 23:14:27.607 INFO accepted connection from 127.0.0.1:34793, task: main
13047 Sep 22 23:14:27.607 INFO accepted connection from 127.0.0.1:40268, task: main
13048 {{""Sep 22 23:14:27.608 INFO accepted connection from 127.0.0.1:62689, task: main
13049 msg"msg"::""[1] connecting to 127.0.0.1:46381"[1] 64be6ec7-0ac5-4896-883a-00ec34140b84 looper connected,""v":0,",name""v":":crucible"0,,""level"name"::"30crucible","level":30{"msg":"[0] a697641e-15fb-475d-87b4-6f9c9b92b978 looper connected","v",:"0time":,""name":",crucible""time2023-09-22T23:14:27.608073878Z,""level"::""302023-09-22T23:14:27.608077112Z",",hostname"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal",ip-10-150-1-74.us-west-2.compute.internal""pid",":pid4301":4301,"looperSep 22 23:14:27.608 INFO [0] a50fc3fb-3de2-4743-9c50-cc80cfba77db looper connected, looper: 0
13050 ":",1,Sep 22 23:14:27.608 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:48117, task: repair
13051 "time"":""}looper"2023-09-22T23:14:27.608105664Z":
13052 ","1"hostname":"}
13053 {{ip-10-150-1-74.us-west-2.compute.internal","pid":4301""{,msgmsg""::""""looper"msg":{"0"msg":Sep 22 23:14:27.608 INFO Connection request from 64be6ec7-0ac5-4896-883a-00ec34140b84 with version 4, task: proc
13054 "[1] 9ed4d6bc-e430-4200-9ad2-68905b0ff40c looper connected","v":0,"name":"crucible","level":30Sep 22 23:14:27.608 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:48117, task: repair
13055 ,"time":""2023-09-22T23:14:27.608212946Z","}hostname
13056 ":"Sep 22 23:14:27.608 INFO Connection request from 9ed4d6bc-e430-4200-9ad2-68905b0ff40c with version 4, task: proc
13057 ip-10-150-1-74.us-west-2.compute.internal{""msg":","pid":4301[0] Proc runs for 127.0.0.1:56524 in state New,"looper":"1"}"
13058 ,"v":{{0Sep 22 23:14:27.608 INFO listening, local_addr: 127.0.0.1:48117, task: repair
13059 "msg":","msgname"":":""crucible"[1] Proc runs for 127.0.0.1:54745 in state New","level,"":v":300,"[2] connecting to 127.0.0.1:49774name"[1] Proc runs for 127.0.0.1:52116 in state New"":",,""v"crucible"v",:"Sep 22 23:14:27.608 INFO upstairs UpstairsConnection { upstairs_id: 9ed4d6bc-e430-4200-9ad2-68905b0ff40c, session_id: ad0fa1b4-52c3-4329-aeea-287c43c0dc1e, gen: 1 } connected, version 4, task: proc
13060 :level,"":time300":,"0","2023-09-22T23:14:27.608280986Zname"name"":",crucible"":,,hostname""""level"crucible"time:"":,""ip-10-150-1-74.us-west-2.compute.internal:level"2023-09-22T23:14:27.608301943Z"",",:pid"":hostname430130"}:
13061 "30ip-10-150-1-74.us-west-2.compute.internal{","pid":4301"msg":"}
13062 [1] a697641e-15fb-475d-87b4-6f9c9b92b978 looper connected","v":0{,"name":""cruciblemsg"":,""level":30,,"[2] 1f7159d2-76c3-4968-a4c3-9cc9b7758094 looper connectedtime"""time,""::",v""time:"02023-09-22T23:14:27.608338666Z",:""name"",:2023-09-22T23:14:27.608352358Z""2023-09-22T23:14:27.608333519Z",crucible",hostname""":"",hostname""ip-10-150-1-74.us-west-2.compute.internal"level,"":pid30":Sep 22 23:14:27.608 INFO upstairs UpstairsConnection { upstairs_id: 64be6ec7-0ac5-4896-883a-00ec34140b84, session_id: 58d25f3b-f138-43ff-93b7-02b5e6bdc53e, gen: 1 } connected, version 4, task: proc
13063 hostname:4301,",":""time"":"looper"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",","pid":2023-09-22T23:14:27.608384425Z:pid"""1":},
13064 "4301hostname"4301:,""looper"}:
13065 ip-10-150-1-74.us-west-2.compute.internal"","2pidSep 22 23:14:27.608 INFO Connection request from 9ed4d6bc-e430-4200-9ad2-68905b0ff40c with version 4, task: proc
13066 ""}{
13067 :"Sep 22 23:14:27.608 INFO accepted connection from 127.0.0.1:37430, task: main
13068 {Sep 22 23:14:27.608 INFO upstairs UpstairsConnection { upstairs_id: 9ed4d6bc-e430-4200-9ad2-68905b0ff40c, session_id: ad0fa1b4-52c3-4329-aeea-287c43c0dc1e, gen: 1 } connected, version 4, task: proc
13069 ":"[1] Proc runs for 127.0.0.1:43400 in state New","v":0,"name":"crucible"msg,":""level":[2] 9ed4d6bc-e430-4200-9ad2-68905b0ff40c looper connected"30,"v":0,"name"[1] 66688a47-67b1-4e47-8e76-e0e145532b3a looper connected:""crucible",",level"":v"30:0,"name":"crucible"msg",":"level,"":timeup_listen starts""30:","v":2023-09-22T23:14:27.60848186Z"0,,4301"Sep 22 23:14:27.608 INFO Connection request from 64be6ec7-0ac5-4896-883a-00ec34140b84 with version 4, task: proc
13070 "name",hostname"",,""timetime""::""looper:""2023-09-22T23:14:27.608489865Z2023-09-22T23:14:27.608500482Z""ip-10-150-1-74.us-west-2.compute.internal,,"":":hostnamehostname""::"Sep 22 23:14:27.608 INFO upstairs UpstairsConnection { upstairs_id: 64be6ec7-0ac5-4896-883a-00ec34140b84, session_id: 58d25f3b-f138-43ff-93b7-02b5e6bdc53e, gen: 1 } connected, version 4, task: proc
13071 crucible",""level2"""",}"
13072 :ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""30pid"Sep 22 23:14:27.608 INFO Connection request from 9ed4d6bc-e430-4200-9ad2-68905b0ff40c with version 4, task: proc
13073 Sep 22 23:14:27.608 INFO accepted connection from 127.0.0.1:33050, task: main
13074 :pidpid""::430143014301,,}""
13075 ,looperlooper":"":1"{"2""time"}}
13076 :Sep 22 23:14:27.608 INFO upstairs UpstairsConnection { upstairs_id: 9ed4d6bc-e430-4200-9ad2-68905b0ff40c, session_id: ad0fa1b4-52c3-4329-aeea-287c43c0dc1e, gen: 1 } connected, version 4, task: proc
13077 "{
13078 "2023-09-22T23:14:27.608552249Z"msg,{"hostname":""":msg"":"ip-10-150-1-74.us-west-2.compute.internal","pid":[2] Proc runs for 127.0.0.1:33349 in state New[1] Proc runs for 127.0.0.1:50563 in state New""4301,,""v"v:":,"00task,,""namename""::"""cruciblecrucible"",:,""levellevel"""::3030up_listen"}
13079 Sep 22 23:14:27.608 INFO Connection request from 64be6ec7-0ac5-4896-883a-00ec34140b84 with version 4, task: proc
13080 {,,"""timetimemsg"""::"":"2023-09-22T23:14:27.608618803Z2023-09-22T23:14:27.608619148Z"",,""Wait for all three downstairs to come online"hostnamehostname""::"","v":0ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",",,Sep 22 23:14:27.608 INFO upstairs UpstairsConnection { upstairs_id: 64be6ec7-0ac5-4896-883a-00ec34140b84, session_id: 58d25f3b-f138-43ff-93b7-02b5e6bdc53e, gen: 1 } connected, version 4, task: proc
13081 "pid""pidname"":Sep 22 23:14:27.608 INFO accepted connection from 127.0.0.1:42022, task: main
13082 :43014301:}
13083 }"
13084 {crucible",""level"msg"::"30[2] 66688a47-67b1-4e47-8e76-e0e145532b3a looper connected","v":0,"name":"crucible","level":30,"time":","time":"2023-09-22T23:14:27.608677768Z","2023-09-22T23:14:27.608685361Z"hostname",":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internalpid"":,4301"pid":}4301
13085 ,"looper":"{2"}
13086 {"msg":"{"msgFlush timeout: 0.5""":"msg",:"[1] Proc runs for 127.0.0.1:33784 in state New""v",:[2] Proc runs for 127.0.0.1:39722 in state New""v":,0","0namev""::"0,,crucible""name",:""level":crucible30"",name"":level"":30crucible","level":30,"time":","time2023-09-22T23:14:27.608745803Z"":","hostname"2023-09-22T23:14:27.608751103Z:"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal4301","}pid
13087 ":,"4301{time":}"
13088 "2023-09-22T23:14:27.608753921Zmsg":"","hostname":"[2] a697641e-15fb-475d-87b4-6f9c9b92b978 looper connected","ip-10-150-1-74.us-west-2.compute.internalv"",:"0pid",":name4301":"crucible"},
13089 "levelSep 22 23:14:27.608 INFO Connection request from 66688a47-67b1-4e47-8e76-e0e145532b3a with version 4, task: proc
13090 {":"30msg":"5b98377f-d3ca-45bf-893d-9aae2fd5a48e active request set","v"":msg"0:,""name":"crucible","[2] 64be6ec7-0ac5-4896-883a-00ec34140b84 looper connected,""level",time""v:""::030,2023-09-22T23:14:27.608808005Z""name",:""hostnamecrucible"":","level":30ip-10-150-1-74.us-west-2.compute.internal",Sep 22 23:14:27.608 INFO upstairs UpstairsConnection { upstairs_id: 66688a47-67b1-4e47-8e76-e0e145532b3a, session_id: ff839372-f996-4bbe-90dd-222e8d070509, gen: 1 } connected, version 4, task: proc
13091 ",pid"":time4301",:"",looper":2023-09-22T23:14:27.608830716Z""2""time",}"
13092 hostname"::"{"ip-10-150-1-74.us-west-2.compute.internal""msg",:""pid"2023-09-22T23:14:27.608826655Z":[2] Proc runs for 127.0.0.1:39493 in state New4301",,,""v"looper:"0:,""2"name":}"
13093 crucible"",{"hostname":"level"":msg"30:"ip-10-150-1-74.us-west-2.compute.internal","pid":[2] Proc runs for 127.0.0.1:60582 in state New"4301,"v":}0
13094 ,,""timename""::""crucible","2023-09-22T23:14:27.608880642Z"level",:"30{hostname":""ip-10-150-1-74.us-west-2.compute.internalmsg"":","pid":4301}
13095 ,"[0] 5b98377f-d3ca-45bf-893d-9aae2fd5a48e looper connected"time":,""v":02023-09-22T23:14:27.608895219Z,""name":","crucible"hostname,"":level"":30ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13096 {{""msgmsg""::""[0] 9ed4d6bc-e430-4200-9ad2-68905b0ff40c (ad0fa1b4-52c3-4329-aeea-287c43c0dc1e) New New New ds_transition to WaitActive[0] 64be6ec7-0ac5-4896-883a-00ec34140b84 (58d25f3b-f138-43ff-93b7-02b5e6bdc53e) New New New ds_transition to WaitActive"",,""vv""::0,"0,"time"name,"":name"":crucible"":"Sep 22 23:14:27.608 INFO Connection request from 66688a47-67b1-4e47-8e76-e0e145532b3a with version 4, task: proc
13097 crucible,""2023-09-22T23:14:27.60892007Z"level,"":level30":,30"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"0"}
13098 ,,""timetime""::""2023-09-22T23:14:27.608957939Z2023-09-22T23:14:27.608956552Z""{,,""hostnamehostname""::"""msgSep 22 23:14:27.608 INFO upstairs UpstairsConnection { upstairs_id: 66688a47-67b1-4e47-8e76-e0e145532b3a, session_id: ff839372-f996-4bbe-90dd-222e8d070509, gen: 1 } connected, version 4, task: proc
13099 ":"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid"":[0] Proc runs for 127.0.0.1:43186 in state New"4301:4301},}
13100 
13101 {"v":{"0msg"":"msg",:[0] Transition from New to WaitActive""","name":v[0] Transition from New to WaitActive"":,0",v"":name0","":"cruciblenamecrucible"":,"""levelcrucible"":,30","levellevel":"30Sep 22 23:14:27.608 INFO Connection request from a697641e-15fb-475d-87b4-6f9c9b92b978 with version 4, task: proc
13102 :,30"time":"2023-09-22T23:14:27.609018791Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""pid"time:":4301"}
13103 2023-09-22T23:14:27.609028642Z",,{""hostname"time":""msg"::ip-10-150-1-74.us-west-2.compute.internal""","pid":43012023-09-22T23:14:27.609022954Z"}
13104 [0] client is_active_req TRUE, promote! session ad0fa1b4-52c3-4329-aeea-287c43c0dc1e",{,"""vmsg"":Sep 22 23:14:27.609 INFO upstairs UpstairsConnection { upstairs_id: a697641e-15fb-475d-87b4-6f9c9b92b978, session_id: 01d84cdc-ab54-426d-be83-dc686be9c9e6, gen: 1 } connected, version 4, task: proc
13105 0,:""hostname"name":":[0] client is_active_req TRUE, promote! session 58d25f3b-f138-43ff-93b7-02b5e6bdc53e""crucible,""ip-10-150-1-74.us-west-2.compute.internalv,"":level0"Sep 22 23:14:27.609 INFO Connection request from 66688a47-67b1-4e47-8e76-e0e145532b3a with version 4, task: proc
13106 ,"",:"30name":"pid":crucible"4301,"level":}30
13107 ,"time":"2023-09-22T23:14:27.609084767Z",{","hostname"time:"":""2023-09-22T23:14:27.609092144Zip-10-150-1-74.us-west-2.compute.internal""msg",,""pid"hostname::""4301:"Sep 22 23:14:27.609 INFO upstairs UpstairsConnection { upstairs_id: 66688a47-67b1-4e47-8e76-e0e145532b3a, session_id: ff839372-f996-4bbe-90dd-222e8d070509, gen: 1 } connected, version 4, task: proc
13108 ip-10-150-1-74.us-west-2.compute.internal}[1] 5b98377f-d3ca-45bf-893d-9aae2fd5a48e looper connected","
13109 "v,{"pid":4301"}msg
13110 ":""{:0",msg":""[1] 9ed4d6bc-e430-4200-9ad2-68905b0ff40c (ad0fa1b4-52c3-4329-aeea-287c43c0dc1e) WaitActive New New ds_transition to WaitActive"name":,""v":crucible"0[1] 64be6ec7-0ac5-4896-883a-00ec34140b84 (58d25f3b-f138-43ff-93b7-02b5e6bdc53e) WaitActive New New ds_transition to WaitActive",,,""v"name:"0:,"""namecrucible"":,""levellevelcrucible"":,30"":level":3030,"time":"Sep 22 23:14:27.609 INFO Connection request from a697641e-15fb-475d-87b4-6f9c9b92b978 with version 4, task: proc
13111 ,2023-09-22T23:14:27.609150402Z""Sep 22 23:14:27.609 INFO [0] Proc runs for 127.0.0.1:42970 in state New
13112 time",":Sep 22 23:14:27.608 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:48117, task: repair
13113 hostname":""2023-09-22T23:14:27.609153217Z"ip-10-150-1-74.us-west-2.compute.internal",",pid"":hostname4301":"}
13114 ip-10-150-1-74.us-west-2.compute.internal"Sep 22 23:14:27.609 INFO upstairs UpstairsConnection { upstairs_id: a697641e-15fb-475d-87b4-6f9c9b92b978, session_id: 01d84cdc-ab54-426d-be83-dc686be9c9e6, gen: 1 } connected, version 4, task: proc
13115 ,{"pid":4301",msg":""looper":"1"[1] Transition from New to WaitActive"}
13116 ,"v":0,"name":"crucible"{,"level":30"msg":"[1] Proc runs for 127.0.0.1:46381 in state New"Sep 22 23:14:27.609 INFO Using repair address: 127.0.0.1:48117, task: main
13117 ,"v":0,",name":""crucible"time",":level"":302023-09-22T23:14:27.609226036Z","hostname":"Sep 22 23:14:27.609 INFO No SSL acceptor configured, task: main
13118 ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13119 {,""timemsg"":":"2023-09-22T23:14:27.609246809Z","[1] client is_active_req TRUE, promote! session ad0fa1b4-52c3-4329-aeea-287c43c0dc1ehostname":""Sep 22 23:14:27.609 INFO [1] a50fc3fb-3de2-4743-9c50-cc80cfba77db looper connected, looper: 1
13120 ip-10-150-1-74.us-west-2.compute.internal",Sep 22 23:14:27.609 INFO Connection request from 5b98377f-d3ca-45bf-893d-9aae2fd5a48e with version 4, task: proc
13121 Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: 9ed4d6bc-e430-4200-9ad2-68905b0ff40c, session_id: ad0fa1b4-52c3-4329-aeea-287c43c0dc1e, gen: 1 } is now active (read-write)
13122 ,""pidv""::04301,"name":"}
13123 crucible","level":30{"Sep 22 23:14:27.609 INFO Connection request from a697641e-15fb-475d-87b4-6f9c9b92b978 with version 4, task: proc
13124 msg"Sep 22 23:14:27.609 INFO upstairs UpstairsConnection { upstairs_id: 5b98377f-d3ca-45bf-893d-9aae2fd5a48e, session_id: d3623aa7-ee8c-40f8-a982-d0eadbcd4a76, gen: 1 } connected, version 4, task: proc
13125 :","time":"[2] 5b98377f-d3ca-45bf-893d-9aae2fd5a48e looper connected"2023-09-22T23:14:27.609301836Z",",v"":hostname0":","name":"ip-10-150-1-74.us-west-2.compute.internal"crucible,"",pid"":level4301"Sep 22 23:14:27.609 INFO [1] Proc runs for 127.0.0.1:62060 in state New
13126 :30}Sep 22 23:14:27.609 INFO upstairs UpstairsConnection { upstairs_id: a697641e-15fb-475d-87b4-6f9c9b92b978, session_id: 01d84cdc-ab54-426d-be83-dc686be9c9e6, gen: 1 } connected, version 4, task: proc
13127 
13128 {",msg":""time":"2023-09-22T23:14:27.609342925Z","hostname":"[2] 9ed4d6bc-e430-4200-9ad2-68905b0ff40c (ad0fa1b4-52c3-4329-aeea-287c43c0dc1e) WaitActive WaitActive New ds_transition to WaitActive"ip-10-150-1-74.us-west-2.compute.internal",","vpid""::04301,"name":,""looper":crucible"","2"level":}30
13129 {"msg":"[2] Proc runs for 127.0.0.1:49774 in state New",",time":""v":02023-09-22T23:14:27.609378985Z",","name":hostname":""crucible","ip-10-150-1-74.us-west-2.compute.internal"level,""Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: 9ed4d6bc-e430-4200-9ad2-68905b0ff40c, session_id: ad0fa1b4-52c3-4329-aeea-287c43c0dc1e, gen: 1 } is now active (read-write)
13130 Sep 22 23:14:27.609 INFO Connection request from 5b98377f-d3ca-45bf-893d-9aae2fd5a48e with version 4, task: proc
13131 :pid30":4301Sep 22 23:14:27.609 INFO [2] a50fc3fb-3de2-4743-9c50-cc80cfba77db looper connected, looper: 2
13132 }
13133 {,"time":"{2023-09-22T23:14:27.609415347Z"""msg,msg""::""Sep 22 23:14:27.609 INFO upstairs UpstairsConnection { upstairs_id: 5b98377f-d3ca-45bf-893d-9aae2fd5a48e, session_id: d3623aa7-ee8c-40f8-a982-d0eadbcd4a76, gen: 1 } connected, version 4, task: proc
13134 "[0] 66688a47-67b1-4e47-8e76-e0e145532b3a (ff839372-f996-4bbe-90dd-222e8d070509) New New New ds_transition to WaitActive[2] Transition from New to WaitActive""hostname":,,""v":v0",:"0name,"":"name"crucible:"",""crucible"level",:"level30":ip-10-150-1-74.us-west-2.compute.internal"30,"pid":4301}
13135 ,,""timetime""::""2023-09-22T23:14:27.609457223Z2023-09-22T23:14:27.609459069Z""Sep 22 23:14:27.609 INFO [2] Proc runs for 127.0.0.1:42513 in state New
13136 ,,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::43014301}
13137 }
13138 {"{msg":""msg":"[0] Transition from New to WaitActive"[2] client is_active_req TRUE, promote! session ad0fa1b4-52c3-4329-aeea-287c43c0dc1e",",v"":v"0:0,","name":name"":"crucible"crucible",","level"level:":3030{"msg":",,""[0] a697641e-15fb-475d-87b4-6f9c9b92b978 (01d84cdc-ab54-426d-be83-dc686be9c9e6) New New New ds_transition to WaitActivetime"Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: 9ed4d6bc-e430-4200-9ad2-68905b0ff40c, session_id: ad0fa1b4-52c3-4329-aeea-287c43c0dc1e, gen: 1 } is now active (read-write)
13139 Sep 22 23:14:27.609 INFO Connection request from 5b98377f-d3ca-45bf-893d-9aae2fd5a48e with version 4, task: proc
13140 ,"time:"":""v":2023-09-22T23:14:27.60950878Z2023-09-22T23:14:27.609508835Z""0,,"","hostnamehostname""::""name":"crucible"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid":"4301:,4301}}
13141 
13142 Sep 22 23:14:27.609 INFO upstairs UpstairsConnection { upstairs_id: 5b98377f-d3ca-45bf-893d-9aae2fd5a48e, session_id: d3623aa7-ee8c-40f8-a982-d0eadbcd4a76, gen: 1 } connected, version 4, task: proc
13143 "level":{30"msg":"[0] client is_active_req TRUE, promote! session ff839372-f996-4bbe-90dd-222e8d070509","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.609566405Z","hostname",:""time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:14:27.609578209Zpid"":,4301"hostname":"}
13144 ip-10-150-1-74.us-west-2.compute.internal"Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: 64be6ec7-0ac5-4896-883a-00ec34140b84, session_id: 58d25f3b-f138-43ff-93b7-02b5e6bdc53e, gen: 1 } is now active (read-write)
13145 {Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: 66688a47-67b1-4e47-8e76-e0e145532b3a, session_id: ff839372-f996-4bbe-90dd-222e8d070509, gen: 1 } is now active (read-write)
13146 "msg":"[0] Transition from New to WaitActive","v":0,",name"":"time"crucible":","level":2023-09-22T23:14:27.609153139Z"30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13147 {,""timemsg":"":"[1] Transition from New to WaitActive"2023-09-22T23:14:27.609635126Z","v",":hostname0",":"name":"crucible"ip-10-150-1-74.us-west-2.compute.internal,""level",":pid"30:4301}
13148 {"msg":","time":"[0] client is_active_req TRUE, promote! session 01d84cdc-ab54-426d-be83-dc686be9c9e6"2023-09-22T23:14:27.60967046Z",",v":"0hostname,"":"name":"crucible",ip-10-150-1-74.us-west-2.compute.internal"","levelpid""::430130}
13149 {"msg":"[1] client is_active_req TRUE, promote! session 58d25f3b-f138-43ff-93b7-02b5e6bdc53e,"",time":""v":02023-09-22T23:14:27.609702445Z,""Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: 64be6ec7-0ac5-4896-883a-00ec34140b84, session_id: 58d25f3b-f138-43ff-93b7-02b5e6bdc53e, gen: 1 } is now active (read-write)
13150 ,"name":"hostname":crucible"","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13151 {"msg":"Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: 66688a47-67b1-4e47-8e76-e0e145532b3a, session_id: ff839372-f996-4bbe-90dd-222e8d070509, gen: 1 } is now active (read-write)
13152 [1] a697641e-15fb-475d-87b4-6f9c9b92b978 (01d84cdc-ab54-426d-be83-dc686be9c9e6) WaitActive New New ds_transition to WaitActive,""time":,""v":02023-09-22T23:14:27.609735591Z","name,"":hostname"":"crucible","levelip-10-150-1-74.us-west-2.compute.internal"",":pid"30:4301}
13153 {Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: a697641e-15fb-475d-87b4-6f9c9b92b978, session_id: 01d84cdc-ab54-426d-be83-dc686be9c9e6, gen: 1 } is now active (read-write)
13154 ,""time":"msg":"2023-09-22T23:14:27.609773659Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal"[2] 64be6ec7-0ac5-4896-883a-00ec34140b84 (58d25f3b-f138-43ff-93b7-02b5e6bdc53e) WaitActive WaitActive New ds_transition to WaitActive",",pid"":v"4301:0,"}name":"
13155 crucible","level":30{"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time"Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: 64be6ec7-0ac5-4896-883a-00ec34140b84, session_id: 58d25f3b-f138-43ff-93b7-02b5e6bdc53e, gen: 1 } is now active (read-write)
13156 ,"time":"2023-09-22T23:14:27.609830279Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""pid":pid"4301:4301}
13157 }
13158 Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: 66688a47-67b1-4e47-8e76-e0e145532b3a, session_id: ff839372-f996-4bbe-90dd-222e8d070509, gen: 1 } is now active (read-write)
13159 {{"msg":""msg":"[1] client is_active_req TRUE, promote! session 01d84cdc-ab54-426d-be83-dc686be9c9e6","v":0,"name":"[1] 66688a47-67b1-4e47-8e76-e0e145532b3a (ff839372-f996-4bbe-90dd-222e8d070509) WaitActive New New ds_transition to WaitActive"crucible",,""level"v"::030,"name":"crucible","level":30Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: a697641e-15fb-475d-87b4-6f9c9b92b978, session_id: 01d84cdc-ab54-426d-be83-dc686be9c9e6, gen: 1 } is now active (read-write)
13160 ,","time":"time":"2023-09-22T23:14:27.60989648Z"2023-09-22T23:14:27.609900443Z",","hostname"hostname":":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal,"","pid"pid"::43014301}
13161 }
13162 {"{msg":"[1] Transition from New to WaitActive"","msg":"v":0,"name":"crucible","level":30[2] a697641e-15fb-475d-87b4-6f9c9b92b978 (01d84cdc-ab54-426d-be83-dc686be9c9e6) WaitActive WaitActive New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.60996031Z","hostname":"Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: 5b98377f-d3ca-45bf-893d-9aae2fd5a48e, session_id: d3623aa7-ee8c-40f8-a982-d0eadbcd4a76, gen: 1 } is now active (read-write)
13163 ip-10-150-1-74.us-west-2.compute.internal,""time":","pid":2023-09-22T23:14:27.609967071Z"4301,"}hostname":
13164 Sep 22 23:14:27.609 INFO Connection request from a50fc3fb-3de2-4743-9c50-cc80cfba77db with version 4, task: proc
13165 {""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301[1] client is_active_req TRUE, promote! session ff839372-f996-4bbe-90dd-222e8d070509"}
13166 Sep 22 23:14:27.609 INFO UpstairsConnection { upstairs_id: a697641e-15fb-475d-87b4-6f9c9b92b978, session_id: 01d84cdc-ab54-426d-be83-dc686be9c9e6, gen: 1 } is now active (read-write)
13167 {,""v"msg:":Sep 22 23:14:27.610 INFO upstairs UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } connected, version 4, task: proc
13168 0,""name":"crucible",[2] Transition from New to WaitActive""level",:"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.610042999Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""timepid""::"43012023-09-22T23:14:27.610048332Z"}
13169 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal"{,"pid":4301"}msg"
13170 :"{[2] 66688a47-67b1-4e47-8e76-e0e145532b3a (ff839372-f996-4bbe-90dd-222e8d070509) WaitActive WaitActive New ds_transition to WaitActive"",msg""Sep 22 23:14:27.610 INFO UpstairsConnection { upstairs_id: 5b98377f-d3ca-45bf-893d-9aae2fd5a48e, session_id: d3623aa7-ee8c-40f8-a982-d0eadbcd4a76, gen: 1 } is now active (read-write)
13171 v"::0","name":"crucible","level":30[2] client is_active_req TRUE, promote! session 01d84cdc-ab54-426d-be83-dc686be9c9e6","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.610111392Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,Sep 22 23:14:27.610 INFO Connection request from a50fc3fb-3de2-4743-9c50-cc80cfba77db with version 4, task: proc
13172 }
13173 "time":"2023-09-22T23:14:27.610118082Z"{,"hostname":""msg":ip-10-150-1-74.us-west-2.compute.internal"","pid":[2] Transition from New to WaitActive4301","}v"
13174 :0Sep 22 23:14:27.610 INFO upstairs UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } connected, version 4, task: proc
13175 ,"name":"crucible","level":30,"time":"{2023-09-22T23:14:27.610169361Z","hostname":""ip-10-150-1-74.us-west-2.compute.internal"msg":,""pid":4301}
13176 [0] 5b98377f-d3ca-45bf-893d-9aae2fd5a48e (d3623aa7-ee8c-40f8-a982-d0eadbcd4a76) New New New ds_transition to WaitActive","v":0,{"name":"crucible",""level"msg:"30:"[2] client is_active_req TRUE, promote! session ff839372-f996-4bbe-90dd-222e8d070509","v":0,"name":"Sep 22 23:14:27.610 INFO UpstairsConnection { upstairs_id: 5b98377f-d3ca-45bf-893d-9aae2fd5a48e, session_id: d3623aa7-ee8c-40f8-a982-d0eadbcd4a76, gen: 1 } is now active (read-write)
13177 ,crucible""time",":"level":302023-09-22T23:14:27.610207196Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13178 {,"time":""msg":"2023-09-22T23:14:27.610234067Z","[0] Transition from New to WaitActive"hostname":","v":0ip-10-150-1-74.us-west-2.compute.internal",",Sep 22 23:14:27.610 INFO Connection request from a50fc3fb-3de2-4743-9c50-cc80cfba77db with version 4, task: proc
13179 "pid"name":":crucible4301","level}"
13180 :30Sep 22 23:14:27.610 INFO upstairs UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } connected, version 4, task: proc
13181 ,"time":"2023-09-22T23:14:27.610278239Z":,""hostname":"2023-09-22T23:14:27.609817026Z"ip-10-150-1-74.us-west-2.compute.internal",","hostnamepid""::"4301}ip-10-150-1-74.us-west-2.compute.internal
13182 ","pid":4301{}
13183 "msg":"{[0] client is_active_req TRUE, promote! session d3623aa7-ee8c-40f8-a982-d0eadbcd4a76","v"":msg0":,""name":"crucible"[2] Transition from New to WaitActive,""level":,"30v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.610338693Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""timepid":"":43012023-09-22T23:14:27.610343754Z"}
13184 ,"hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4301"msg":}"
13185 [1] 5b98377f-d3ca-45bf-893d-9aae2fd5a48e (d3623aa7-ee8c-40f8-a982-d0eadbcd4a76) WaitActive New New ds_transition to WaitActive","v":0{,"name":"crucible",""levelmsg""::30"[2] client is_active_req TRUE, promote! session 58d25f3b-f138-43ff-93b7-02b5e6bdc53e","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.610392879Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,"4301time":"}
13186 2023-09-22T23:14:27.610402812Z","hostname":"Sep 22 23:14:27.610 INFO [0] a50fc3fb-3de2-4743-9c50-cc80cfba77db (2b698136-bbd2-4fcb-ab7c-5f19b2267eb8) New New New ds_transition to WaitActive
13187 ip-10-150-1-74.us-west-2.compute.internal"{,"pid":4301"}msg"
13188 :"[1] Transition from New to WaitActive"{,"v":0,""msg"Sep 22 23:14:27.610 INFO [0] Transition from New to WaitActive
13189 :"name":"crucible","level":30[0] downstairs client at 127.0.0.1:45897 has UUID b63c1426-990b-43e7-baa9-a926ab8df052","v":0,"name":"crucible","level":30,Sep 22 23:14:27.610 INFO [0] client is_active_req TRUE, promote! session 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8
13190 ,""time":"time":"2023-09-22T23:14:27.610461629Z"2023-09-22T23:14:27.610455543Z","hostname,"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal",ip-10-150-1-74.us-west-2.compute.internal"","pidpid""::43014301}}
13191 
13192 {"{msg":""msg":"[1] client is_active_req TRUE, promote! session d3623aa7-ee8c-40f8-a982-d0eadbcd4a76","v":0[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b63c1426-990b-43e7-baa9-a926ab8df052, encrypted: true, database_read_version: 1, database_write_version: 1 }","name",":"v"crucible":,"0level",:"30name":"crucible","level":30,"time":"2023-09-22T23:14:27.610525091Z",,""hostname"time":":"2023-09-22T23:14:27.61052837Z"ip-10-150-1-74.us-west-2.compute.internal",,""hostnamepid""::"4301}ip-10-150-1-74.us-west-2.compute.internal
13193 ","pid":4301{}
13194 "msg":"{"msg[2] 5b98377f-d3ca-45bf-893d-9aae2fd5a48e (d3623aa7-ee8c-40f8-a982-d0eadbcd4a76) WaitActive WaitActive New ds_transition to WaitActive"":","v":Sep 22 23:14:27.610 INFO [1] a50fc3fb-3de2-4743-9c50-cc80cfba77db (2b698136-bbd2-4fcb-ab7c-5f19b2267eb8) WaitActive New New ds_transition to WaitActive
13195 64be6ec7-0ac5-4896-883a-00ec34140b84 WaitActive WaitActive WaitActive"0,","vname""::"0crucible",",name"":"level":crucible"30,Sep 22 23:14:27.610 INFO [1] Transition from New to WaitActive
13196 "level":30,",time"":"time":"Sep 22 23:14:27.610 INFO [1] client is_active_req TRUE, promote! session 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8
13197 2023-09-22T23:14:27.61060414Z"2023-09-22T23:14:27.610599829Z","hostname,"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal",ip-10-150-1-74.us-west-2.compute.internal"",pid""pid:"4301:4301}
13198 }
13199 {"msg"{:""msg":"[1] downstairs client at 127.0.0.1:43400 has UUID 1a8f9cb4-4a64-4013-8234-292244fb0112Sep 22 23:14:27.610 INFO Current flush_numbers [0..12]: [0, 0]
13200 [2] Transition from New to WaitActive"",",v"":v"0:,"0name",":name":""crucible"crucible",",level"":level"30:30,"time":,""time":2023-09-22T23:14:27.610683717Z"","2023-09-22T23:14:27.610684907Z"hostname":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pidip-10-150-1-74.us-west-2.compute.internal"",:"4301pid":4301}
13201 }
13202 Sep 22 23:14:27.610 INFO [2] a50fc3fb-3de2-4743-9c50-cc80cfba77db (2b698136-bbd2-4fcb-ab7c-5f19b2267eb8) WaitActive WaitActive New ds_transition to WaitActive
13203 {{"msg":""msg":"[2] client is_active_req TRUE, promote! session d3623aa7-ee8c-40f8-a982-d0eadbcd4a76","v":0,"name":"crucible"[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1a8f9cb4-4a64-4013-8234-292244fb0112, encrypted: true, database_read_version: 1, database_write_version: 1 },""level":Sep 22 23:14:27.610 INFO [2] Transition from New to WaitActive
13204 ,"30v":0,"name":"crucible","level"Sep 22 23:14:27.610 INFO Current flush_numbers [0..12]: [0, 0]
13205 :30,Sep 22 23:14:27.610 INFO [2] client is_active_req TRUE, promote! session 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8
13206 "time":"2023-09-22T23:14:27.610751993Z",","time":"hostname":"2023-09-22T23:14:27.61076398Z","ip-10-150-1-74.us-west-2.compute.internalhostname"":","pid":4301Sep 22 23:14:27.610 INFO Current flush_numbers [0..12]: [0, 0]
13207 }ip-10-150-1-74.us-west-2.compute.internal"
13208 ,"pid":4301}{
13209 "msg":"{[0] downstairs client at 127.0.0.1:43186 has UUID 6b339ff0-fd62-4d3d-96a2-00a3cd97fb5c",""v"msg"::"0,"name":"crucible","64be6ec7-0ac5-4896-883a-00ec34140b84 WaitActive WaitActive WaitActive"level":,"30v":0,"name":"crucible","level":30,"time":"Sep 22 23:14:27.610 INFO Current flush_numbers [0..12]: [0, 0]
13210 2023-09-22T23:14:27.610816648Z",,""time":"hostname":"2023-09-22T23:14:27.61082176Z"ip-10-150-1-74.us-west-2.compute.internal",","hostname"pid":":4301ip-10-150-1-74.us-west-2.compute.internal"},
13211 "pid":4301}
13212 {"{msg":""msg":"[2] downstairs client at 127.0.0.1:60582 has UUID 7f911e5b-f4b1-49f1-9a5d-fe84ec8226ed","v":0[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6b339ff0-fd62-4d3d-96a2-00a3cd97fb5c, encrypted: true, database_read_version: 1, database_write_version: 1 }","name,"":"v":crucible"0,","levelname""::"30crucible","level":30,"time":"2023-09-22T23:14:27.610892963Z",","time"hostname"::""2023-09-22T23:14:27.610895692Z"ip-10-150-1-74.us-west-2.compute.internalSep 22 23:14:27.610 INFO UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } is now active (read-write)
13213 ",",pid"":hostname4301":"}
13214 ip-10-150-1-74.us-west-2.compute.internal","pid":4301{}
13215 "msg":"{"msg":"5b98377f-d3ca-45bf-893d-9aae2fd5a48e WaitActive WaitActive WaitActive"[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 7f911e5b-f4b1-49f1-9a5d-fe84ec8226ed, encrypted: true, database_read_version: 1, database_write_version: 1 }","v,""v"::00,"Sep 22 23:14:27.610 INFO Downstairs has completed Negotiation, task: proc
13216 name":,""crucible"name",:""crucible"level",":Sep 22 23:14:27.610 INFO Downstairs has completed Negotiation, task: proc
13217 30level":30Sep 22 23:14:27.610 INFO Downstairs has completed Negotiation, task: proc
13218 ,"time":"2023-09-22T23:14:27.610979154Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""time"pid":":43012023-09-22T23:14:27.610982862Z"}
13219 ,"hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":4301"msg":}"
13220 Sep 22 23:14:27.610 INFO Downstairs has completed Negotiation, task: proc
13221 {[1] downstairs client at 127.0.0.1:46381 has UUID 45f3b954-a0cc-44a1-92c8-91ac1da9b5f8"{,""v":0msg",msg""::""name":""crucible","level":[0] downstairs client at 127.0.0.1:56524 has UUID 35ccb45b-0c22-4a14-bf83-50fbfaffa7a630"64be6ec7-0ac5-4896-883a-00ec34140b84 WaitActive WaitActive WaitActive",,""v"v"::00,,"{"name"name":":"crucible""crucible",msg,"""level":,""time"level":"::[0] downstairs client at 127.0.0.1:32776 has UUID d2285a39-bf6a-4487-9493-8622023b1e4b"30Sep 22 23:14:27.611 INFO UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } is now active (read-write)
13222 30,","time":"2023-09-22T23:14:27.611073029Z"v,""hostname":":0ip-10-150-1-74.us-west-2.compute.internal",,""pid"name:"2023-09-22T23:14:27.611043655Z4301:""}crucible
13223 ",,"",timehostname""":":level"2023-09-22T23:14:27.611084826Z"":ip-10-150-1-74.us-west-2.compute.internal"30,","hostnamepid""::"4301}
13224 ip-10-150-1-74.us-west-2.compute.internal","pid":4301{}
13225 "msg,"":"time":"{2023-09-22T23:14:27.611119562Z",""hostnamemsg"Sep 22 23:14:27.611 INFO Current flush_numbers [0..12]: [0, 0]
13226 :"[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 45f3b954-a0cc-44a1-92c8-91ac1da9b5f8, encrypted: true, database_read_version: 1, database_write_version: 1 }"":"ip-10-150-1-74.us-west-2.compute.internal,""v",:"0pid",":name"4301:"crucible"},[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 35ccb45b-0c22-4a14-bf83-50fbfaffa7a6, encrypted: true, database_read_version: 1, database_write_version: 1 }"
13227 "{,"level"v":":msg300"{,:""name":"crucible",""level"msg":[0] downstairs client at 127.0.0.1:36835 has UUID 9008985f-4d59-43db-b559-a48d3978dfda30:"","v":0,"name":"crucible","level":30,"time[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d2285a39-bf6a-4487-9493-8622023b1e4b, encrypted: true, database_read_version: 1, database_write_version: 1 }"":","v"2023-09-22T23:14:27.611177307Z":,0",,"time"Sep 22 23:14:27.611 INFO UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } is now active (read-write)
13228 ",:""timename"2023-09-22T23:14:27.611184864Z":"":"hostname"crucible:",2023-09-22T23:14:27.61119239Z""",hostname"",ip-10-150-1-74.us-west-2.compute.internal""hostname:"level",:"30:""pid":4301ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal""},,""pidpid""::43014301
13229 }}
13230 
13231 {{,""{msg":time""msg"":msg"a697641e-15fb-475d-87b4-6f9c9b92b978 WaitActive WaitActive WaitActive"":",""2023-09-22T23:14:27.611234968Z"v"::,0[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9008985f-4d59-43db-b559-a48d3978dfda, encrypted: true, database_read_version: 1, database_write_version: 1 },""""name,"":v"":crucible0",,""namelevel""::"30hostname"5b98377f-d3ca-45bf-893d-9aae2fd5a48e WaitActive WaitActive WaitActive"crucible":,,""v"level"::030,,"""name"timeip-10-150-1-74.us-west-2.compute.internal":,""crucible"":pid"",:"4301,time2023-09-22T23:14:27.611282365Z"":",}"2023-09-22T23:14:27.611292168Z""
13232 level",hostname""hostname"::""Sep 22 23:14:27.611 INFO Current flush_numbers [0..12]: [0, 0]
13233 ip-10-150-1-74.us-west-2.compute.internal{"ip-10-150-1-74.us-west-2.compute.internal,"",:pid30"":pid"Sep 22 23:14:27.611 INFO Downstairs has completed Negotiation, task: proc
13234 msg""4301::"4301}
13235 }
13236 {66688a47-67b1-4e47-8e76-e0e145532b3a WaitActive WaitActive WaitActive"{","msgmsg""::"",""v":time"0[1] downstairs client at 127.0.0.1:33784 has UUID fe394f4d-b22e-45c5-ad25-c835a8779c919ed4d6bc-e430-4200-9ad2-68905b0ff40c WaitActive WaitActive WaitActive:""",,,""v"v:"0:,0",name"":name"""crucible:"",name":"2023-09-22T23:14:27.611331929Z"cruciblelevel"crucible",:"30level""":,30","level"hostname"::"30ip-10-150-1-74.us-west-2.compute.internal",",time"",:""pid"time2023-09-22T23:14:27.611377076Z""::,4301""hostname":"2023-09-22T23:14:27.61138063Z"},
13237 ip-10-150-1-74.us-west-2.compute.internal"",,hostname""pid"":time":"":{2023-09-22T23:14:27.611385182Z"4301ip-10-150-1-74.us-west-2.compute.internal",},
13238 ""pid""{:hostname":msg"":""4301msg"}:
13239 "[2] downstairs client at 127.0.0.1:49774 has UUID ed248c78-99b2-4b20-8580-19eb24b6c3f5"ip-10-150-1-74.us-west-2.compute.internal{",""v":msg",":"0pid",:"4301name"[1] downstairs client at 127.0.0.1:52116 has UUID 54deed7d-517b-47d9-a428-2bbdc0729fba[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: fe394f4d-b22e-45c5-ad25-c835a8779c91, encrypted: true, database_read_version: 1, database_write_version: 1 }"}",,""v":v"0:,0",name""
13240 name:""crucible","level":30{:"crucible",""level"msg":":30[1] downstairs client at 127.0.0.1:50563 has UUID 594f2637-eae4-459c-bc2e-92b501adec4b",,""v":0,"name":"time,"crucible"":,time""":2023-09-22T23:14:27.611462933Z""level":,2023-09-22T23:14:27.611469371Z""30,hostname"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal,"","pid"pid:":43014301}}
13241 
13242 {{"msg":""msg":Sep 22 23:14:27.611 INFO Downstairs has completed Negotiation, task: proc
13243 ","time"[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 54deed7d-517b-47d9-a428-2bbdc0729fba, encrypted: true, database_read_version: 1, database_write_version: 1 }a697641e-15fb-475d-87b4-6f9c9b92b978 WaitActive WaitActive WaitActive"":,,""v":v0",:"0name"":,""namecrucible"":,""2023-09-22T23:14:27.61148799Z"levelcrucible"":,,"30level"":30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13244 ,,""timetime""::""2023-09-22T23:14:27.611527198Z{2023-09-22T23:14:27.611525227Z"",,""hostnamehostname""::"""msg":"ip-10-150-1-74.us-west-2.compute.internal",ip-10-150-1-74.us-west-2.compute.internal""pid,"":4301pid":}4301
13245 }
13246 {[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 594f2637-eae4-459c-bc2e-92b501adec4b, encrypted: true, database_read_version: 1, database_write_version: 1 }"{msg""":"msg":,""v":0,"[2] downstairs client at 127.0.0.1:39493 has UUID 4bb2c32c-5001-4960-9cd0-22694b9c6a8cname"9ed4d6bc-e430-4200-9ad2-68905b0ff40c WaitActive WaitActive WaitActive,""":v,"""v:"0:,0"cruciblename,"":name"""crucible:"",,"cruciblelevel"":,30""levellevel""::3030,"time":"2023-09-22T23:14:27.611577651Z","hostname":,""time":"ip-10-150-1-74.us-west-2.compute.internal",2023-09-22T23:14:27.611581807Z""pid",:"4301hostname"}:
13247 "Sep 22 23:14:27.611 INFO Current flush_numbers [0..12]: [0, 0]
13248 ,{"time""ip-10-150-1-74.us-west-2.compute.internalmsg"",:""pid":":4301}2023-09-22T23:14:27.611581735Z"
13249 ,"hostname":"{[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4bb2c32c-5001-4960-9cd0-22694b9c6a8c, encrypted: true, database_read_version: 1, database_write_version: 1 }"",msg""v:"":ip-10-150-1-74.us-west-2.compute.internal"0,",pid"[2] downstairs client at 127.0.0.1:33349 has UUID 922c2bc8-c976-47a3-990e-e4ec311f6c5b""name",:""v"crucible":,0",level"":name30"::"4301crucible","}level"
13250 :30,"time":"2023-09-22T23:14:27.611632811Z"{,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal",","time"pid:"":4301}66688a47-67b1-4e47-8e76-e0e145532b3a WaitActive WaitActive WaitActive"
13251 2023-09-22T23:14:27.611641895Z",",{"v"hostname""msg:"":":0ip-10-150-1-74.us-west-2.compute.internal,""a697641e-15fb-475d-87b4-6f9c9b92b978 WaitActive WaitActive WaitActive,"",pid""v"::43010name},Sep 22 23:14:27.611 INFO Current flush_numbers [0..12]: [0, 0]
13252 
13253 ""name{":"":"crucible"cruciblemsg"",:""level",:"30level":30[2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 922c2bc8-c976-47a3-990e-e4ec311f6c5b, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,",name"":time"":"crucible","level"2023-09-22T23:14:27.611690958Z:"30,"hostname":"Sep 22 23:14:27.611 INFO [0] downstairs client at 127.0.0.1:42970 has UUID 122e249d-ed33-49c6-bb2e-b79a59ebf9a6
13254 Sep 22 23:14:27.611 INFO Current flush_numbers [0..12]: [0, 0]
13255 ,",ip-10-150-1-74.us-west-2.compute.internal""time",time""pid"::"4301:"}
13256 2023-09-22T23:14:27.611703634Z"2023-09-22T23:14:27.611692722Z",","hostname"hostname"::""ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",,""pid"pid"::43014301}
13257 }Sep 22 23:14:27.611 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 122e249d-ed33-49c6-bb2e-b79a59ebf9a6, encrypted: true, database_read_version: 1, database_write_version: 1 }
13258 {
13259 "msg"{:""9ed4d6bc-e430-4200-9ad2-68905b0ff40c WaitActive WaitActive WaitActive"msg":,""v":0:,""name":"Sep 22 23:14:27.611 INFO Downstairs has completed Negotiation, task: proc
13260 [2] downstairs client at 127.0.0.1:39722 has UUID bb12d53e-6697-4186-ab01-3149bf11c327"cruciblecrucible"",",,""levellevel""::3030v":0,"name":"crucible","level":30,,""timetime""::""2023-09-22T23:14:27.611789368Z2023-09-22T23:14:27.611789529Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal,"",",""pid"pid:"4301:time"4301}:"}
13261 
13262 2023-09-22T23:14:27.61179493Z{"Sep 22 23:14:27.611 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db WaitActive WaitActive WaitActive
13263 ","msg"hostname":":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13264 [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ed248c78-99b2-4b20-8580-19eb24b6c3f5, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":{"crucible","level":"30msg":"Sep 22 23:14:27.611 INFO Downstairs has completed Negotiation, task: proc
13265 [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: bb12d53e-6697-4186-ab01-3149bf11c327, encrypted: true, database_read_version: 1, database_write_version: 1 }","v",":time0":,""name":"crucible2023-09-22T23:14:27.611851597Z"",","hostnameSep 22 23:14:27.611 INFO Downstairs has completed Negotiation, task: proc
13266 ":"level"ip-10-150-1-74.us-west-2.compute.internal:"30,"pid":4301}
13267 {"msg":"5b98377f-d3ca-45bf-893d-9aae2fd5a48e WaitActive WaitActive WaitActive","v":0,"name":"crucible",",level":"30time":"2023-09-22T23:14:27.611881413Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301},
13268 "time":"Sep 22 23:14:27.611 INFO [1] downstairs client at 127.0.0.1:62060 has UUID 9191f668-24b7-4176-b853-43cf34e8ccd3
13269 {2023-09-22T23:14:27.611898217Z"","msghostname"":":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13270 66688a47-67b1-4e47-8e76-e0e145532b3a WaitActive WaitActive WaitActive","v":0,"name":"crucible","level":30Sep 22 23:14:27.611 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9191f668-24b7-4176-b853-43cf34e8ccd3, encrypted: true, database_read_version: 1, database_write_version: 1 }
13271 ,"time":"2023-09-22T23:14:27.611960257Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13272 {"msg":"Upstairs starts"Sep 22 23:14:27.611 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db WaitActive WaitActive WaitActive
13273 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.612013875Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13274 {"msg":"Crucible Version: BuildInfo {\n version: Sep 22 23:14:27.612 INFO Current flush_numbers [0..12]: [0, 0]
13275 \"0.0.1\",\n git_sha: \"Sep 22 23:14:27.612 INFO [2] downstairs client at 127.0.0.1:42513 has UUID b37a15c7-b55a-4f3b-ab04-a618bf111318
13276 ed48f294784d46ea7d4bb99336918b74358eca46\",\n git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\n git_branch: \"main\",\n rustc_semver: \"1.70.0\",\n rustc_channel: \"stable\",\n rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: \"x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\n}"Sep 22 23:14:27.612 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b37a15c7-b55a-4f3b-ab04-a618bf111318, encrypted: true, database_read_version: 1, database_write_version: 1 }
13277 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.612127745Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13278 Sep 22 23:14:27.612 INFO Current flush_numbers [0..12]: [0, 0]
13279 {Sep 22 23:14:27.612 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db WaitActive WaitActive WaitActive
13280 "msg":"Upstairs <-> Downstairs Message Version: 4","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.612184121Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13281 {"msg":"Crucible stats registered with UUID: 9fe7fa85-122e-4e82-8591-96595eb36a20","v":0,"name":"crucible","level":30,"time":"The guest has requested activation
13282 2023-09-22T23:14:27.612229436Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13283 Sep 22 23:14:27.612 INFO Downstairs has completed Negotiation, task: proc
132842023-09-22T23:14:27.612ZINFOcrucible: Crucible 9fe7fa85-122e-4e82-8591-96595eb36a20 has session id: baa610ca-0e7f-4837-b89d-19a907fb476a
13285 Sep 22 23:14:27.612 INFO listening on 127.0.0.1:0, task: main
13286 Sep 22 23:14:27.612 INFO Current flush_numbers [0..12]: [0, 0]
13287 Sep 22 23:14:27.612 INFO Current flush_numbers [0..12]: [0, 0]
13288 Sep 22 23:14:27.612 INFO Downstairs has completed Negotiation, task: proc
13289 {"msg":"[0] 64be6ec7-0ac5-4896-883a-00ec34140b84 (58d25f3b-f138-43ff-93b7-02b5e6bdc53e) WaitActive WaitActive WaitActive ds_transition to WaitQuorum"Sep 22 23:14:27.612 INFO listening on 127.0.0.1:0, task: main
13290 Sep 22 23:14:27.612 INFO Current flush_numbers [0..12]: [0, 0]
13291 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.612392805Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13292 {"msg":"[0] Transition from WaitActive to WaitQuorum","v"Sep 22 23:14:27.612 INFO listening on 127.0.0.1:0, task: main
13293 :0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.612446323Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13294 {"msg":"[0] new RM replaced this: None","v":0,"name":"crucible","level":40Sep 22 23:14:27.612 INFO Current flush_numbers [0..12]: [0, 0]
13295 ,"time":"2023-09-22T23:14:27.612488758Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13296 {"msg":"[0] Starts reconcile loop","v":0,"name":"crucible","level":30,"time":"Sep 22 23:14:27.612 INFO Downstairs has completed Negotiation, task: proc
13297 2023-09-22T23:14:27.612532558Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13298 {"msg":"Sep 22 23:14:27.612 INFO Downstairs has completed Negotiation, task: proc
13299 [1] 64be6ec7-0ac5-4896-883a-00ec34140b84 (58d25f3b-f138-43ff-93b7-02b5e6bdc53e) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.612577967Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13300 Sep 22 23:14:27.612 INFO Downstairs has completed Negotiation, task: proc
133012023-09-22T23:14:27.612ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
13302 {"msg":"[1] new RM replaced this: None","v":0,"name":"crucible","level":{40"msg":"[0] 9ed4d6bc-e430-4200-9ad2-68905b0ff40c (ad0fa1b4-52c3-4329-aeea-287c43c0dc1e) WaitActive WaitActive WaitActive ds_transition to WaitQuorum","v":,0",time"":name"":"crucible"2023-09-22T23:14:27.612657922Z",{,""level"hostname:":"30ip-10-150-1-74.us-west-2.compute.internal","pid":"4301msg}"
13303 :"{[0] connecting to 127.0.0.1:58086"",msg",:"""time[1] Starts reconcile loop"":","v"2023-09-22T23:14:27.612676054Zv:"":00,",,""hostname"name:"":"name":"ip-10-150-1-74.us-west-2.compute.internalcrucible"crucible,"",Sep 22 23:14:27.612 INFO Downstairs has completed Negotiation, task: proc
13304 ""pidlevel""::430130,"}level"
13305 :30{,""time"msg:"":"2023-09-22T23:14:27.612718265Z","[0] Transition from WaitActive to WaitQuorum"hostname":,""v":0,"ip-10-150-1-74.us-west-2.compute.internalname"":,""pidcrucible"":,4301"level}"
13306 :30{"msg":","time":[2] 64be6ec7-0ac5-4896-883a-00ec34140b84 (58d25f3b-f138-43ff-93b7-02b5e6bdc53e) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum"",,""vtime""::0","2023-09-22T23:14:27.61272565Zname2023-09-22T23:14:27.612739601Z"":","crucible"hostname,"":""level",:"30ip-10-150-1-74.us-west-2.compute.internalhostname"",:""pid":4301}
13307 ,"ip-10-150-1-74.us-west-2.compute.internal"time"{:",""pidmsg2023-09-22T23:14:27.612761922Z"":","":[0] new RM replaced this: Nonehostname"",:""v"4301:0ip-10-150-1-74.us-west-2.compute.internal",","name"pid"::"4301,"}crucible
13308 "looper":"{,"0"level"":msg40}":"
13309 [2] Transition from WaitActive to WaitQuorum","v":0,"name":","crucible"time",:""{level2023-09-22T23:14:27.612801291Z"":30,"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid[1] connecting to 127.0.0.1:50371"":4301,"}v",
13310 ":time0{":,""name":"msg"":crucible2023-09-22T23:14:27.612815166Z""",[0] Starts reconcile loop"",",level"hostname""v:""::0,ip-10-150-1-74.us-west-2.compute.internal""name",:""pid"30:crucible"4301,"}level
13311 ":30{"msg":"[2] new RM replaced this: None","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:14:27.612852681Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal",,""2023-09-22T23:14:27.612849199Z"pidtime""::,""4301hostname":"}2023-09-22T23:14:27.612862214Z
13312 "ip-10-150-1-74.us-west-2.compute.internal",","hostname{"pid"":msg:"":"4301ip-10-150-1-74.us-west-2.compute.internal",","looper"pid"::"4301[1] 9ed4d6bc-e430-4200-9ad2-68905b0ff40c (ad0fa1b4-52c3-4329-aeea-287c43c0dc1e) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum}"
13313 ,"1{v""":msg0",:""}Sep 22 23:14:27.612 INFO Current flush_numbers [0..12]: [0, 0]
13314 [2] Starts reconcile loopname
13315 "":",crucible""v",:"0{,level"":name"30:"crucible",""level"msg"::30"[2] connecting to 127.0.0.1:54983","v":0,"name":"crucible","level":30{"msg":"[0] a697641e-15fb-475d-87b4-6f9c9b92b978 (01d84cdc-ab54-426d-be83-dc686be9c9e6) WaitActive WaitActive WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.612920009Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
133162023-09-22T23:14:27.612ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
13317 ,"time":"2023-09-22T23:14:27.613065626Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal"",msg""pid":":4301[0] Transition from WaitActive to WaitQuorum}"Sep 22 23:14:27.613 INFO Downstairs has completed Negotiation, task: proc
13318 ,"
13319 v":0,"name":"{crucible","level":"30msg":"[1] new RM replaced this: None","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:14:27.613117111Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time",:""pid":43012023-09-22T23:14:27.61312507Z"},
13320 "hostname":"ip-10-150-1-74.us-west-2.compute.internal"{,"pid":4301"}msg
13321 ":"[0] new RM replaced this: None"{,"v":0,"name"":msg"":"crucible","[1] Starts reconcile loop"level":,"40v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.613175489Z",,""hostname":"time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:14:27.613180476Z,""pid",":hostname":"4301}ip-10-150-1-74.us-west-2.compute.internal"
13322 ,"pid":4301}{
13323 "Sep 22 23:14:27.613 INFO Current flush_numbers [0..12]: [0, 0]
13324 Sep 22 23:14:27.613 INFO accepted connection from 127.0.0.1:36467, task: main
13325 msg"{:"[0] Starts reconcile loop"","msg"v"::0","name":"crucible","level":30[2] 9ed4d6bc-e430-4200-9ad2-68905b0ff40c (ad0fa1b4-52c3-4329-aeea-287c43c0dc1e) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.613241907Z","hostname":",ip-10-150-1-74.us-west-2.compute.internal"","time"pid"::4301"}
13326 2023-09-22T23:14:27.613250147Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid":"4301msg":"}
13327 [1] a697641e-15fb-475d-87b4-6f9c9b92b978 (01d84cdc-ab54-426d-be83-dc686be9c9e6) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum","v":0,"name":"{crucible","level":30"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:14:27.613 INFO accepted connection from 127.0.0.1:54842, task: main
13328 ,"time":"2023-09-22T23:14:27.613294848Z","hostname":","ip-10-150-1-74.us-west-2.compute.internaltime":"","pid"2023-09-22T23:14:27.613304107Z":4301,"hostname":}"
13329 ip-10-150-1-74.us-west-2.compute.internal","pid":4301}{
13330 "msg":"{[1] Transition from WaitActive to WaitQuorum","v"":msg"0:,""name":"[2] new RM replaced this: Nonecrucible"",",level"":v":300,"name":"crucible","level":40,"time":","2023-09-22T23:14:27.613360261Z"time":","hostname":2023-09-22T23:14:27.613364294Z"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pidip-10-150-1-74.us-west-2.compute.internal"":,"4301pid":4301}
13331 }
13332 {"{msg":""[1] new RM replaced this: None"msg":","v":[2] Starts reconcile loop"0,,""namev":"":crucible0",",name""level:"":crucible"40,"level":30,"time":","2023-09-22T23:14:27.613420032Ztime"":","hostname":"2023-09-22T23:14:27.613421816Z","hostname"ip-10-150-1-74.us-west-2.compute.internal"Sep 22 23:14:27.613 INFO Downstairs has completed Negotiation, task: proc
13333 ,:""pid":4301ip-10-150-1-74.us-west-2.compute.internal"},
13334 "pid":4301}{
13335 "msg":"[1] Starts reconcile loop"{,"v":0,"name"":msg"":"crucible","level":[0] 127.0.0.1:36835 task reports connection:true"30,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.61348132Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time":","pid":2023-09-22T23:14:27.613487039Z"4301,"}hostname
13336 ":"ip-10-150-1-74.us-west-2.compute.internal","{pid":4301}
13337 "msg":"{"msg":"[2] a697641e-15fb-475d-87b4-6f9c9b92b978 (01d84cdc-ab54-426d-be83-dc686be9c9e6) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum","v":09ed4d6bc-e430-4200-9ad2-68905b0ff40c WaitQuorum WaitQuorum WaitQuorum,""name":","vcrucible"",":level"0:,"30name":"crucible","level":30,"time":","2023-09-22T23:14:27.61354247Z"time":","hostname":"2023-09-22T23:14:27.613545692Z","ip-10-150-1-74.us-west-2.compute.internalhostname":"","pid":4301ip-10-150-1-74.us-west-2.compute.internal"},"
13338 pid":4301}
13339 {"{msg":""[2] Transition from WaitActive to WaitQuorum"msg":","v":[0]R flush_numbers: [0, 0]0",","name":"v"crucible:"0,,""levelname""::"30crucible","level":30,"time":","time"2023-09-22T23:14:27.613604793Z":","hostname"2023-09-22T23:14:27.613607044Z":",The guest has finished waiting for activation
13340 "The guest has finished waiting for activation
13341 hostname"ip-10-150-1-74.us-west-2.compute.internal":,""pid":4301ip-10-150-1-74.us-west-2.compute.internal"},"
13342 pid":4301}
13343 {"{msg":"[2] new RM replaced this: None""msg",:""v":[0]R generation: [0, 0]"0,",name"":v"":crucible"0,,""name"level"::"40crucible","level":30,"time":","time":2023-09-22T23:14:27.613677912Z"","2023-09-22T23:14:27.613680411Z"hostname":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal4301Sep 22 23:14:27.613 INFO Current flush_numbers [0..12]: [0, 0]
13344 }"
13345 ,"pid":4301{}
13346 "msg":"[2] Starts reconcile loop"{,"v":Sep 22 23:14:27.613 INFO accepted connection from 127.0.0.1:41021, task: main
13347 0","msg"name"::""crucible"[0]R dirty: [false, false]","level,""v"::300,"name":"crucible","level":30,"time":","2023-09-22T23:14:27.613746309Z"time":","hostname":"2023-09-22T23:14:27.613750415Z","hostname":ip-10-150-1-74.us-west-2.compute.internal"","pid":4301ip-10-150-1-74.us-west-2.compute.internal","}pid"
13348 :4301}
13349 {{"msg":""msg"[0] 127.0.0.1:56524 task reports connection:true":","v":[1]R flush_numbers: [0, 0]"0,","vname":"":crucibleThe guest has finished waiting for activation
13350 "0,","level"name"::"30crucible","level":30,"time":","time":"2023-09-22T23:14:27.613810965Z",2023-09-22T23:14:27.613813262Z""hostname":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"ip-10-150-1-74.us-west-2.compute.internal":,4301"pid":}4301
13351 }
13352 {"{msg":""msg":"a697641e-15fb-475d-87b4-6f9c9b92b978 WaitQuorum WaitQuorum WaitQuorum","[1]R generation: [0, 0]"v":,0",v"":name"0:,""cruciblename"":","crucible"level":,"30level":30,","time":time"":"2023-09-22T23:14:27.613873108Z"2023-09-22T23:14:27.613874159Z",","hostnamehostname":"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",","pidpid""::43014301}}
13353 
13354 {"msg"{:"[1]R dirty: [false, false]",""v"Sep 22 23:14:27.613 INFO Downstairs has completed Negotiation, task: proc
13355 msg":0:","name":"[0]R flush_numbers: [0, 0]"crucible",,""levelv":"0:,"30name":"crucible","level":30,"time":"2023-09-22T23:14:27.613947194Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal",2023-09-22T23:14:27.61395059Z""pid":,"4301hostname":"}
13356 ip-10-150-1-74.us-west-2.compute.internal","pid":4301{}
13357 "msg":"[2]R flush_numbers: [0, 0]"{,"v":0,"name":""msg":crucible"","level":[0]R generation: [0, 0]"30,,""v":time0":,""name":"2023-09-22T23:14:27.61292534Zcrucible"","level",:"30hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13358 ,"time":","time":"2023-09-22T23:14:27.614000501Z"{2023-09-22T23:14:27.614006342Z",","hostname"hostname""msg:"":":"ip-10-150-1-74.us-west-2.compute.internal","[0] 127.0.0.1:45897 task reports connection:truepid"":,4301"ip-10-150-1-74.us-west-2.compute.internal"}v
13359 ",":pid"{0:,""4301namemsg""::""}crucible"[0]R dirty: [false, false]",
13360 ","levelv""::030,"name":"crucible{","level":30Sep 22 23:14:27.614 INFO Connection request from 9fe7fa85-122e-4e82-8591-96595eb36a20 with version 4, task: proc
13361 ",,""timetime"":":"msg":"2023-09-22T23:14:27.614049083Z"2023-09-22T23:14:27.61405302Z",",[2]R generation: [0, 0]"hostname"":","hostnamev""ip-10-150-1-74.us-west-2.compute.internal:"",":pid"0ip-10-150-1-74.us-west-2.compute.internal:,"4301"name"},
13362 ":pid"":{crucible"4301,"}msg
13363 ""Sep 22 23:14:27.614 INFO upstairs UpstairsConnection { upstairs_id: 9fe7fa85-122e-4e82-8591-96595eb36a20, session_id: 57e450d1-3a66-4c19-9890-8b892a800667, gen: 1 } connected, version 4, task: proc
13364 {:"level"":msg":30"64be6ec7-0ac5-4896-883a-00ec34140b84 WaitQuorum WaitQuorum WaitQuorum","[1]R flush_numbers: [0, 0]v"":,0",v"":name0":,""namecrucible"":","crucible"level",:"30level":30,,",time""time:"":""2023-09-22T23:14:27.614106139Ztime"2023-09-22T23:14:27.614106831Z:""",,""2023-09-22T23:14:27.614099629Z"hostnamehostname""::"","hostname":"The guest has finished waiting for activation
13365 ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pid"pid:"4301:ip-10-150-1-74.us-west-2.compute.internal}4301
13366 }"
13367 {,Sep 22 23:14:27.614 INFO [0] a50fc3fb-3de2-4743-9c50-cc80cfba77db (2b698136-bbd2-4fcb-ab7c-5f19b2267eb8) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
13368 ""{msg"pid"":msg"":":[0]R flush_numbers: [0, 0]"4301,[1]R generation: [0, 0]""v",:"}0
13369 ,v"":name0":,""namecrucible"":","crucible"level,"":level30":{30Sep 22 23:14:27.614 INFO [0] Transition from WaitActive to WaitQuorum
13370 "msg":",","timetime""::""[2]R dirty: [false, false]"Sep 22 23:14:27.614 INFO Connection request from 9fe7fa85-122e-4e82-8591-96595eb36a20 with version 4, task: proc
13371 ,"2023-09-22T23:14:27.614174558Z"2023-09-22T23:14:27.614173117Z,""hostnameSep 22 23:14:27.614 WARN [0] new RM replaced this: None
13372 v":,"":"hostname"0:"ip-10-150-1-74.us-west-2.compute.internal,""ip-10-150-1-74.us-west-2.compute.internal,"",pid"":pid4301"name":"Sep 22 23:14:27.614 INFO upstairs UpstairsConnection { upstairs_id: 9fe7fa85-122e-4e82-8591-96595eb36a20, session_id: 57e450d1-3a66-4c19-9890-8b892a800667, gen: 1 } connected, version 4, task: proc
13373 crucible",}:
13374 4301"}{
13375 "levelmsg"{"::""30msg":[1]R dirty: [false, false]"","v"[0]R generation: [0, 0]:"0,,""vname""::0","crucible"name",:""level"crucible:"30,"level":30,"time",,""timetime""::"":"2023-09-22T23:14:27.614239738Z2023-09-22T23:14:27.61423839Z""2023-09-22T23:14:27.614231676Z",,"","hostnamehostname""::""hostname":"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::43014301ip-10-150-1-74.us-west-2.compute.internal}}
13376 
13377 {",""{pidmsg""msg:"""::[2]R flush_numbers: [0, 0]""4301,"[0]R dirty: [false, false]v""Sep 22 23:14:27.614 INFO [0] Starts reconcile loop
13378 }
13379 :,"0v",:"0name,"":"name":crucible"{"crucible",","level":level":30"30msg":"Max found gen is 1","v":0,"nameSep 22 23:14:27.614 INFO Connection request from 9fe7fa85-122e-4e82-8591-96595eb36a20 with version 4, task: proc
13380 ":,,""timetime""::""{"2023-09-22T23:14:27.614305769Z"2023-09-22T23:14:27.614304649Z",crucible"",hostname"""hostname:"":,msg",:""ip-10-150-1-74.us-west-2.compute.internaltime""","[2] Proc runs for 127.0.0.1:56258 in state New:""ip-10-150-1-74.us-west-2.compute.internalpid"":,4301""2023-09-22T23:14:27.612934972Z,level"""v"Sep 22 23:14:27.614 INFO upstairs UpstairsConnection { upstairs_id: 9fe7fa85-122e-4e82-8591-96595eb36a20, session_id: 57e450d1-3a66-4c19-9890-8b892a800667, gen: 1 } connected, version 4, task: proc
13381 :pid:300","name":":crucible"4301,"}level"
13382 :30{,"time":""2023-09-22T23:14:27.614359459Z"msg"{:,""hostname"[2]R generation: [0, 0]":",""v"msg":ip-10-150-1-74.us-west-2.compute.internal"0,,:"",""nametimepid":"""[0] 66688a47-67b1-4e47-8e76-e0e145532b3a (ff839372-f996-4bbe-90dd-222e8d070509) WaitActive WaitActive WaitActive ds_transition to WaitQuorum2023-09-22T23:14:27.614373795Z"":",,""v"::hostname0",:""namecrucible"4301"ip-10-150-1-74.us-west-2.compute.internal:"},"",pid
13383 }"crucible
13384 {""level":,:"4301level"30}:
13385 30{""msgmsg""::""[1]R flush_numbers: [0, 0]"Generation requested: 1 >= found:1",,""vv""::00,",name"Sep 22 23:14:27.614 INFO [1] a50fc3fb-3de2-4743-9c50-cc80cfba77db (2b698136-bbd2-4fcb-ab7c-5f19b2267eb8) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
13386 ,,name"":":""timecruciblecrucible"""",,"time":":level""level""::302023-09-22T23:14:27.614429688Z2023-09-22T23:14:27.614435208Z"30","hostname",:""hostname":ip-10-150-1-74.us-west-2.compute.internal"Sep 22 23:14:27.614 INFO [1] Transition from WaitActive to WaitQuorum
13387 ",ip-10-150-1-74.us-west-2.compute.internal"",pid,"",time""time":"4301::""pid}2023-09-22T23:14:27.6144702Z2023-09-22T23:14:27.614472413Z""
13388 Sep 22 23:14:27.614 WARN [1] new RM replaced this: None
13389 ",{",:hostname"""hostname:"":msg"4301"ip-10-150-1-74.us-west-2.compute.internal":,}ip-10-150-1-74.us-west-2.compute.internal"
13390 ""pid",:"4301pid":4301[0] Transition from WaitActive to WaitQuorum"}}
13391 
13392 ,"{{{v":0""msgmsg",""Sep 22 23:14:27.614 INFO [1] Starts reconcile loop
13393 msg":""name:Next flush: 1:":"""",[1]R generation: [0, 0]""[2]R dirty: [false, false]"v,"":v0",:"0,",name""name:"":"cruciblev"crucible"":,,0"",levellevel""::3030"name":"crucible","level":30,,""timetime""::""2023-09-22T23:14:27.614569682Z2023-09-22T23:14:27.61456946Z""crucible,,"""hostnamehostname""::"","level":ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal30"",,"",pidpid""::43014301"}}
13394 
13395 {time{"msg":""msg"All extents match:"","v":[1]R dirty: [false, false]0",","namev""::"0,crucible,"""name,"":"level"crucible:"30,"time"level:"":302023-09-22T23:14:27.614591683Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":,4301"time":",}Sep 22 23:14:27.614 INFO [2] a50fc3fb-3de2-4743-9c50-cc80cfba77db (2b698136-bbd2-4fcb-ab7c-5f19b2267eb8) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
13396 2023-09-22T23:14:27.614614613Z"",time"":hostname"":
13397 "2023-09-22T23:14:27.61461774Z"",ip-10-150-1-74.us-west-2.compute.internal"":{"hostname,"":"pid":2023-09-22T23:14:27.614574118Z"4301ip-10-150-1-74.us-west-2.compute.internal"}"Sep 22 23:14:27.614 INFO [2] Transition from WaitActive to WaitQuorum
13398 ,"
13399 pid":,msg"{4301:"""}msg
13400 "[0] new RM replaced this: None:{"""hostname"No downstairs repair requiredmsg",":v":"Sep 22 23:14:27.614 WARN [2] new RM replaced this: None
13401 ,""v[2]R flush_numbers: [0, 0]"":,0",v":"0:name,"0:,"""cruciblename"",:"""levelcrucible"":,30"ip-10-150-1-74.us-west-2.compute.internallevel"name":"30:,"pid":4301}
13402 ,,""Sep 22 23:14:27.614 INFO [2] Starts reconcile loop
13403 "timetime""::""crucible"{2023-09-22T23:14:27.61471051Z2023-09-22T23:14:27.614706203Z"",,,"""hostnamehostname"""::msg""":"level":ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal40"",,""pid"pid"::43014301Max found gen is 1}}
13404 
13405 {"{"msg":""msg":"No initial repair work was required"[2]R generation: [0, 0],""v,"":v0":,0","name"name:"":"crucible"crucible",",,"time"level""level:"30:30:"2023-09-22T23:14:27.614745893Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301},
13406 ,""timetime""::""2023-09-22T23:14:27.614769614Z2023-09-22T23:14:27.614770283Z"",",hostname""{:hostname"":Sep 22 23:14:27.614 INFO [0] 127.0.0.1:42970 task reports connection:true
13407 ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal""",,""pid"pid:"4301:msg"}4301
13408 }:"
13409 {[0] Starts reconcile loop{""msg":"","v"Set Downstairs and Upstairs activemsg":",":v"":00[2]R dirty: [false, false],"",name"",name""v"::"0:,crucible""name,"":"levelcrucible"",:""Sep 22 23:14:27.614 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db WaitQuorum WaitQuorum WaitQuorum
13410 Sep 22 23:14:27.614 INFO UpstairsConnection { upstairs_id: 9fe7fa85-122e-4e82-8591-96595eb36a20, session_id: 57e450d1-3a66-4c19-9890-8b892a800667, gen: 1 } is now active (read-write)
13411 crucible"level30",:"30level":30,"time,"":"time":"2023-09-22T23:14:27.614853895Z"2023-09-22T23:14:27.614855966Z,"",hostname"":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid,"":pid4301",}:"4301
13412 }time"
13413 {:"Sep 22 23:14:27.614 INFO [0]R flush_numbers: [0, 0]
13414 "{msg2023-09-22T23:14:27.614857535Z""":msg"":,""hostname"Max found gen is 1":9ed4d6bc-e430-4200-9ad2-68905b0ff40c is now active with session: ad0fa1b4-52c3-4329-aeea-287c43c0dc1e,""",v"":v"0:0,","nameip-10-150-1-74.us-west-2.compute.internal"name",":"":"crucible"pid",crucible""Sep 22 23:14:27.614 INFO [0]R generation: [0, 0]
13415 level,""::30level"4301:30}
13416 ,"{time,"":"time":""2023-09-22T23:14:27.61491817Z2023-09-22T23:14:27.614920044Z""Sep 22 23:14:27.614 INFO [0]R dirty: [false, false]
13417 ,msg"":hostname":""ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13418 [1] 66688a47-67b1-4e47-8e76-e0e145532b3a (ff839372-f996-4bbe-90dd-222e8d070509) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum","v":{0,"name":""crucible"msg,"":"level"Sep 22 23:14:27.614 INFO [1]R flush_numbers: [0, 0]
13419 Sep 22 23:14:27.614 INFO UpstairsConnection { upstairs_id: 9fe7fa85-122e-4e82-8591-96595eb36a20, session_id: 57e450d1-3a66-4c19-9890-8b892a800667, gen: 1 } is now active (read-write)
13420 ,9ed4d6bc-e430-4200-9ad2-68905b0ff40c Set Active after no repair"":,hostname""v:""30:0,ip-10-150-1-74.us-west-2.compute.internal""name,"":"pid":crucible"4301,"}
13421 level"Sep 22 23:14:27.614 INFO [1]R generation: [0, 0]
13422 :{30",msg"":"time":"Generation requested: 1 >= found:1",2023-09-22T23:14:27.614972581Z""v",:"0,",hostname"time""name"::"":"2023-09-22T23:14:27.61498456Zcrucible""Sep 22 23:14:27.614 INFO [1]R dirty: [false, false]
13423 ip-10-150-1-74.us-west-2.compute.internal",,"level"":hostname30",":pid"":4301}ip-10-150-1-74.us-west-2.compute.internal"
13424 ,,""pidtime""::,{"4301"}2023-09-22T23:14:27.615012302Z"
13425 ,""{hostnamemsg"v"msg"Sep 22 23:14:27.615 INFO [2]R flush_numbers: [0, 0]
13426 ":""::""[1] Transition from WaitActive to WaitQuorum":,"0ip-10-150-1-74.us-west-2.compute.internalNotify all downstairs, region set compare is done.",",,""v""namevpid""::04301,""::name}Sep 22 23:14:27.615 INFO [2]R generation: [0, 0]
13427 0"
13428 ,"crucible{name"""msg:"":":"crucible"crucible",,Next flush: 1""level""",:"30v,level":""level"::30300Sep 22 23:14:27.615 INFO [2]R dirty: [false, false]
13429 ,,""timename""::""2023-09-22T23:14:27.61508988Zcrucible"",,""level"hostname:"30:","ip-10-150-1-74.us-west-2.compute.internal"time,"":"pid":43012023-09-22T23:14:27.61509698Z"}
13430 ,,"{time""":msg""Sep 22 23:14:27.615 INFO Max found gen is 1
13431 hostname"2023-09-22T23:14:27.615108611Z:"",,""Set check for repairhostname""time":,"":v"ip-10-150-1-74.us-west-2.compute.internal::0",,""namepid""":":4301"2023-09-22T23:14:27.615096838Z"ip-10-150-1-74.us-west-2.compute.internal"crucible,"},
13432 ",level{""Sep 22 23:14:27.615 INFO Generation requested: 1 >= found:1
13433 hostname"":":msg30""pid"::"4301ip-10-150-1-74.us-west-2.compute.internal"All extents match,"}pid"":
13434 ,,""v{Sep 22 23:14:27.615 INFO Next flush: 1
13435 ""time:"4301:0",}"
13436 msg"name2023-09-22T23:14:27.615167287Z"":{:,"""hostnamecrucible"":"",[1] new RM replaced this: Noneip-10-150-1-74.us-west-2.compute.internal"""msg",:"",level""Generation requested: 1 >= found:1":pid30"v"::,0"4301v"Sep 22 23:14:27.615 INFO All extents match
13437 ,,}":"0
13438 time"name":"{:",crucible"2023-09-22T23:14:27.615217927Z",""Sep 22 23:14:27.615 INFO No downstairs repair required
13439 name"",msg""hostname:":"":"level":crucibleip-10-150-1-74.us-west-2.compute.internal40"[1] 127.0.0.1:52116 task reports connection:true,"""pid,"":v4301",}:
13440 0,",{"Sep 22 23:14:27.615 INFO No initial repair work was required
13441 level""name"msg":::""30"No downstairs repair requiredcrucible""time",,""v"level:"0:,30:""name"Sep 22 23:14:27.615 INFO Set Downstairs and Upstairs active
13442 2023-09-22T23:14:27.615259798Z":","crucible"hostname",:"","level":time30":ip-10-150-1-74.us-west-2.compute.internal"",","2023-09-22T23:14:27.615294104Zpid"time":"4301:,"},"2023-09-22T23:14:27.615287762Z"
13443 hostname""time:"":"{{,ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:14:27.615305098Z,"",msg"""""pidhostnamehostnamemsg"":::"":":""Sep 22 23:14:27.615 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db is now active with session: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8
13444 ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal4301[0] 5b98377f-d3ca-45bf-893d-9aae2fd5a48e (d3623aa7-ee8c-40f8-a982-d0eadbcd4a76) WaitActive WaitActive WaitActive ds_transition to WaitQuorum,""",pid"[1] Starts reconcile loop"v""::}"
13445 ,",04301,"{pid"}"
13446 namev"":msg4301"{"}:
13447 ":Sep 22 23:14:27.615 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db Set Active after no repair
13448 {9ed4d6bc-e430-4200-9ad2-68905b0ff40c Active Active Active":""cruciblemsg"",:""0,""msgv""::"Next flush: 1level"No initial repair work was required0",,""namev"",,"":"30Sep 22 23:14:27.615 INFO Notify all downstairs, region set compare is done.
13449 v":name":"0crucible,"name"::0","crucible"name,":""level"crucible:"30,,:""",time"""crucible:level"":,2023-09-22T23:14:27.615401901Z"",",level""level"30hostname:time"":""30:":2023-09-22T23:14:27.615420581Z",ip-10-150-1-74.us-west-2.compute.internal"",,hostname""time:"":30""Sep 22 23:14:27.615 INFO Set check for repair
13450 ,pid""time:2023-09-22T23:14:27.615434845Zip-10-150-1-74.us-west-2.compute.internal""4301":,,""pid"hostname:"4301:}"
13451 "},
13452 ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:14:27.615440169Z{"{,"",""msg"hostname:":"""pidmsg"":ip-10-150-1-74.us-west-2.compute.internal4301:time"[0] Transition from WaitActive to WaitQuorum,"""pid,"":v:"4301:}"
13453 Set check for repair"0{",}"2023-09-22T23:14:27.615448433Z"name
13454 "",msg{:Sep 22 23:14:27.615 INFO [1] 127.0.0.1:62060 task reports connection:true
13455 "",:""vSet Downstairs and Upstairs active"":,"0vhostname""msg"crucible:""","":name0",:"All extents match,name"":"crucible:"","crucible"level,"":level30"":ip-10-150-1-74.us-west-2.compute.internal"level""Sep 22 23:14:27.615 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db Active Active Active
13456 ,":v30",30":,time"0"pid:,""":name4301,2023-09-22T23:14:27.615540858Z""time",:""Sep 22 23:14:27.615 INFO Set check for repair
13457 2023-09-22T23:14:27.615554032Zhostname""},:"""hostname":,"":
13458 "ip-10-150-1-74.us-west-2.compute.internal"crucibletime"",:""{ip-10-150-1-74.us-west-2.compute.internal,"",pid"":pid4301""}:
13459 4301msg"}{
13460 "2023-09-22T23:14:27.615552786Z:{msg""",level"":""msg"Sep 22 23:14:27.615 INFO [2] 127.0.0.1:42513 task reports connection:true
13461 [2] 66688a47-67b1-4e47-8e76-e0e145532b3a (ff839372-f996-4bbe-90dd-222e8d070509) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum",:[2] 127.0.0.1:33349 task reports connection:true""",v:hostname30":"64be6ec7-0ac5-4896-883a-00ec34140b84 is now active with session: 58d25f3b-f138-43ff-93b7-02b5e6bdc53e""v",:"0v:",",:00,,"""namename""::""ip-10-150-1-74.us-west-2.compute.internal""time,":""pid"crucible"crucible,"",level""level:"30:2023-09-22T23:14:27.615620212Z:"30,4301Sep 22 23:14:27.615 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db Active Active Active
13462 name}"
13463 hostname""{::""msg"":ip-10-150-1-74.us-west-2.compute.internal"",,[0] new RM replaced this: None,""crucible"""timetime""::"",pid,"":v4301"2023-09-22T23:14:27.615650172Z2023-09-22T23:14:27.615646927Z"""}Sep 22 23:14:27.615 INFO Set check for repair
13464 
13465 :,,""hostname"level{0":hostname"":"",msg""name:"":":ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,"crucibleNo downstairs repair required"",pid""pid:"430130,""levelv:}4301
13466 }""::400{
13467 ",msg{"":""msg"name":,64be6ec7-0ac5-4896-883a-00ec34140b84 Set Active after no repair"",,:""time"crucible:""9ed4d6bc-e430-4200-9ad2-68905b0ff40c Active Active Active,""",v""v:"0:,""time"name0",:"":level"cruciblename"",:"""2023-09-22T23:14:27.615711047Z"cruciblelevel","",":hostname30level"":2023-09-22T23:14:27.615718899Z30":":,"Sep 22 23:14:27.615 INFO [0] received reconcile message
13468 hostname,,""30time"ip-10-150-1-74.us-west-2.compute.internal:""time":":"ip-10-150-1-74.us-west-2.compute.internal",","2023-09-22T23:14:27.615760812Z,"",pid""hostname:"4301:"}pid"2023-09-22T23:14:27.615757489Z""time",:"ip-10-150-1-74.us-west-2.compute.internal
13469 ":2023-09-22T23:14:27.615776371Z"4301"hostname",:""{,}""msgpid"":ip-10-150-1-74.us-west-2.compute.internalhostname"4301:,}""pid
13470 "Sep 22 23:14:27.615 INFO [0] All repairs completed, exit
13471 "
13472 :[0] Starts reconcile loop{"4301,""}msg
13473 "v:"":{{No initial repair work was required0","name"":""crucible,msg""v":,0",level""name:"msg"":30:"":Sep 22 23:14:27.615 INFO [0] Starts cmd_loop
13474 "crucibleNotify all downstairs, region set compare is done.",",",time"""[2] Transition from WaitActive to WaitQuorum"v"level:"::"300,,""v"name":2023-09-22T23:14:27.615855099Z":","0crucible","hostname,"":time"":",name"":"level"crucibleip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:14:27.615874955Z"",:"30",pid"","hostname:"4301:"}level"
13475 ip-10-150-1-74.us-west-2.compute.internal":,"{30pid":4301"msg"}:"
13476 ,{"time""[1] 5b98377f-d3ca-45bf-893d-9aae2fd5a48e (d3623aa7-ee8c-40f8-a982-d0eadbcd4a76) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorummsg"":",:""Set Downstairs and Upstairs active"v":,"02023-09-22T23:14:27.615899124Z",v"":name0",:"",namecrucible"":,Sep 22 23:14:27.615 INFO [1] received reconcile message
13477 ,"""cruciblelevel"",:"30time"level"::"302023-09-22T23:14:27.615910124Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",",pid":"4301time,"":"time"}:2023-09-22T23:14:27.615942648Z""
13478 Sep 22 23:14:27.615 INFO [1] All repairs completed, exit
13479 2023-09-22T23:14:27.615946163Z,""{hostname,"":hostname""":msg"ip-10-150-1-74.us-west-2.compute.internal":","pid":4301[2] new RM replaced this: None"}
13480 ,"v":0{,"name":""cruciblemsg"":","level":40[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible"","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13481 {"Sep 22 23:14:27.615 INFO [1] Starts cmd_loop
13482 ,"hostname"time"::","msg":time"""":"2023-09-22T23:14:27.615992744Z"2023-09-22T23:14:27.615998569Za697641e-15fb-475d-87b4-6f9c9b92b978 is now active with session: 01d84cdc-ab54-426d-be83-dc686be9c9e6"",,"",v""ip-10-150-1-74.us-west-2.compute.internal"hostname"hostname:"0:,"",name"ip-10-150-1-74.us-west-2.compute.internal:"":"crucible,"",pid""pid"level:"4301:"}30
13483 :ip-10-150-1-74.us-west-2.compute.internal"4301{,"pid":}4301
13484 }",msg""time:"":"{
13485 "2023-09-22T23:14:27.616044451Z[1] new RM replaced this: None""msg":,{",v"":hostname0"""msg",:"":Set check for repair""ip-10-150-1-74.us-west-2.compute.internalname"",:,Sep 22 23:14:27.616 INFO [2] received reconcile message
13486 ""pid[2] Starts reconcile loop"crucible"":,4301","v"level}"
13487 :":v"0{40,:""0msg","name":name":"":",crucible"crucible,"a697641e-15fb-475d-87b4-6f9c9b92b978 Set Active after no repairtime""","Sep 22 23:14:27.616 INFO [2] All repairs completed, exit
13488 ,":v"""levellevel"":2023-09-22T23:14:27.616104225Z0"::,,""hostnamename""::""30crucible30ip-10-150-1-74.us-west-2.compute.internal"",,""levelpid""::304301}
13489 {"msg":"[1] Starts reconcile loop","v":0,","name":time"":"crucible","level"2023-09-22T23:14:27.616143665Z:"30,,""hostname":time"":","2023-09-22T23:14:27.616140434Z"ip-10-150-1-74.us-west-2.compute.internal",,","time""pid:"":hostname"43012023-09-22T23:14:27.616158063Z"}:time",
13490 "":{hostname""":msg"":"2023-09-22T23:14:27.616141914Z"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal,"Notify all downstairs, region set compare is done.,hostname""",",pid"pidv""Sep 22 23:14:27.616 INFO [2] Starts cmd_loop
13491 :"0:",:""ip-10-150-1-74.us-west-2.compute.internal":,43014301name"}:
13492 ""}{crucible"pid"
13493 :",4301msg"":"level":}{
13494 "[2] 5b98377f-d3ca-45bf-893d-9aae2fd5a48e (d3623aa7-ee8c-40f8-a982-d0eadbcd4a76) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum30"msg",":v{"":0,"name""[0] 127.0.0.1:32776 task reports connection:truemsg"",:""time"crucible:"",",level2023-09-22T23:14:27.616232403Z"":"30,:v""":hostname"0[1] 127.0.0.1:43400 task reports connection:true",":name":,"","crucible"timeip-10-150-1-74.us-west-2.compute.internal"":,"",pid2023-09-22T23:14:27.616249219Z":"4301"},
13495 ""The guest has finished waiting for activation
13496 level"{hostname"::"30"msgip-10-150-1-74.us-west-2.compute.internal","pid""::"4301}Set check for repair
13497 ","v"{:0,"name"":msg"":"crucible","level":30[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.616287602Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:14:27.616300299Z,""pidtime,"":"hostname""::"2023-09-22T23:14:27.616305499Z"4301,"ip-10-150-1-74.us-west-2.compute.internal"hostname",:""pid":}4301ip-10-150-1-74.us-west-2.compute.internal"
13498 },"
13499 pid":{4301{}
13500 ""msg"{:"msg"":msg[1] 127.0.0.1:33784 task reports connection:true"":",""v":0,"[2] new RM replaced this: None"name":,""66688a47-67b1-4e47-8e76-e0e145532b3a WaitQuorum WaitQuorum WaitQuorum"vcrucible"":,0",level""name:"30:","crucible"v",:"0level",":name"40:",crucible"","time"level:"":302023-09-22T23:14:27.616355482Z",,""timehostname""::""2023-09-22T23:14:27.616363422Z"ip-10-150-1-74.us-west-2.compute.internal,"","hostname"pid:"":4301}
13501 ip-10-150-1-74.us-west-2.compute.internal","pid":{4301}
13502 "msg":"{,"a697641e-15fb-475d-87b4-6f9c9b92b978 Active Active Activemsg"",:""v"":[2] Starts reconcile loop"0time",,""namev""::"0:,crucible""name,""level:"":"crucible30","level"2023-09-22T23:14:27.616370658Z":30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"}time,"":"
13503 time2023-09-22T23:14:27.616403365Z"":","hostname":2023-09-22T23:14:27.616406322Z""{,"ip-10-150-1-74.us-west-2.compute.internal"hostname",:""pid"":ip-10-150-1-74.us-west-2.compute.internal"4301,"}pid
13504 ":msg"{4301:"}"
13505 msg"[0]R flush_numbers: [0, 0]":{","v"Set check for repair""msg",:""v"::0[0] 127.0.0.1:43186 task reports connection:true0",",name""v:"":,crucible0",","namename""level"::""crucible30":,""level"crucible":,"30level":30,"time":"2023-09-22T23:14:27.616457137Z","hostname",:""time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:14:27.616461942Z,""pid,"":4301hostname":"}
13506 ip-10-150-1-74.us-west-2.compute.internal","pid"{:,4301""msg}"
13507 :time"{":""msg"2023-09-22T23:14:27.616463328Z":[2] 127.0.0.1:39493 task reports connection:true"",","hostnamev5b98377f-d3ca-45bf-893d-9aae2fd5a48e WaitQuorum WaitQuorum WaitQuorum"":":"0,","v"name:"0:,""ip-10-150-1-74.us-west-2.compute.internalcruciblename"",:"""levelcrucible,"",:"30"levelpid""::304301}
13508 ,"time":",2023-09-22T23:14:27.616504582Z""time",":"hostname":"{2023-09-22T23:14:27.616508584Z","ip-10-150-1-74.us-west-2.compute.internal",hostname"":pid""":4301ip-10-150-1-74.us-west-2.compute.internal"},
13509 "msgpid"":{4301:"}"
13510 msg"[0]R generation: [0, 0]"{:","v"":a697641e-15fb-475d-87b4-6f9c9b92b978 Active Active Activemsg""0:,"","v[0]R flush_numbers: [0, 0]"",:"0v,"":name0",:""name"crucible:"","cruciblelevel"":,30"name"level":":30crucible","level":30,"time":"2023-09-22T23:14:27.616555931Z",,""time"hostname:"":"2023-09-22T23:14:27.616559772Z",ip-10-150-1-74.us-west-2.compute.internal"","hostname"pid:"":4301}
13511 ip-10-150-1-74.us-west-2.compute.internal","pid":{4301},"
13512 msg""{:"time":""msgSet check for repair"":","2023-09-22T23:14:27.616562756Z"[0]R generation: [0, 0]v"":,0",v""name:"0:,"",namecrucible"":,""level"crucible:"30,""level":hostname"30:"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13513 ,,""timetime""::""{2023-09-22T23:14:27.616601748Z2023-09-22T23:14:27.616598819Z"",,""hostnamehostname""::"""msg":"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid[0]R dirty: [false, false]"""::,43014301"}}
13514 
13515 {v":{"0"msgmsg""::,"""name":"[0]R dirty: [false, false][0] received reconcile message"",crucible",",v""v:"0:"0,level",""namename:""::""30cruciblecrucible"",,""levellevel""::3030:"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,,""timetime""}::""
13516 ,2023-09-22T23:14:27.616656051Z2023-09-22T23:14:27.616656127Z""",,""{time"hostnamehostname""::"":msg""ip-10-150-1-74.us-west-2.compute.internal""2023-09-22T23:14:27.616654042Z:,ip-10-150-1-74.us-west-2.compute.internal""pid,"":""4301pid"Set check for repair",",}:
13517 4301hostname""v}{
13518 ""::"0{msg":""msg"ip-10-150-1-74.us-west-2.compute.internal,"","[0] All repairs completed, exit:""namepid""::"4301[1]R flush_numbers: [0, 0],""}crucible
13519 ",v"",{":level"":0v,"":name0",":name""msg30"crucible:"":,crucible""level,"":level30":"30[1]R flush_numbers: [0, 0]","v":0,"name":"crucible","level":30,",time"":time"",:2023-09-22T23:14:27.616745782Z""","2023-09-22T23:14:27.616747427Z"time"hostname,":"":,""ip-10-150-1-74.us-west-2.compute.internalhostname"":,""2023-09-22T23:14:27.616740211Ztime""pidip-10-150-1-74.us-west-2.compute.internal"":,4301",:""pid}"
13520 :2023-09-22T23:14:27.616754126Z4301{hostname"""}msg"
13521 :":,""[0] Starts cmd_loop{"hostname",""ip-10-150-1-74.us-west-2.compute.internal:"",vmsg""::"0ip-10-150-1-74.us-west-2.compute.internal"[1]R generation: [0, 0],"",name"":"pid,"":pid"4301:"v"crucible:0","}4301
13522 }name,"":level"":crucible30"{
13523 ",msg{":"""levelmsg[0] received reconcile message"":",",:[1]R generation: [0, 0]""v",:"0v":,0",name""name:"":30"crucible""time",crucible""level,"":level30:"",":time30"2023-09-22T23:14:27.61681141Z:"","hostname"2023-09-22T23:14:27.616830251Z:"","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal4301",,",time""time:"":""}
13524 pid"2023-09-22T23:14:27.61683642Z2023-09-22T23:14:27.616840076Z""{:,,"""4301msg"}:
13525 "hostnamehostname""::"{[1] received reconcile message""",msg""v:"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",:"0",pid"[1]R dirty: [false, false],""pid""::43014301name,"":v""}}
13526 
13527 {:crucible0",,""levelname""::30"{"crucible"msg""msg:"":","[1]R dirty: [false, false]"[0] All repairs completed, exit",,""vv""::00,,"",level""time:"30:namename""::"""cruciblecrucible"",,""2023-09-22T23:14:27.616891461Zlevellevel""::3030,""time,"":"hostname":"2023-09-22T23:14:27.616906102Z","ip-10-150-1-74.us-west-2.compute.internal"hostname",:"pid":4301}
13528 ",,""timetime{ip-10-150-1-74.us-west-2.compute.internal""",msg""pid:""::"":[1] All repairs completed, exit4301""}2023-09-22T23:14:27.616914356Z2023-09-22T23:14:27.616913886Z""
13529 ,,,""{"hostnamehostname""::"""v"msg:":0",ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"[2]R flush_numbers: [0, 0]name"""",,""pid"pid:"4301:,:""v"crucible:"0,,""4301}}
13530 
13531 {namelevel""::"30"{msg"crucible:""msg"":[2]R flush_numbers: [0, 0]"","[0] Starts cmd_loop,"",v""v:"0:,0",level""timename,"":name"":"::30"crucible"",crucible""2023-09-22T23:14:27.616966502Z",level""level:"30:,30"hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","2023-09-22T23:14:27.616985812Zpid"":,4301"hostname}"
13532 :,,""{timetime""::""""msg":"2023-09-22T23:14:27.616991149Z2023-09-22T23:14:27.61699255Z"",[1] Starts cmd_loopip-10-150-1-74.us-west-2.compute.internal""",,,""pidv""::43010hostname,}"
13533 name"""hostname:""::{""ip-10-150-1-74.us-west-2.compute.internal""msg,ip-10-150-1-74.us-west-2.compute.internal"","crucible:"","pid"level[2]R generation: [0, 0]"":pid""::43014301,30"}}
13534 
13535 {v":0"{msg":",,""msg"":nametime""::"[1] received reconcile message""crucible""[2]R generation: [0, 0],"",v"":v0",":name02023-09-22T23:14:27.617037647Z,"",level"":",:""30hostname"cruciblename""::,"""levelcrucible"":,30"level"ip-10-150-1-74.us-west-2.compute.internal",:",time""pid:"":3043012023-09-22T23:14:27.617067608Z"},
13536 ","hostname{":time"""msg":,""ip-10-150-1-74.us-west-2.compute.internal:"","time2023-09-22T23:14:27.61707419Z"":",[2] received reconcile messagepid"",":v4301":}"0
13537 ,"2023-09-22T23:14:27.61708062Zhostname""{name",:""":msg"hostnameip-10-150-1-74.us-west-2.compute.internal"":,"":""pidip-10-150-1-74.us-west-2.compute.internal"":,4301"[2]R dirty: [false, false]crucible""}pid
13538 ",,""levelv":"30:{:04301",}msg"
13539 :""name":{,"""[1] All repairs completed, exitmsg"timecrucible"":,"",""2023-09-22T23:14:27.617127648Zlevel"",:"30v:"":hostname"[2]R dirty: [false, false]0",,""namev""::0","crucible"name,"":level"",:""crucible:"30,"ip-10-150-1-74.us-west-2.compute.internaltime""level":,"":pid"2023-09-22T23:14:27.617145429Z":,4301"30}hostname
13540 ",,"{:time""time:"":"""msg2023-09-22T23:14:27.61716812Z2023-09-22T23:14:27.61715974Z""ip-10-150-1-74.us-west-2.compute.internal",,"":"",hostnamehostname""::""[2] All repairs completed, exit""pid",:"4301vip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""}"
13541 :pidpid""::43014301{}}
13542 
13543 {0",{""msg"namemsg""::""msg"Max found gen is 1crucible"""::"",,""Max found gen is 1[1] Starts cmd_loop"",,""vv""::00,,""name"name"::""vlevel""::030,cruciblecrucible"",,""levellevel""::3030"name":"crucible","level":30,"time":"2023-09-22T23:14:27.617231827Z","hostname":",,""timetime""::"",ip-10-150-1-74.us-west-2.compute.internal""time,"2023-09-22T23:14:27.617237509Z:""pid"2023-09-22T23:14:27.617237349Z""2023-09-22T23:14:27.617241223Z:"4301,,""},
13544 hostname""hostname:""hostname{,""":msghostname"":"":ip-10-150-1-74.us-west-2.compute.internal:"","[2] Starts cmd_loop"ip-10-150-1-74.us-west-2.compute.internal,ip-10-150-1-74.us-west-2.compute.internal""v,"":pid0""",pid"ip-10-150-1-74.us-west-2.compute.internal":,4301"pid""::43014301name}"
13545 }
13546 {,}"
13547 :"{msg{"""msg""cruciblemsg"",:""::""Generation requested: 1 >= found:1level""Generation requested: 1 >= found:1[2] received reconcile message"":,30"pid",,""vv""::00,,"":namename""::",v""time:"0:,""crucible"4301crucible"",2023-09-22T23:14:27.617319144Z,"name",:"",hostname"crucible:""""level"level:",":3030"ip-10-150-1-74.us-west-2.compute.internallevel"",:"30pid"looper"::4301"2"}
13548 ,,""time,}time""::""
13549 2023-09-22T23:14:27.617350055Z2023-09-22T23:14:27.617350508Z""",,time"""hostname"hostname"::"":{"ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,"2023-09-22T23:14:27.617353803Z""pid,"pidmsg""::43014301"}}
13550 
13551 {":""{msg"up_listen starts"hostname"v,"::"0":msg""ip-10-150-1-74.us-west-2.compute.internal,"","name""pid:"v:"4301crucible"}"
13552 ,[2] All repairs completed, exit:"",{"Next flush: 1""":msglevel0""::"30,,v""v:"0:,"0name,"":name""Next flush: 1""name"crucible:,,""timev""::"0",2023-09-22T23:14:27.617427059Z"crucible":"","crucible"level","",":,hostnamename""::""level""30level"ip-10-150-1-74.us-west-2.compute.internalcrucible":",,""pidlevel""::43013030},:"30time"
13553 :"{2023-09-22T23:14:27.617460122Z",""time",,""msg:"":"hostnametime""::""2023-09-22T23:14:27.617467667Z"64be6ec7-0ac5-4896-883a-00ec34140b84 Active Active Active",",ip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:14:27.617470694Z"","hostnamev""::"0,",,"ip-10-150-1-74.us-west-2.compute.internalnamepid"""hostname:"4301time":}"
13554 ip-10-150-1-74.us-west-2.compute.internal":{,""",:""pid""crucible:"4301msg},
13555 "2023-09-22T23:14:27.617469085Zlevel{""":,"30msg":""":"pid":All extents match"4301[2] Starts cmd_loop,}"
13556 ,""{vhostname""msg:v,"":time0""0:",,"2023-09-22T23:14:27.617524574Z""name":",crucible""hostname,"::""level"":ip-10-150-1-74.us-west-2.compute.internal30":""name,"ip-10-150-1-74.us-west-2.compute.internal"pid",All extents match"":"",crucible""v,""level:"0:,pid"30":,4301"name"::"time}"
13557 :",crucible43012023-09-22T23:14:27.617557626Z{""",time""level:""",msg""2023-09-22T23:14:27.617572724Z:"30,,hostname:"":"""Set check for repair"ip-10-150-1-74.us-west-2.compute.internal"task":",hostname""up_listen",,}""vpid""::04301,:time"":
13558 ip-10-150-1-74.us-west-2.compute.internal""}"
13559 name":"{2023-09-22T23:14:27.617592613Z,""{"cruciblemsg"",:""level""pid,"":msg"No downstairs repair required:":30"4301hostname"}:
13560 ","Wait for all three downstairs to come online"v"ip-10-150-1-74.us-west-2.compute.internal,:"0time,"":"name"",,"2023-09-22T23:14:27.617637861Z:""v,crucible"""hostname"",:":level"pid""ip-10-150-1-74.us-west-2.compute.internal:"30,0":pid"4301:,}"4301name"
13561 :,}"
13562 time":""{{crucible"2023-09-22T23:14:27.617665866Z"",",msg"":hostname"""msg:[2] 127.0.0.1:60582 task reports connection:true""level""ip-10-150-1-74.us-west-2.compute.internal,"",v""::pid:"30:04301"},
13563 "No downstairs repair requiredname{"":"","msgcrucible"":,""v"levelNo initial repair work was required"":,30":v0,",""name":":time"crucible:"0,,""name"time:"2023-09-22T23:14:27.61769915Z":"""crucible"2023-09-22T23:14:27.61771882Z,"",level,"":,""level"30hostname":hostname30":":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",",",pid"":time4301"pid"}:
13564 ":43012023-09-22T23:14:27.61774417Z{","hostname}"":
13565 ,msg""":"time":ip-10-150-1-74.us-west-2.compute.internal""64be6ec7-0ac5-4896-883a-00ec34140b84 Active Active Active,"",pid""v:":430102023-09-22T23:14:27.617747001Z"{},,"
13566 name"":{hostname""msg":"""msg"crucible:":",""ip-10-150-1-74.us-west-2.compute.internalFlush timeout: 0.5"Set Downstairs and Upstairs activelevel""":,"30v,,""pid":"v:"0:4301,,""name0}time""::"",crucible"2023-09-22T23:14:27.617801903Z
13567 ""name,,"""level{:"hostname:"30:"""crucible"msg"ip-10-150-1-74.us-west-2.compute.internal":,""pid","No initial repair work was required",:"4301,"time}"
13568 :v"level"{"::"2023-09-22T23:14:27.617829466Zmsg""30,:""0hostnameSet check for repair"":",",vip-10-150-1-74.us-west-2.compute.internal"",:"0pid,"":name4301":}"
13569 crucible"",{"name"level""msg:"30::""crucible",","time":level5b98377f-d3ca-45bf-893d-9aae2fd5a48e is now active with session: d3623aa7-ee8c-40f8-a982-d0eadbcd4a76""":,,""30timev""::"02023-09-22T23:14:27.617852654Z",2023-09-22T23:14:27.617871046Z""name,,"":hostname"":crucible""","ip-10-150-1-74.us-west-2.compute.internal"level,""pid:"30:hostname"4301:"}
13570 ip-10-150-1-74.us-west-2.compute.internal",{","time"",msg""time:"":"pid"[0] received reconcile message:2023-09-22T23:14:27.617882211Z4301":,"}""v",2023-09-22T23:14:27.617894311Z:
13571 0",",""name"hostname:"{hostname":"":crucible"ip-10-150-1-74.us-west-2.compute.internal"","ip-10-150-1-74.us-west-2.compute.internal,"""pidlevel,msg"""::430130:""}pid"
13572 9fe7fa85-122e-4e82-8591-96595eb36a20 active request set":{4301,",msg}"":""time":v"5b98377f-d3ca-45bf-893d-9aae2fd5a48e Set Active after no repair""
13573 :2023-09-22T23:14:27.617942182Z,""0v,"":{hostname0",:""name","ip-10-150-1-74.us-west-2.compute.internal:"",msg"crucible""pid,"":level4301""}:
13574 30name":{:"""crucible"msg":,"Set Downstairs and Upstairs active""[0] All repairs completed, exitlevel",:",,""time"v:"":30"2023-09-22T23:14:27.617984009Z0",",name""v"hostname:"":"crucible":,ip-10-150-1-74.us-west-2.compute.internal""0,level""pid:"30:,4301"}name"
13575 :"crucible",,",{"timetime"":msg""":2023-09-22T23:14:27.618012776Z"":,""Notify all downstairs, region set compare is done."level,"""hostnamev""::"02023-09-22T23:14:27.618000987Z,:"ip-10-150-1-74.us-west-2.compute.internalname""",:""pid"crucible":,,""4301level"hostname30:}30
13576 ":"{ip-10-150-1-74.us-west-2.compute.internal"",msg"":"pid",[0] Starts cmd_loop""time",:":v""4301:02023-09-22T23:14:27.618051813Z",",name"":hostname,}""":crucible"
13577 ip-10-150-1-74.us-west-2.compute.internal"",,""pid"level:"4301{}:
13578 30"time"{:msg"":msg""":"2023-09-22T23:14:27.61805108Z[0] 9fe7fa85-122e-4e82-8591-96595eb36a20 looper connected"Set check for repair,"""time,,",:""v""hostnamev"":2023-09-22T23:14:27.618086866Z":"0:0,,",""name"hostname:name"":""":crucible""crucible,"ip-10-150-1-74.us-west-2.compute.internal"",level,"":"ip-10-150-1-74.us-west-2.compute.internallevel""30pid",:"30:pid"4301:}4301
13579 ,"}time{":"
13580 "2023-09-22T23:14:27.618127393Z"msg":,""hostname":"[1] received reconcile message","v"ip-10-150-1-74.us-west-2.compute.internal,:"0,,""namepid""::4301"{crucible}"
13581 ,""{"time""levelmsg""::30"msg":":"[1] 127.0.0.1:46381 task reports connection:true","v":2023-09-22T23:14:27.618130906Z"066688a47-67b1-4e47-8e76-e0e145532b3a is now active with session: ff839372-f996-4bbe-90dd-222e8d070509",,,",""nametime""::"""crucible"2023-09-22T23:14:27.618165479Z,""hostname,level"""hostname:"30:":v"":ip-10-150-1-74.us-west-2.compute.internal"0,"ip-10-150-1-74.us-west-2.compute.internalpid",""name",":,4301"pid":}time
13582 "::"4301"{2023-09-22T23:14:27.618185912Z",,""msg"hostname:"":""crucible"looper"ip-10-150-1-74.us-west-2.compute.internal[1] All repairs completed, exit"",,""pidv":"4301:,0},
13583 ":name{"""0":msg"":crucible""","level"5b98377f-d3ca-45bf-893d-9aae2fd5a48e Active Active Activelevel"",:}30"
13584 v"::030,"name"{:,""timecrucible"":","level":"302023-09-22T23:14:27.618242508Z"msg",":"hostname":"[0] Proc runs for 127.0.0.1:58086 in state New"ip-10-150-1-74.us-west-2.compute.internal",,""v"pid,""::4301time0"}:
13585 ",",2023-09-22T23:14:27.618256242Z{"name,":"""hostname"msg:"":"crucible"time",":ip-10-150-1-74.us-west-2.compute.internal""",[1] Starts cmd_loop""2023-09-22T23:14:27.618251334Zlevel"",pid""v:,:"304301":}0
13586 ,"hostname"{name":"":"msg":crucible"",ip-10-150-1-74.us-west-2.compute.internal"Set check for repair",""level,""v:"30:pid"0:,"4301name":",}"
13587 time"crucible,"",time"":level""::"{302023-09-22T23:14:27.618309896Z"2023-09-22T23:14:27.618295408Z,"""hostname"msg,":hostname"",:""ip-10-150-1-74.us-west-2.compute.internaltime"",:""pid"ip-10-150-1-74.us-west-2.compute.internal:2023-09-22T23:14:27.618328232Z4301"}""
13588 ,","hostname{"pid":""msg:"ip-10-150-1-74.us-west-2.compute.internal:"",":[2] received reconcile messagepid"",:"4301v"}:
13589 04301,"{"}name""msg:"":"crucible66688a47-67b1-4e47-8e76-e0e145532b3a Set Active after no repair""
13590 ,[2] 127.0.0.1:49774 task reports connection:true"",",level""v:{"30:v"0:,"0msg"":name":""crucible,"",,"time"level:"":[1] 9fe7fa85-122e-4e82-8591-96595eb36a20 looper connected302023-09-22T23:14:27.618383175Z"""name",:","crucible"v",:"0level,","""timehostname""::"":name"2023-09-22T23:14:27.618398921Z"ip-10-150-1-74.us-west-2.compute.internal:","",crucible"hostname""pid:30,"":"4301level"}ip-10-150-1-74.us-west-2.compute.internal
13591 ":,"30pid{":4301}"
13592 msg":"{[2] All repairs completed, exit"","msg"v:"":0,"name":",crucible5b98377f-d3ca-45bf-893d-9aae2fd5a48e Active Active Active"",",""time"vlevel""::030,:""name"2023-09-22T23:14:27.61842509Z:","crucible,""time,"":,level""":2023-09-22T23:14:27.618450533Z30"time,""":"hostname":"hostname"2023-09-22T23:14:27.618434288Z":ip-10-150-1-74.us-west-2.compute.internal"",,",ip-10-150-1-74.us-west-2.compute.internal""time",:""pid""pid"hostname":2023-09-22T23:14:27.61846684Z:"4301,":}"
13593 hostname":"ip-10-150-1-74.us-west-2.compute.internal"{ip-10-150-1-74.us-west-2.compute.internal"4301,,""msgpid""}":pid
13594 ""::4301[2] Starts cmd_loop"}4301,
13595 ",v{"{"":looper"0msg,""::""name"msg"1":Set check for repair"":},
13596 ""v":crucible0",,""{levelname""::30"""crucible"msg",":"level"Notify all downstairs, region set compare is done.":[1] Proc runs for 127.0.0.1:50371 in state New"30,,",""vtimev"":":"02023-09-22T23:14:27.61853807Z":,,"",0","name"nametimehostname""::"":""ip-10-150-1-74.us-west-2.compute.internal2023-09-22T23:14:27.618547314Zcrucible":,"",,"""hostnamepid""::"4301crucible""ip-10-150-1-74.us-west-2.compute.internal}level""
13597 ,:,""pid"level30"::430130}
13598 ,"time":",2023-09-22T23:14:27.618589483Z""time,"":"hostname":"2023-09-22T23:14:27.618591814Z"ip-10-150-1-74.us-west-2.compute.internal",","hostnamepid":"4301:"}
13599 ip-10-150-1-74.us-west-2.compute.internal","pid":4301{}
13600 "msg":"{[2] 9fe7fa85-122e-4e82-8591-96595eb36a20 looper connected","v":0","msg":name"":"crucible"Set check for repair,""level":,"30v":0,"name":"crucible","level"Sep 22 23:14:27.615 INFO UpstairsConnection { upstairs_id: 9fe7fa85-122e-4e82-8591-96595eb36a20, session_id: 57e450d1-3a66-4c19-9890-8b892a800667, gen: 1 } is now active (read-write)
13601 :30,"time":"2023-09-22T23:14:27.618652533Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"2","}time"
13602 :"2023-09-22T23:14:27.618663302Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal"","msg":"pid":4301[2] Proc runs for 127.0.0.1:54983 in state New}"
13603 ,"v":0,"name":"{crucible","level":30"msg":"[1] 127.0.0.1:50563 task reports connection:true","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.618709274Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"}time
13604 ":"2023-09-22T23:14:27.61871864Z"{,"hostname":"ip-10-150-1-74.us-west-2.compute.internal"","msg":"pid":4301}
13605 [0] 9fe7fa85-122e-4e82-8591-96595eb36a20 (57e450d1-3a66-4c19-9890-8b892a800667) New New New ds_transition to WaitActive","v":0,"{name":"crucible",""level"msg:":30"66688a47-67b1-4e47-8e76-e0e145532b3a Active Active Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.618763792Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"}time
13606 ":"2023-09-22T23:14:27.618772441Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal"","msg"pid"::"4301}[0] Transition from New to WaitActive
13607 ","v":0,"{name":"crucible","level"":msg"30:"Set check for repair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.618825772Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time":","pid":43012023-09-22T23:14:27.618832895Z",}"
13608 hostname":"ip-10-150-1-74.us-west-2.compute.internal"{,"pid":4301}
13609 "msg":"{[0] client is_active_req TRUE, promote! session 57e450d1-3a66-4c19-9890-8b892a800667""msg,"":v"":0,"name":"[2] 127.0.0.1:39722 task reports connection:true"crucible",",v"":level0",":name":30"crucible","level":30,,""time":time"":"2023-09-22T23:14:27.618889904Z"2023-09-22T23:14:27.618887113Z",","hostname"hostname:"":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internalpid"":,4301"pid":}4301
13610 }
13611 {"msg":{"66688a47-67b1-4e47-8e76-e0e145532b3a Active Active Active"","msg"v":":0,"name":"crucible","level":30[1] 9fe7fa85-122e-4e82-8591-96595eb36a20 (57e450d1-3a66-4c19-9890-8b892a800667) WaitActive New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.618944231Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid,"":time4301":"}
13612 2023-09-22T23:14:27.618950903Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal",""pid"msg"::4301"Set check for repair"}
13613 ,"v":0,"name":"{crucible","level":30"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.618996492Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""timepid""::4301"}
13614 2023-09-22T23:14:27.619005322Z","hostname":"{ip-10-150-1-74.us-west-2.compute.internal","pid"":msg"4301:"}
13615 [0] received reconcile message","v":{0,"name":""crucible"msg,"":"level":30[1] client is_active_req TRUE, promote! session 57e450d1-3a66-4c19-9890-8b892a800667","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.61905219Z",","time"hostname"::""2023-09-22T23:14:27.619060622Z"ip-10-150-1-74.us-west-2.compute.internal",","hostname"pid"::"4301}
13616 ip-10-150-1-74.us-west-2.compute.internal","pid":4301}{
13617 "msg":"{[0] All repairs completed, exit""msg",:""v":0,"name":"crucible","level":30[2] 9fe7fa85-122e-4e82-8591-96595eb36a20 (57e450d1-3a66-4c19-9890-8b892a800667) WaitActive WaitActive New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time",:""time":"2023-09-22T23:14:27.619105323Z"2023-09-22T23:14:27.619112035Z",","hostname":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internal"pid,"":pid"4301:4301}}
13618 
13619 {{""msg"msg"::""[0] Starts cmd_loop","[2] Transition from New to WaitActive"v",":v"0:,"0name",:""name":crucible"","crucible"level",:"30level":30,"time":"2023-09-22T23:14:27.619168608Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time,"":pid"":43012023-09-22T23:14:27.619167845Z"}
13620 ,"hostname":"{"ip-10-150-1-74.us-west-2.compute.internal"msg":,""pid":4301}
13621 [2] client is_active_req TRUE, promote! session 57e450d1-3a66-4c19-9890-8b892a800667","v":0,"name"{:"crucible","level":"30msg":"[1] received reconcile message","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.619214451Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13622 {,"time"":msg":""2023-09-22T23:14:27.619222659Z","hostname":"[0] downstairs client at 127.0.0.1:58086 has UUID 97377133-8bb1-47f5-a2f0-3b28d74593db","v"ip-10-150-1-74.us-west-2.compute.internal":,0",pid"":name"4301:"crucible"},
13623 "level":30{"msg":"[1] All repairs completed, exit","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.619262179Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13624 {,"time":""msg":"2023-09-22T23:14:27.619274625Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13625 [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 97377133-8bb1-47f5-a2f0-3b28d74593db, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,{"Sep 22 23:14:27.619 DEBG IO Write 1000 has deps []
13626 "msg":"[1] Starts cmd_loop","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.619335966Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
136272023-09-22T23:14:27.619ZINFOcrucible: [2] received reconcile message
136282023-09-22T23:14:27.619ZINFOcrucible: [2] All repairs completed, exit
13629 {"msg":"[2] Starts cmd_loop","v":Sep 22 23:14:27.619 DEBG up_ds_listen was notified
13630 0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.619464587Z","hostname":"Sep 22 23:14:27.619 DEBG up_ds_listen process 1000
13631 ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13632 Sep 22 23:14:27.619 DEBG [A] ack job 1000:1, : downstairs
13633 {"msg":"[0] received reconcile message","v":0,"name":"crucible","level":30Sep 22 23:14:27.619 DEBG up_ds_listen checked 1 jobs, back to waiting
13634 ,"time":"2023-09-22T23:14:27.619558376Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13635 {"msg":"[0] All repairs completed, exit","v":0,"name":"crucible","level":30The guest has finished waiting for activation
13636 ,"time":"2023-09-22T23:14:27.619604231Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13637 {"msg":"[0] Starts cmd_loop","v":0,"name":"crucible","level":30Sep 22 23:14:27.619 INFO Current flush_numbers [0..12]: [0, 0]
13638 ,"time":"2023-09-22T23:14:27.619642727Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
136392023-09-22T23:14:27.619ZINFOcrucible: [1] received reconcile message
136402023-09-22T23:14:27.619ZINFOcrucible: [1] All repairs completed, exit
136412023-09-22T23:14:27.619ZINFOcrucible: [1] Starts cmd_loop
136422023-09-22T23:14:27.619ZINFOcrucible: [2] received reconcile message
136432023-09-22T23:14:27.619ZINFOcrucible: [2] All repairs completed, exit
13644 {"msg":"[2] Starts cmd_loop","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.61984594Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301Sep 22 23:14:27.619 INFO Downstairs has completed Negotiation, task: proc
13645 }
13646 name":"crucible","level":30,"time":"2023-09-22T23:14:27.619920383Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
136472023-09-22T23:14:27.619ZINFOcrucible: 9fe7fa85-122e-4e82-8591-96595eb36a20 WaitActive WaitActive WaitActive
136482023-09-22T23:14:27.619ZINFOcrucible: [1] downstairs client at 127.0.0.1:50371 has UUID 53cd4d63-78de-43d1-b2db-bd20d98528e7
136492023-09-22T23:14:27.620ZINFOcrucible: [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 53cd4d63-78de-43d1-b2db-bd20d98528e7, encrypted: true, database_read_version: 1, database_write_version: 1 }
136502023-09-22T23:14:27.620ZINFOcrucible: 9fe7fa85-122e-4e82-8591-96595eb36a20 WaitActive WaitActive WaitActive
13651 {"msg":"Sep 22 23:14:27.620 INFO Current flush_numbers [0..12]: [0, 0]
13652 [2] downstairs client at 127.0.0.1:54983 has UUID bb88743f-4e54-484f-8e97-8fa2d4bde987","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.62011642Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
136532023-09-22T23:14:27.620ZINFOcrucible: [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: bb88743f-4e54-484f-8e97-8fa2d4bde987, encrypted: true, database_read_version: 1, database_write_version: 1 }
136542023-09-22T23:14:27.620ZINFOcrucible: 9fe7fa85-122e-4e82-8591-96595eb36a20 WaitActive WaitActive WaitActive
13655 Sep 22 23:14:27.620 INFO Downstairs has completed Negotiation, task: proc
13656 Sep 22 23:14:27.620 INFO Connection request from 1f7159d2-76c3-4968-a4c3-9cc9b7758094 with version 4, task: proc
13657 Sep 22 23:14:27.620 INFO Current flush_numbers [0..12]: [0, 0]
13658 Sep 22 23:14:27.620 INFO upstairs UpstairsConnection { upstairs_id: 1f7159d2-76c3-4968-a4c3-9cc9b7758094, session_id: 1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054, gen: 1 } connected, version 4, task: proc
13659 Sep 22 23:14:27.620 INFO current number of open files limit 65536 is already the maximum
13660 Sep 22 23:14:27.620 INFO Connection request from 1f7159d2-76c3-4968-a4c3-9cc9b7758094 with version 4, task: proc
13661 Sep 22 23:14:27.620 INFO upstairs UpstairsConnection { upstairs_id: 1f7159d2-76c3-4968-a4c3-9cc9b7758094, session_id: 1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054, gen: 1 } connected, version 4, task: proc
13662 Sep 22 23:14:27.620 INFO Created new region file "/tmp/downstairs-zdyp8uYh/region.json"
13663 Sep 22 23:14:27.620 INFO Downstairs has completed Negotiation, task: proc
13664 Sep 22 23:14:27.620 INFO Connection request from 1f7159d2-76c3-4968-a4c3-9cc9b7758094 with version 4, task: proc
13665 Sep 22 23:14:27.620 INFO upstairs UpstairsConnection { upstairs_id: 1f7159d2-76c3-4968-a4c3-9cc9b7758094, session_id: 1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054, gen: 1 } connected, version 4, task: proc
13666 {{""msgmsg""::""[0] 1f7159d2-76c3-4968-a4c3-9cc9b7758094 (1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054) New New New ds_transition to WaitActive"[0] 9fe7fa85-122e-4e82-8591-96595eb36a20 (57e450d1-3a66-4c19-9890-8b892a800667) WaitActive WaitActive WaitActive ds_transition to WaitQuorum","v",:"v":00,,""namename""::""cruciblecrucible"",,""levellevel""::3030,,""timetime""::""2023-09-22T23:14:27.620963142Z2023-09-22T23:14:27.620963186Z"",,""hostnamehostname""::""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::43014301}}
13667 
136682023-09-22T23:14:27.621ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
136692023-09-22T23:14:27.621ZWARNcrucible: [0] new RM replaced this: None
136702023-09-22T23:14:27.621ZINFOcrucible: [0] Starts reconcile loop
136712023-09-22T23:14:27.621ZINFOcrucible: [1] 9fe7fa85-122e-4e82-8591-96595eb36a20 (57e450d1-3a66-4c19-9890-8b892a800667) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
13672 {"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.621146448Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal"Sep 22 23:14:27.621 DEBG Read :1000 deps:[] res:true
13673 ,"pid":4301}
136742023-09-22T23:14:27.621ZWARNcrucible: [1] new RM replaced this: None
136752023-09-22T23:14:27.621ZINFOcrucible: [1] Starts reconcile loop
136762023-09-22T23:14:27.621ZINFOcrucible: [2] 9fe7fa85-122e-4e82-8591-96595eb36a20 (57e450d1-3a66-4c19-9890-8b892a800667) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
136772023-09-22T23:14:27.621ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
136782023-09-22T23:14:27.621ZWARNcrucible: [2] new RM replaced this: None
136792023-09-22T23:14:27.621ZINFOcrucible: [2] Starts reconcile loop
136802023-09-22T23:14:27.621ZINFOcrucible: [0] 127.0.0.1:58086 task reports connection:true
136812023-09-22T23:14:27.621ZINFOcrucible: 9fe7fa85-122e-4e82-8591-96595eb36a20 WaitQuorum WaitQuorum WaitQuorum
136822023-09-22T23:14:27.621ZINFOcrucible: [0]R flush_numbers: [0, 0]
136832023-09-22T23:14:27.621ZINFOcrucible: [0]R generation: [0, 0]
136842023-09-22T23:14:27.621ZINFOcrucible: [0]R dirty: [false, false]
136852023-09-22T23:14:27.621ZINFOcrucible: [1]R flush_numbers: [0, 0]
13686 {"msg":"[1]R generation: [0, 0]","vSep 22 23:14:27.621 DEBG Read :1000 deps:[] res:true
13687 ":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.621561778Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13688 {"msg":"[1]R dirty: [false, false]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.621591798Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":{4301}
13689 "msg":"{"[0] Transition from New to WaitActive"msg":","v":0[2]R flush_numbers: [0, 0]",","name"v:"":0crucible,"",name"":"level":crucible"30,"level":30,,""timetime""::""2023-09-22T23:14:27.621622349Z"2023-09-22T23:14:27.621621209Z",","hostname"hostname:"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",","pid"pid:":43014301}}
13690 
13691 {"{msg":""msg"[2]R generation: [0, 0]:"","v":0,"name":"crucible"[0] client is_active_req TRUE, promote! session 1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054,"",level"":v"30:0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.621656998Z",","time":hostname"":"2023-09-22T23:14:27.621660769Z","ip-10-150-1-74.us-west-2.compute.internal",hostname"":pid"":4301}
13692 ip-10-150-1-74.us-west-2.compute.internal","pid":4301{}
13693 "msg":"[2]R dirty: [false, false]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.621691078Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
136942023-09-22T23:14:27.621ZINFOcrucible: Max found gen is 1
136952023-09-22T23:14:27.621ZINFOcrucible: Generation requested: 1 >= found:1
136962023-09-22T23:14:27.621ZINFOcrucible: Next flush: 1
13697 {"msg":"All extents match","v":0,"name":"crucible","level":The guest has finished waiting for activation
13698 30,"time":"2023-09-22T23:14:27.621843699Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
136992023-09-22T23:14:27.621ZINFOcrucible: No downstairs repair required
137002023-09-22T23:14:27.621ZINFOcrucible: No initial repair work was required
137012023-09-22T23:14:27.621ZINFOcrucible: Set Downstairs and Upstairs active
137022023-09-22T23:14:27.622ZINFOcrucible: 9fe7fa85-122e-4e82-8591-96595eb36a20 is now active with session: 57e450d1-3a66-4c19-9890-8b892a800667
137032023-09-22T23:14:27.622ZINFOcrucible: 9fe7fa85-122e-4e82-8591-96595eb36a20 Set Active after no repair
137042023-09-22T23:14:27.622ZINFOcrucible: Notify all downstairs, region set compare is done.
13705 Sep 22 23:14:27.622 DEBG Read :1000 deps:[] res:true
137062023-09-22T23:14:27.622ZINFOcrucible: Set check for repair
137072023-09-22T23:14:27.622ZINFOcrucible: [1] 127.0.0.1:50371 task reports connection:true
137082023-09-22T23:14:27.622ZINFOcrucible: 9fe7fa85-122e-4e82-8591-96595eb36a20 Active Active Active
137092023-09-22T23:14:27.622ZINFOcrucible: Set check for repair
137102023-09-22T23:14:27.622ZINFOcrucible: [2] 127.0.0.1:54983 task reports connection:true
137112023-09-22T23:14:27.622ZINFOcrucible: 9fe7fa85-122e-4e82-8591-96595eb36a20 Active Active Active
13712 {{"msg":""Set check for repairmsg"":","v":0,"name":"crucible","level":[1] 1f7159d2-76c3-4968-a4c3-9cc9b7758094 (1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054) WaitActive New New ds_transition to WaitActive30","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.622299554Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"}time
13713 ":"2023-09-22T23:14:27.622305761Z"{,"hostname"":msg"":"[0] received reconcile messageip-10-150-1-74.us-west-2.compute.internal"",,""pidv""::43010,"name}":
13714 "crucible","level":{30"msg":"[1] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.622337892Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301},
13715 "time":"{2023-09-22T23:14:27.622347015Z"","msg":hostname"":"[0] All repairs completed, exit"ip-10-150-1-74.us-west-2.compute.internal,"",v"":pid"0:,4301"name"}:"
13716 crucible","level"{:30"msg":"[1] client is_active_req TRUE, promote! session 1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054","v":0,"name":"crucible",,""leveltime""::"302023-09-22T23:14:27.622372098Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13717 ,"time":"{2023-09-22T23:14:27.622383084Z""msg,"":"hostname":"[0] Starts cmd_loop","v":ip-10-150-1-74.us-west-2.compute.internal"0,,""pid"name:":4301"}crucible
13718 ","level":30,"time":"2023-09-22T23:14:27.62240594Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
137192023-09-22T23:14:27.622ZINFOcrucible: [1] received reconcile message
137202023-09-22T23:14:27.622ZINFOcrucible: [1] All repairs completed, exit
137212023-09-22T23:14:27.622ZINFOcrucible: [1] Starts cmd_loop
137222023-09-22T23:14:27.622ZINFOcrucible: [2] received reconcile message
137232023-09-22T23:14:27.622ZINFOcrucible: [2] All repairs completed, exit
137242023-09-22T23:14:27.622ZINFOcrucible: [2] Starts cmd_loop
137252023-09-22T23:14:27.622ZINFOcrucible: [2] 1f7159d2-76c3-4968-a4c3-9cc9b7758094 (1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054) WaitActive WaitActive New ds_transition to WaitActive
137262023-09-22T23:14:27.622ZINFOcrucible: [2] Transition from New to WaitActive
137272023-09-22T23:14:27.622ZINFOcrucible: [2] client is_active_req TRUE, promote! session 1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054
13728 Sep 22 23:14:27.622 INFO UpstairsConnection { upstairs_id: 1f7159d2-76c3-4968-a4c3-9cc9b7758094, session_id: 1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054, gen: 1 } is now active (read-write)
13729 Sep 22 23:14:27.623 INFO UpstairsConnection { upstairs_id: 1f7159d2-76c3-4968-a4c3-9cc9b7758094, session_id: 1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054, gen: 1 } is now active (read-write)
13730 Sep 22 23:14:27.623 INFO UpstairsConnection { upstairs_id: 1f7159d2-76c3-4968-a4c3-9cc9b7758094, session_id: 1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054, gen: 1 } is now active (read-write)
13731 Sep 22 23:14:27.623 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13732 Sep 22 23:14:27.623 INFO current number of open files limit 65536 is already the maximum
13733 Sep 22 23:14:27.623 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13734 Sep 22 23:14:27.623 INFO Opened existing region file "/tmp/downstairs-zdyp8uYh/region.json"
13735 Sep 22 23:14:27.623 INFO Database read version 1
13736 Sep 22 23:14:27.623 INFO Database write version 1
13737 {"msg":"[0] downstairs client at 127.0.0.1:33768 has UUID daca29a6-1615-4360-b286-c9a82fbfcd3d","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.623648747Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301Sep 22 23:14:27.623 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13738 }
137392023-09-22T23:14:27.623ZINFOcrucible: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: daca29a6-1615-4360-b286-c9a82fbfcd3d, encrypted: true, database_read_version: 1, database_write_version: 1 }
137402023-09-22T23:14:27.623ZINFOcrucible: 1f7159d2-76c3-4968-a4c3-9cc9b7758094 WaitActive WaitActive WaitActive
13741 {Sep 22 23:14:27.623 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13742 "msg":"[1] downstairs client at 127.0.0.1:54745 has UUID 80932238-df83-4f6e-96fc-9d0aac0f8ced","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.623809002Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
137432023-09-22T23:14:27.623ZINFOcrucible: [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 80932238-df83-4f6e-96fc-9d0aac0f8ced, encrypted: true, database_read_version: 1, database_write_version: 1 }
137442023-09-22T23:14:27.623ZINFOcrucible: 1f7159d2-76c3-4968-a4c3-9cc9b7758094 WaitActive WaitActive WaitActive
137452023-09-22T23:14:27.623ZINFOcrucible: [2] downstairs client at 127.0.0.1:56258 has UUID 67caa349-8834-4aba-8026-7fdaaae2f119
137462023-09-22T23:14:27.624ZINFOcrucible: [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 67caa349-8834-4aba-8026-7fdaaae2f119, encrypted: true, database_read_version: 1, database_write_version: 1 }
13747 {"msg":"1f7159d2-76c3-4968-a4c3-9cc9b7758094 WaitActive WaitActive WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.624043887Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",Sep 22 23:14:27.624 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13748 "pid":4301}
13749 Sep 22 23:14:27.624 INFO Current flush_numbers [0..12]: [0, 0]
13750 Sep 22 23:14:27.624 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13751 Sep 22 23:14:27.624 INFO Downstairs has completed Negotiation, task: proc
13752 Sep 22 23:14:27.624 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13753 Sep 22 23:14:27.624 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13754 Sep 22 23:14:27.624 INFO Current flush_numbers [0..12]: [0, 0]
13755 Sep 22 23:14:27.624 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13756 Sep 22 23:14:27.624 INFO Downstairs has completed Negotiation, task: proc
13757 Sep 22 23:14:27.624 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13758 Sep 22 23:14:27.624 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13759 Sep 22 23:14:27.624 INFO UUID: ae376576-6754-4185-9b1d-d23a94866419
13760 Sep 22 23:14:27.624 INFO Blocks per extent:5 Total Extents: 2
13761 Sep 22 23:14:27.624 INFO Current flush_numbers [0..12]: [0, 0]
13762 Sep 22 23:14:27.624 INFO Crucible Version: Crucible Version: 0.0.1
13763 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13764 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13765 rustc: 1.70.0 stable x86_64-unknown-illumos
13766 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13767 Sep 22 23:14:27.625 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13768 Sep 22 23:14:27.625 INFO Using address: 127.0.0.1:62845, task: main
13769 Sep 22 23:14:27.625 INFO Downstairs has completed Negotiation, task: proc
13770 Sep 22 23:14:27.625 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
137712023-09-22T23:14:27.625ZINFOcrucible: [0] 1f7159d2-76c3-4968-a4c3-9cc9b7758094 (1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
137722023-09-22T23:14:27.625ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
137732023-09-22T23:14:27.625ZWARNcrucible: [0] new RM replaced this: None
13774 {"msg":"[0] Starts reconcile loop","v"Sep 22 23:14:27.625 INFO Repair listens on 127.0.0.1:0, task: repair
13775 :0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.625488313Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
137762023-09-22T23:14:27.625ZINFOcrucible: [1] 1f7159d2-76c3-4968-a4c3-9cc9b7758094 (1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
137772023-09-22T23:14:27.625ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
13778 {"msg":"[1] new RM replaced this: None","v":0,"name":"Sep 22 23:14:27.625 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51597, task: repair
13779 crucible","level":40,"time":"2023-09-22T23:14:27.625632586Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13780 {"msg":"[1] Starts reconcile loop","v":0,"name":"crucible","levelSep 22 23:14:27.625 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51597, task: repair
13781 ":30,"time":"2023-09-22T23:14:27.625679187Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13782 {"msg":"[2] 1f7159d2-76c3-4968-a4c3-9cc9b7758094 (1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorumSep 22 23:14:27.625 INFO listening, local_addr: 127.0.0.1:51597, task: repair
13783 ","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.625729113Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
137842023-09-22T23:14:27.625ZINFOcrucible: [2] Transition from WaitActive to WaitQuorum
137852023-09-22T23:14:27.625ZWARNcrucible: [2] new RM replaced this: None
137862023-09-22T23:14:27.625ZINFOcrucible: [2] Starts reconcile loop
137872023-09-22T23:14:27.625ZINFOcrucible: [0] 127.0.0.1:33768 task reports connection:true
13788 {"msg":"1f7159d2-76c3-4968-a4c3-9cc9b7758094 WaitQuorum WaitQuorum WaitQuorum","Sep 22 23:14:27.625 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51597, task: repair
13789 v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.625937464Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13790 {"msg":"[0]R flush_numbers: [0, 0]","v":0,"name":"crucible","level":30,"time":"Sep 22 23:14:27.625 INFO Using repair address: 127.0.0.1:51597, task: main
13791 2023-09-22T23:14:27.62597782Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
13792 {Sep 22 23:14:27.626 INFO No SSL acceptor configured, task: main
13793 "msg":"[0]R generation: [0, 0]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.626027222Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
137942023-09-22T23:14:27.626ZINFOcrucible: [0]R dirty: [false, false]
137952023-09-22T23:14:27.626ZINFOcrucible: [1]R flush_numbers: [0, 0]
13796 {"msg":"[1]R generation: [0, 0]"The guest has finished waiting for activation
13797 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.626151501Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
137982023-09-22T23:14:27.626ZINFOcrucible: [1]R dirty: [false, false]
137992023-09-22T23:14:27.626ZINFOcrucible: [2]R flush_numbers: [0, 0]
138002023-09-22T23:14:27.626ZINFOcrucible: [2]R generation: [0, 0]
138012023-09-22T23:14:27.626ZINFOcrucible: [2]R dirty: [false, false]
138022023-09-22T23:14:27.626ZINFOcrucible: Max found gen is 1
138032023-09-22T23:14:27.626ZINFOcrucible: Generation requested: 1 >= found:1
138042023-09-22T23:14:27.626ZINFOcrucible: Next flush: 1
138052023-09-22T23:14:27.626ZINFOcrucible: All extents match
138062023-09-22T23:14:27.626ZINFOcrucible: No downstairs repair required
138072023-09-22T23:14:27.626ZINFOcrucible: No initial repair work was required
138082023-09-22T23:14:27.626ZINFOcrucible: Set Downstairs and Upstairs active
138092023-09-22T23:14:27.626ZINFOcrucible: 1f7159d2-76c3-4968-a4c3-9cc9b7758094 is now active with session: 1b3283bb-b7f5-4e0f-94bf-8ec6efc0c054
138102023-09-22T23:14:27.626ZINFOcrucible: 1f7159d2-76c3-4968-a4c3-9cc9b7758094 Set Active after no repair
138112023-09-22T23:14:27.626ZINFOcrucible: Notify all downstairs, region set compare is done.
138122023-09-22T23:14:27.626ZINFOcrucible: Set check for repair
138132023-09-22T23:14:27.626ZINFOcrucible: [1] 127.0.0.1:54745 task reports connection:true
138142023-09-22T23:14:27.626ZINFOcrucible: 1f7159d2-76c3-4968-a4c3-9cc9b7758094 Active Active Active
138152023-09-22T23:14:27.626ZINFOcrucible: Set check for repair
138162023-09-22T23:14:27.626ZINFOcrucible: [2] 127.0.0.1:56258 task reports connection:true
138172023-09-22T23:14:27.626ZINFOcrucible: 1f7159d2-76c3-4968-a4c3-9cc9b7758094 Active Active Active
138182023-09-22T23:14:27.626ZINFOcrucible: Set check for repair
138192023-09-22T23:14:27.626ZINFOcrucible: [0] received reconcile message
138202023-09-22T23:14:27.627ZINFOcrucible: [0] All repairs completed, exit
138212023-09-22T23:14:27.627ZINFOcrucible: [0] Starts cmd_loop
138222023-09-22T23:14:27.627ZINFOcrucible: [1] received reconcile message
138232023-09-22T23:14:27.627ZINFOcrucible: [1] All repairs completed, exit
138242023-09-22T23:14:27.627ZINFOcrucible: [1] Starts cmd_loop
138252023-09-22T23:14:27.627ZINFOcrucible: [2] received reconcile message
138262023-09-22T23:14:27.627ZINFOcrucible: [2] All repairs completed, exit
138272023-09-22T23:14:27.627ZINFOcrucible: [2] Starts cmd_loop
13828 Sep 22 23:14:27.627 INFO listening on 127.0.0.1:0, task: main
13829 Sep 22 23:14:27.627 WARN a50fc3fb-3de2-4743-9c50-cc80cfba77db request to replace downstairs 127.0.0.1:42970 with 127.0.0.1:62845
13830 Sep 22 23:14:27.627 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db found old target: 127.0.0.1:42970 at 0
13831 Sep 22 23:14:27.627 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db replacing old: 127.0.0.1:42970 at 0
13832 Sep 22 23:14:27.628 INFO [0] client skip 1 in process jobs because fault, : downstairs
13833 Sep 22 23:14:27.628 INFO [0] changed 1 jobs to fault skipped, : downstairs
13834 Sep 22 23:14:27.628 INFO [0] a50fc3fb-3de2-4743-9c50-cc80cfba77db (2b698136-bbd2-4fcb-ab7c-5f19b2267eb8) Active Active Active ds_transition to Replacing
13835 Sep 22 23:14:27.628 INFO [0] Transition from Active to Replacing
13836 Sep 22 23:14:27.629 DEBG Write :1000 deps:[] res:true
13837 test test::integration_test_guest_downstairs_unwritten_sparse_mid ... ok
13838 test test::integration_test_guest_downstairs_unwritten_span_2 ... ok
13839 test test::integration_test_guest_downstairs_unwritten_sparse_end ... ok
13840 Sep 22 23:14:27.634 DEBG Write :1000 deps:[] res:true
13841 Sep 22 23:14:27.634 INFO current number of open files limit 65536 is already the maximum
13842 Sep 22 23:14:27.634 DEBG Write :1001 deps:[JobId(1000)] res:true
13843 Sep 22 23:14:27.634 INFO current number of open files limit 65536 is already the maximum
13844 Sep 22 23:14:27.634 INFO Created new region file "/tmp/downstairs-ZvKZ4Jxx/region.json"
13845 Sep 22 23:14:27.634 INFO Created new region file "/tmp/downstairs-uHYeHXUA/region.json"
13846 Sep 22 23:14:27.634 INFO current number of open files limit 65536 is already the maximum
13847 Sep 22 23:14:27.634 INFO Created new region file "/tmp/downstairs-RigWqboV/region.json"
13848 test test::integration_test_guest_downstairs_unwritten_span ... Sep 22 23:14:27.634 DEBG Write :1000 deps:[] res:true
13849 ok
13850 Sep 22 23:14:27.635 DEBG Write :1001 deps:[JobId(1000)] res:true
13851 Sep 22 23:14:27.635 INFO current number of open files limit 65536 is already the maximum
13852 Sep 22 23:14:27.635 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13853 Sep 22 23:14:27.635 WARN [0] a50fc3fb-3de2-4743-9c50-cc80cfba77db WARNING finish job 1000 when downstairs state:Replacing
13854 Sep 22 23:14:27.635 WARN [0] Dropping already skipped job 1000, : downstairs
13855 Sep 22 23:14:27.635 INFO Created new region file "/tmp/downstairs-k0LAAe9c/region.json"
13856 Sep 22 23:14:27.635 WARN [0] will exit pm_task, this downstairs Replacing
13857 Sep 22 23:14:27.635 DEBG up_ds_listen was notified
13858 Sep 22 23:14:27.635 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13859 Sep 22 23:14:27.635 DEBG up_ds_listen checked 0 jobs, back to waiting
13860 Sep 22 23:14:27.635 ERRO 127.0.0.1:42970: proc: [0] client work task ended, Ok(Err([0] This downstairs now in Replacing)), so we end too, looper: 0
13861 Sep 22 23:14:27.635 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
13862 Sep 22 23:14:27.635 INFO [0] a50fc3fb-3de2-4743-9c50-cc80cfba77db Gone missing, transition from Replacing to Replaced
13863 Sep 22 23:14:27.635 INFO [0] a50fc3fb-3de2-4743-9c50-cc80cfba77db connection to 127.0.0.1:42970 closed, looper: 0
13864 Sep 22 23:14:27.635 INFO [0] 127.0.0.1:42970 task reports connection:false
13865 Sep 22 23:14:27.635 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db Replaced Active Active
13866 Sep 22 23:14:27.635 INFO [0] 127.0.0.1:42970 task reports offline
13867 Sep 22 23:14:27.635 WARN upstairs UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } disconnected, 0 jobs left, task: main
13868 Sep 22 23:14:27.635 DEBG Read :1001 deps:[JobId(1000)] res:true
13869 Sep 22 23:14:27.635 DEBG Write :1001 deps:[JobId(1000)] res:true
13870 Sep 22 23:14:27.635 WARN upstairs UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } was previously active, clearing, task: main
13871 Sep 22 23:14:27.636 INFO connection (127.0.0.1:40268): all done
13872 Sep 22 23:14:27.636 DEBG Read :1001 deps:[JobId(1000)] res:true
13873 Sep 22 23:14:27.637 DEBG Read :1002 deps:[JobId(1001)] res:true
13874 Sep 22 23:14:27.637 DEBG Read :1002 deps:[JobId(1001)] res:true
13875 Sep 22 23:14:27.637 DEBG Read :1002 deps:[JobId(1001)] res:true
13876 Sep 22 23:14:27.637 DEBG Read :1001 deps:[JobId(1000)] res:true
13877 Sep 22 23:14:27.637 DEBG Read :1003 deps:[JobId(1001)] res:true
13878 Sep 22 23:14:27.638 INFO current number of open files limit 65536 is already the maximum
13879 Sep 22 23:14:27.638 INFO Opened existing region file "/tmp/downstairs-RigWqboV/region.json"
13880 Sep 22 23:14:27.638 INFO Database read version 1
13881 Sep 22 23:14:27.638 INFO Database write version 1
13882 Sep 22 23:14:27.638 DEBG Read :1003 deps:[JobId(1001)] res:true
13883 Sep 22 23:14:27.638 INFO current number of open files limit 65536 is already the maximum
13884 Sep 22 23:14:27.638 INFO Opened existing region file "/tmp/downstairs-uHYeHXUA/region.json"
13885 Sep 22 23:14:27.638 DEBG Read :1003 deps:[JobId(1001)] res:true
13886 Sep 22 23:14:27.638 INFO Database read version 1
13887 Sep 22 23:14:27.638 INFO Database write version 1
13888 Sep 22 23:14:27.638 INFO current number of open files limit 65536 is already the maximum
13889 Sep 22 23:14:27.638 INFO Opened existing region file "/tmp/downstairs-ZvKZ4Jxx/region.json"
13890 Sep 22 23:14:27.638 INFO Database read version 1
13891 Sep 22 23:14:27.638 INFO Database write version 1
13892 Sep 22 23:14:27.638 INFO current number of open files limit 65536 is already the maximum
13893 Sep 22 23:14:27.638 INFO Opened existing region file "/tmp/downstairs-k0LAAe9c/region.json"
13894 Sep 22 23:14:27.638 INFO Database read version 1
13895 Sep 22 23:14:27.638 INFO Database write version 1
13896 Sep 22 23:14:27.639 INFO UUID: ae16b993-7324-48a0-87ab-26e33b2b0247
13897 Sep 22 23:14:27.639 INFO Blocks per extent:5 Total Extents: 2
13898 Sep 22 23:14:27.639 INFO Crucible Version: Crucible Version: 0.0.1
13899 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13900 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13901 rustc: 1.70.0 stable x86_64-unknown-illumos
13902 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13903 Sep 22 23:14:27.639 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13904 Sep 22 23:14:27.639 INFO Using address: 127.0.0.1:47517, task: main
13905 Sep 22 23:14:27.639 INFO UUID: 7300b28a-83f2-47df-a265-634df7adc55c
13906 Sep 22 23:14:27.639 INFO Blocks per extent:5 Total Extents: 2
13907 Sep 22 23:14:27.639 INFO Crucible Version: Crucible Version: 0.0.1
13908 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13909 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13910 rustc: 1.70.0 stable x86_64-unknown-illumos
13911 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13912 Sep 22 23:14:27.639 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13913 Sep 22 23:14:27.639 INFO UUID: 6f874cfa-c3be-49c7-9256-3b7afc29b96b
13914 Sep 22 23:14:27.639 INFO Using address: 127.0.0.1:53663, task: main
13915 Sep 22 23:14:27.639 INFO Blocks per extent:5 Total Extents: 2
13916 Sep 22 23:14:27.639 INFO Repair listens on 127.0.0.1:0, task: repair
13917 Sep 22 23:14:27.639 INFO Crucible Version: Crucible Version: 0.0.1
13918 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13919 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13920 rustc: 1.70.0 stable x86_64-unknown-illumos
13921 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13922 Sep 22 23:14:27.639 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51325, task: repair
13923 Sep 22 23:14:27.639 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13924 Sep 22 23:14:27.639 INFO Using address: 127.0.0.1:36461, task: main
13925 Sep 22 23:14:27.639 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51325, task: repair
13926 Sep 22 23:14:27.639 INFO listening, local_addr: 127.0.0.1:51325, task: repair
13927 Sep 22 23:14:27.639 INFO Repair listens on 127.0.0.1:0, task: repair
13928 Sep 22 23:14:27.639 INFO UUID: 23321635-9e40-4165-b7e2-14f978a59fa9
13929 Sep 22 23:14:27.639 INFO Blocks per extent:5 Total Extents: 2
13930 Sep 22 23:14:27.639 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51325, task: repair
13931 Sep 22 23:14:27.639 INFO Using repair address: 127.0.0.1:51325, task: main
13932 Sep 22 23:14:27.639 INFO Crucible Version: Crucible Version: 0.0.1
13933 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13934 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13935 rustc: 1.70.0 stable x86_64-unknown-illumos
13936 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13937 Sep 22 23:14:27.639 INFO No SSL acceptor configured, task: main
13938 Sep 22 23:14:27.639 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13939 Sep 22 23:14:27.639 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33584, task: repair
13940 Sep 22 23:14:27.639 INFO Using address: 127.0.0.1:49699, task: main
13941 Sep 22 23:14:27.639 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33584, task: repair
13942 Sep 22 23:14:27.639 INFO Repair listens on 127.0.0.1:0, task: repair
13943 Sep 22 23:14:27.640 INFO listening, local_addr: 127.0.0.1:33584, task: repair
13944 Sep 22 23:14:27.640 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:45034, task: repair
13945 Sep 22 23:14:27.640 INFO current number of open files limit 65536 is already the maximum
13946 Sep 22 23:14:27.640 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:45034, task: repair
13947 Sep 22 23:14:27.640 INFO listening, local_addr: 127.0.0.1:45034, task: repair
13948 Sep 22 23:14:27.640 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33584, task: repair
13949 Sep 22 23:14:27.640 INFO Created new region file "/tmp/downstairs-dsMmDgYS/region.json"
13950 Sep 22 23:14:27.640 INFO Using repair address: 127.0.0.1:33584, task: main
13951 Sep 22 23:14:27.640 INFO No SSL acceptor configured, task: main
13952 Sep 22 23:14:27.640 INFO Repair listens on 127.0.0.1:0, task: repair
13953 Sep 22 23:14:27.640 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:45034, task: repair
13954 Sep 22 23:14:27.640 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38823, task: repair
13955 Sep 22 23:14:27.640 INFO Using repair address: 127.0.0.1:45034, task: main
13956 Sep 22 23:14:27.640 INFO No SSL acceptor configured, task: main
13957 Sep 22 23:14:27.640 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38823, task: repair
13958 Sep 22 23:14:27.640 INFO current number of open files limit 65536 is already the maximum
13959 Sep 22 23:14:27.640 INFO listening, local_addr: 127.0.0.1:38823, task: repair
13960 Sep 22 23:14:27.640 INFO Created new region file "/tmp/downstairs-GUWqp6Vc/region.json"
13961 Sep 22 23:14:27.640 INFO current number of open files limit 65536 is already the maximum
13962 Sep 22 23:14:27.640 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38823, task: repair
13963 Sep 22 23:14:27.640 INFO Using repair address: 127.0.0.1:38823, task: main
13964 Sep 22 23:14:27.640 INFO No SSL acceptor configured, task: main
13965 Sep 22 23:14:27.640 INFO Created new region file "/tmp/downstairs-virqsJW3/region.json"
13966 Sep 22 23:14:27.640 INFO current number of open files limit 65536 is already the maximum
13967 Sep 22 23:14:27.641 INFO Created new region file "/tmp/downstairs-WtHBKHj4/region.json"
13968 Sep 22 23:14:27.643 INFO current number of open files limit 65536 is already the maximum
13969 Sep 22 23:14:27.643 INFO Opened existing region file "/tmp/downstairs-dsMmDgYS/region.json"
13970 Sep 22 23:14:27.643 INFO Database read version 1
13971 Sep 22 23:14:27.643 INFO Database write version 1
13972 test test::integration_test_guest_downstairs_unwritten_sparse_1 ... ok
13973 Sep 22 23:14:27.644 INFO current number of open files limit 65536 is already the maximum
13974 Sep 22 23:14:27.644 INFO Created new region file "/tmp/downstairs-Xe2fQWgH/region.json"
13975 Sep 22 23:14:27.645 INFO UUID: 08d2b418-826a-41f9-a799-580b98907013
13976 Sep 22 23:14:27.645 INFO Blocks per extent:5 Total Extents: 2
13977 Sep 22 23:14:27.645 INFO Crucible Version: Crucible Version: 0.0.1
13978 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
13979 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
13980 rustc: 1.70.0 stable x86_64-unknown-illumos
13981 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
13982 Sep 22 23:14:27.645 INFO Upstairs <-> Downstairs Message Version: 4, task: main
13983 Sep 22 23:14:27.645 INFO Using address: 127.0.0.1:64593, task: main
13984 Sep 22 23:14:27.645 INFO current number of open files limit 65536 is already the maximum
13985 Sep 22 23:14:27.645 INFO current number of open files limit 65536 is already the maximum
13986 Sep 22 23:14:27.645 INFO Opened existing region file "/tmp/downstairs-virqsJW3/region.json"
13987 Sep 22 23:14:27.645 INFO Opened existing region file "/tmp/downstairs-GUWqp6Vc/region.json"
13988 Sep 22 23:14:27.645 INFO Database read version 1
13989 Sep 22 23:14:27.645 INFO Database read version 1
13990 Sep 22 23:14:27.645 INFO Database write version 1
13991 Sep 22 23:14:27.645 INFO Database write version 1
13992 Sep 22 23:14:27.645 INFO Repair listens on 127.0.0.1:0, task: repair
13993 Sep 22 23:14:27.645 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:62090, task: repair
13994 Sep 22 23:14:27.645 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:62090, task: repair
13995 Sep 22 23:14:27.645 INFO listening, local_addr: 127.0.0.1:62090, task: repair
13996 Sep 22 23:14:27.645 INFO current number of open files limit 65536 is already the maximum
13997 Sep 22 23:14:27.645 INFO Opened existing region file "/tmp/downstairs-WtHBKHj4/region.json"
13998 Sep 22 23:14:27.645 INFO Database read version 1
13999 Sep 22 23:14:27.645 INFO Database write version 1
14000 Sep 22 23:14:27.645 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:62090, task: repair
14001 Sep 22 23:14:27.645 INFO Using repair address: 127.0.0.1:62090, task: main
14002 Sep 22 23:14:27.645 INFO No SSL acceptor configured, task: main
14003 Sep 22 23:14:27.646 INFO current number of open files limit 65536 is already the maximum
14004 Sep 22 23:14:27.646 INFO Created new region file "/tmp/downstairs-F0NkJkt9/region.json"
14005 Sep 22 23:14:27.647 INFO UUID: 4c5d3d8c-93c2-4478-a576-d240d19aa24c
14006 Sep 22 23:14:27.647 INFO Blocks per extent:5 Total Extents: 2
14007 Sep 22 23:14:27.647 INFO UUID: 16a7ce45-bb2e-4212-a780-53352bc92fab
14008 Sep 22 23:14:27.647 INFO Blocks per extent:5 Total Extents: 2
14009 Sep 22 23:14:27.647 INFO Crucible Version: Crucible Version: 0.0.1
14010 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14011 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14012 rustc: 1.70.0 stable x86_64-unknown-illumos
14013 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14014 Sep 22 23:14:27.647 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14015 Sep 22 23:14:27.647 INFO Using address: 127.0.0.1:48269, task: main
14016 Sep 22 23:14:27.647 INFO Crucible Version: Crucible Version: 0.0.1
14017 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14018 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14019 rustc: 1.70.0 stable x86_64-unknown-illumos
14020 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14021 Sep 22 23:14:27.647 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14022 Sep 22 23:14:27.647 INFO Using address: 127.0.0.1:49337, task: main
14023 Sep 22 23:14:27.647 INFO UUID: 7423012d-517a-406f-808c-d47b1ac32a6d
14024 Sep 22 23:14:27.647 INFO Blocks per extent:5 Total Extents: 2
14025 test test::integration_test_guest_downstairs ... ok
14026 Sep 22 23:14:27.648 INFO Crucible Version: Crucible Version: 0.0.1
14027 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14028 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14029 rustc: 1.70.0 stable x86_64-unknown-illumos
14030 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14031 Sep 22 23:14:27.648 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14032 Sep 22 23:14:27.648 INFO Using address: 127.0.0.1:39986, task: main
14033 Sep 22 23:14:27.648 INFO Repair listens on 127.0.0.1:0, task: repair
14034 Sep 22 23:14:27.648 INFO Repair listens on 127.0.0.1:0, task: repair
14035 Sep 22 23:14:27.648 INFO current number of open files limit 65536 is already the maximum
14036 Sep 22 23:14:27.648 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60654, task: repair
14037 Sep 22 23:14:27.648 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53160, task: repair
14038 Sep 22 23:14:27.648 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53160, task: repair
14039 Sep 22 23:14:27.648 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60654, task: repair
14040 Sep 22 23:14:27.648 INFO Created new region file "/tmp/downstairs-kK2UANdj/region.json"
14041 Sep 22 23:14:27.648 INFO Repair listens on 127.0.0.1:0, task: repair
14042 Sep 22 23:14:27.648 INFO listening, local_addr: 127.0.0.1:60654, task: repair
14043 Sep 22 23:14:27.648 INFO listening, local_addr: 127.0.0.1:53160, task: repair
14044 Sep 22 23:14:27.648 INFO current number of open files limit 65536 is already the maximum
14045 Sep 22 23:14:27.648 INFO Opened existing region file "/tmp/downstairs-Xe2fQWgH/region.json"
14046 Sep 22 23:14:27.648 INFO Database read version 1
14047 Sep 22 23:14:27.648 INFO Database write version 1
14048 Sep 22 23:14:27.648 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:43794, task: repair
14049 Sep 22 23:14:27.648 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60654, task: repair
14050 Sep 22 23:14:27.648 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53160, task: repair
14051 Sep 22 23:14:27.648 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:43794, task: repair
14052 Sep 22 23:14:27.648 INFO Using repair address: 127.0.0.1:60654, task: main
14053 Sep 22 23:14:27.648 INFO No SSL acceptor configured, task: main
14054 Sep 22 23:14:27.648 INFO Using repair address: 127.0.0.1:53160, task: main
14055 Sep 22 23:14:27.648 INFO listening, local_addr: 127.0.0.1:43794, task: repair
14056 Sep 22 23:14:27.648 INFO No SSL acceptor configured, task: main
14057 Sep 22 23:14:27.648 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:43794, task: repair
14058 Sep 22 23:14:27.648 INFO Using repair address: 127.0.0.1:43794, task: main
14059 Sep 22 23:14:27.648 INFO No SSL acceptor configured, task: main
14060 Sep 22 23:14:27.649 INFO current number of open files limit 65536 is already the maximum
14061 Sep 22 23:14:27.649 INFO current number of open files limit 65536 is already the maximum
14062 Sep 22 23:14:27.649 INFO current number of open files limit 65536 is already the maximum
14063 Sep 22 23:14:27.649 INFO Created new region file "/tmp/downstairs-w6kg4mJs/region.json"
14064 Sep 22 23:14:27.649 INFO Created new region file "/tmp/downstairs-L6GVuLbY/region.json"
14065 Sep 22 23:14:27.649 INFO Created new region file "/tmp/downstairs-VKXIreUh/region.json"
14066 Sep 22 23:14:27.649 INFO current number of open files limit 65536 is already the maximum
14067 Sep 22 23:14:27.649 INFO Opened existing region file "/tmp/downstairs-F0NkJkt9/region.json"
14068 Sep 22 23:14:27.649 INFO Database read version 1
14069 Sep 22 23:14:27.649 INFO Database write version 1
14070 Sep 22 23:14:27.650 INFO UUID: 5ebd7851-0a07-43d6-ba5e-89e1c5b26e32
14071 Sep 22 23:14:27.650 INFO Blocks per extent:5 Total Extents: 2
14072 Sep 22 23:14:27.650 INFO Crucible Version: Crucible Version: 0.0.1
14073 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14074 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14075 rustc: 1.70.0 stable x86_64-unknown-illumos
14076 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14077 Sep 22 23:14:27.650 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14078 Sep 22 23:14:27.650 INFO Using address: 127.0.0.1:51538, task: main
14079 Sep 22 23:14:27.651 INFO Repair listens on 127.0.0.1:0, task: repair
14080 Sep 22 23:14:27.651 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:62564, task: repair
14081 Sep 22 23:14:27.651 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:62564, task: repair
14082 Sep 22 23:14:27.651 INFO listening, local_addr: 127.0.0.1:62564, task: repair
14083 Sep 22 23:14:27.651 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:62564, task: repair
14084 Sep 22 23:14:27.651 INFO Using repair address: 127.0.0.1:62564, task: main
14085 Sep 22 23:14:27.651 INFO No SSL acceptor configured, task: main
14086 Sep 22 23:14:27.651 INFO current number of open files limit 65536 is already the maximum
14087 Sep 22 23:14:27.651 INFO Opened existing region file "/tmp/downstairs-kK2UANdj/region.json"
14088 Sep 22 23:14:27.651 INFO Database read version 1
14089 Sep 22 23:14:27.651 INFO Database write version 1
14090 Sep 22 23:14:27.651 INFO UUID: af195d7e-0034-4c88-a8c6-0f914578a66f
14091 Sep 22 23:14:27.651 INFO Blocks per extent:5 Total Extents: 2
14092 Sep 22 23:14:27.652 INFO current number of open files limit 65536 is already the maximum
14093 Sep 22 23:14:27.652 INFO Crucible Version: Crucible Version: 0.0.1
14094 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14095 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14096 rustc: 1.70.0 stable x86_64-unknown-illumos
14097 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14098 Sep 22 23:14:27.652 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14099 Sep 22 23:14:27.652 INFO Using address: 127.0.0.1:46072, task: main
14100 Sep 22 23:14:27.652 INFO Created new region file "/tmp/downstairs-pKPcS6al/region.json"
14101 Sep 22 23:14:27.652 INFO Repair listens on 127.0.0.1:0, task: repair
14102 Sep 22 23:14:27.652 INFO current number of open files limit 65536 is already the maximum
14103 Sep 22 23:14:27.652 INFO Opened existing region file "/tmp/downstairs-w6kg4mJs/region.json"
14104 Sep 22 23:14:27.652 INFO Database read version 1
14105 Sep 22 23:14:27.652 INFO Database write version 1
14106 Sep 22 23:14:27.652 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:56817, task: repair
14107 Sep 22 23:14:27.652 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:56817, task: repair
14108 Sep 22 23:14:27.652 INFO UUID: 892b80dc-b0fe-4fd6-9a73-88aa72d6d43b
14109 Sep 22 23:14:27.652 INFO listening, local_addr: 127.0.0.1:56817, task: repair
14110 Sep 22 23:14:27.652 INFO Blocks per extent:5 Total Extents: 2
14111 Sep 22 23:14:27.652 INFO Crucible Version: Crucible Version: 0.0.1
14112 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14113 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14114 rustc: 1.70.0 stable x86_64-unknown-illumos
14115 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14116 Sep 22 23:14:27.652 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14117 Sep 22 23:14:27.652 INFO Using address: 127.0.0.1:42286, task: main
14118 Sep 22 23:14:27.652 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:56817, task: repair
14119 Sep 22 23:14:27.652 INFO current number of open files limit 65536 is already the maximum
14120 Sep 22 23:14:27.652 INFO Opened existing region file "/tmp/downstairs-L6GVuLbY/region.json"
14121 Sep 22 23:14:27.652 INFO Using repair address: 127.0.0.1:56817, task: main
14122 Sep 22 23:14:27.652 INFO Database read version 1
14123 Sep 22 23:14:27.652 INFO Database write version 1
14124 Sep 22 23:14:27.652 INFO No SSL acceptor configured, task: main
14125 Sep 22 23:14:27.652 INFO current number of open files limit 65536 is already the maximum
14126 Sep 22 23:14:27.653 INFO Opened existing region file "/tmp/downstairs-VKXIreUh/region.json"
14127 Sep 22 23:14:27.653 INFO Database read version 1
14128 Sep 22 23:14:27.653 INFO Database write version 1
14129 Sep 22 23:14:27.653 INFO Repair listens on 127.0.0.1:0, task: repair
14130 Sep 22 23:14:27.653 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:49747, task: repair
14131 Sep 22 23:14:27.653 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:49747, task: repair
14132 Sep 22 23:14:27.653 INFO listening, local_addr: 127.0.0.1:49747, task: repair
14133 Sep 22 23:14:27.653 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:49747, task: repair
14134 Sep 22 23:14:27.653 INFO Using repair address: 127.0.0.1:49747, task: main
14135 Sep 22 23:14:27.653 INFO No SSL acceptor configured, task: main
14136 Sep 22 23:14:27.653 INFO UUID: 05359e3f-cf4a-4b15-afb8-9fe615a364f7
14137 Sep 22 23:14:27.653 INFO Blocks per extent:5 Total Extents: 2
14138 Sep 22 23:14:27.653 INFO Crucible Version: Crucible Version: 0.0.1
14139 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14140 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14141 rustc: 1.70.0 stable x86_64-unknown-illumos
14142 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14143 Sep 22 23:14:27.653 INFO current number of open files limit 65536 is already the maximum
14144 Sep 22 23:14:27.653 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14145 Sep 22 23:14:27.653 INFO Using address: 127.0.0.1:38443, task: main
14146 {Sep 22 23:14:27.654 INFO Created new region file "/tmp/downstairs-o4mV3CdV/region.json"
14147 "msg":"Upstairs starts","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.654040391Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14148 {"msg":"Crucible Version: BuildInfo {\n version: \"0.0.1\",\n git_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",\n git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\n git_branch: \"main\",\n rustc_semver: \"1.70.0\",\n rustc_channel: \"stable\",\n rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: \"x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\n}","v":0,"name":"crucible","level":30Sep 22 23:14:27.654 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
14149 ,"time":"2023-09-22T23:14:27.65412444Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14150 {The guest has requested activation
14151 "msg":"Upstairs <-> Downstairs Message Version: 4","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.654173753Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14152 {"msg":"Crucible stats registered with UUID: 44f3bfa1-82bf-4e34-9a0d-b9e0db308303","v":0,"name":"crucible","level":30,"Sep 22 23:14:27.654 INFO listening on 127.0.0.1:0, task: main
14153 time":"2023-09-22T23:14:27.654216893Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14154 {"msg":"Crucible 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 has session id: f210ac55-dab5-4449-83a0-f7a80ed3d221","v":0,"name":"crucible","level":30Sep 22 23:14:27.654 INFO Repair listens on 127.0.0.1:0, task: repair
14155 ,"Sep 22 23:14:27.654 INFO listening on 127.0.0.1:0, task: main
14156 Sep 22 23:14:27.654 INFO UUID: 24249434-76be-4dd4-92c1-0907ffa4ae22
14157 time":"2023-09-22T23:14:27.654268337Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14158 Sep 22 23:14:27.654 INFO Blocks per extent:5 Total Extents: 2
14159 Sep 22 23:14:27.654 INFO listening on 127.0.0.1:0, task: main
14160 Sep 22 23:14:27.654 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:43048, task: repair
14161 Sep 22 23:14:27.654 INFO Crucible Version: Crucible Version: 0.0.1
14162 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14163 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14164 rustc: 1.70.0 stable x86_64-unknown-illumos
14165 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14166 Sep 22 23:14:27.654 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14167 Sep 22 23:14:27.654 INFO UUID: 3a5e7f4a-366b-4efc-b559-fdf518e087fb
14168 Sep 22 23:14:27.654 INFO Using address: 127.0.0.1:38320, task: main
14169 Sep 22 23:14:27.654 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:43048, task: repair
14170 Sep 22 23:14:27.654 INFO Blocks per extent:5 Total Extents: 2
14171 {"msg":"[0] connecting to 127.0.0.1:47517","v":0,"name":"crucible","level":30Sep 22 23:14:27.654 INFO listening, local_addr: 127.0.0.1:43048, task: repair
14172 ,"time":"2023-09-22T23:14:27.654464485Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"0"}
14173 {"msg":"[1] connecting to 127.0.0.1:64593","v"Sep 22 23:14:27.654 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
14174 :0,"name":"crucible","level":30Sep 22 23:14:27.654 INFO Crucible Version: Crucible Version: 0.0.1
14175 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14176 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14177 rustc: 1.70.0 stable x86_64-unknown-illumos
14178 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14179 ,"time":"2023-09-22T23:14:27.654596777Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"1"}
14180 Sep 22 23:14:27.654 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14181 Sep 22 23:14:27.654 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:43048, task: repair
14182 Sep 22 23:14:27.654 INFO Using address: 127.0.0.1:61853, task: main
14183 {"msg":"[2] connecting to 127.0.0.1:46072","v":0,"name":"crucible","level":30Sep 22 23:14:27.654 INFO Using repair address: 127.0.0.1:43048, task: main
14184 ,"time":"2023-09-22T23:14:27.654686434Z","hostname":"Sep 22 23:14:27.654 INFO No SSL acceptor configured, task: main
14185 ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"2"}
14186 Sep 22 23:14:27.654 INFO Repair listens on 127.0.0.1:0, task: repair
14187 {"msg":"up_listen starts","v":0,"name":"crucible","level":30Sep 22 23:14:27.654 INFO current number of open files limit 65536 is already the maximum
14188 ,"time":"2023-09-22T23:14:27.654782358Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"task":"up_listen"}
14189 Sep 22 23:14:27.654 INFO Opened existing region file "/tmp/downstairs-pKPcS6al/region.json"
14190 {"msg":"Wait for all three downstairs to come online","v":0,"name":"Sep 22 23:14:27.654 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58528, task: repair
14191 crucible"Sep 22 23:14:27.654 INFO Database read version 1
14192 ,"level":30Sep 22 23:14:27.654 INFO Database write version 1
14193 ,"time":"2023-09-22T23:14:27.654843701Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14194 {"Sep 22 23:14:27.654 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58528, task: repair
14195 msg":"Flush timeout: 0.5","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.654890574Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14196 {"msg":"Sep 22 23:14:27.654 INFO listening, local_addr: 127.0.0.1:58528, task: repair
14197 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 active request set","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.654933878Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14198 Sep 22 23:14:27.654 INFO Repair listens on 127.0.0.1:0, task: repair
14199 Sep 22 23:14:27.655 INFO accepted connection from 127.0.0.1:51756, task: main
14200 Sep 22 23:14:27.655 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
14201 Sep 22 23:14:27.655 INFO Upstairs starts
14202 Sep 22 23:14:27.655 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58528, task: repair
14203 Sep 22 23:14:27.655 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:46245, task: repair
14204 Sep 22 23:14:27.655 INFO accepted connection from 127.0.0.1:53016, task: main
14205 Sep 22 23:14:27.655 INFO Using repair address: 127.0.0.1:58528, task: main
14206 Sep 22 23:14:27.655 INFO Crucible Version: BuildInfo {
14207 version: "0.0.1",
14208 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
14209 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
14210 git_branch: "main",
14211 rustc_semver: "1.70.0",
14212 rustc_channel: "stable",
14213 rustc_host_triple: "x86_64-unknown-illumos",
14214 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
14215 cargo_triple: "x86_64-unknown-illumos",
14216 debug: true,
14217 opt_level: 0,
14218 }
14219 Sep 22 23:14:27.655 INFO No SSL acceptor configured, task: main
14220 Sep 22 23:14:27.655 INFO Upstairs <-> Downstairs Message Version: 4
14221 Sep 22 23:14:27.655 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:46245, task: repair
14222 Sep 22 23:14:27.655 INFO Crucible stats registered with UUID: a2292a1d-0eb4-4a70-b0af-896ace339068
14223 Sep 22 23:14:27.655 INFO listening, local_addr: 127.0.0.1:46245, task: repair
14224 Sep 22 23:14:27.655 INFO Crucible a2292a1d-0eb4-4a70-b0af-896ace339068 has session id: e1751d6e-8c4b-406c-915e-1c5bc2910fa3
14225 Sep 22 23:14:27.655 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:46245, task: repair
14226 Sep 22 23:14:27.655 INFO accepted connection from 127.0.0.1:64660, task: main
14227 Sep 22 23:14:27.655 INFO Using repair address: 127.0.0.1:46245, task: main
14228 Sep 22 23:14:27.655 INFO No SSL acceptor configured, task: main
14229 The guest has requested activation
14230 {"msg":"[0] 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 looper connected"Sep 22 23:14:27.655 INFO listening on 127.0.0.1:0, task: main
14231 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.655523244Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"0"}
14232 {"msg":Sep 22 23:14:27.655 INFO listening on 127.0.0.1:0, task: main
14233 "[0] Proc runs for 127.0.0.1:47517 in state New","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.655580383Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14234 {"msg":"Sep 22 23:14:27.655 INFO listening on 127.0.0.1:0, task: main
14235 [1] 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 looper connected","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.655624217Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"1"}
14236 {"msg":"[1] Proc runs for 127.0.0.1:64593 in state New","v":0,"name":"crucible","level":30Sep 22 23:14:27.655 INFO [0] connecting to 127.0.0.1:36461, looper: 0
14237 ,"time":"2023-09-22T23:14:27.655664627Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
142382023-09-22T23:14:27.655ZINFOcrucible: [2] 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 looper connected looper = 2
142392023-09-22T23:14:27.655ZINFOcrucible: [2] Proc runs for 127.0.0.1:46072 in state New
14240 Sep 22 23:14:27.655 INFO [1] connecting to 127.0.0.1:48269, looper: 1
14241 Sep 22 23:14:27.655 INFO Connection request from 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 with version 4, task: proc
14242 Sep 22 23:14:27.655 INFO UUID: 189bf2c8-c3ff-478c-9ad5-fadafbe3e365
14243 Sep 22 23:14:27.655 INFO upstairs UpstairsConnection { upstairs_id: 44f3bfa1-82bf-4e34-9a0d-b9e0db308303, session_id: bc6a14b0-7427-4a2e-9f5b-e7289d18983f, gen: 1 } connected, version 4, task: proc
14244 Sep 22 23:14:27.655 INFO Blocks per extent:5 Total Extents: 2
14245 Sep 22 23:14:27.655 INFO [2] connecting to 127.0.0.1:38443, looper: 2
14246 Sep 22 23:14:27.655 INFO Crucible Version: Crucible Version: 0.0.1
14247 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14248 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14249 rustc: 1.70.0 stable x86_64-unknown-illumos
14250 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14251 Sep 22 23:14:27.655 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14252 {Sep 22 23:14:27.656 INFO Using address: 127.0.0.1:38499, task: main
14253 "msg":"Upstairs starts","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.656035216Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14254 Sep 22 23:14:27.656 INFO Connection request from 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 with version 4, task: proc
14255 {"msg":"Crucible Version: BuildInfo {\n version: \"0.0.1\",\n git_sha: \"Sep 22 23:14:27.656 INFO up_listen starts, task: up_listen
14256 ed48f294784d46ea7d4bb99336918b74358eca46\",\n git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\n git_branch: \"main\",\nSep 22 23:14:27.656 INFO upstairs UpstairsConnection { upstairs_id: 44f3bfa1-82bf-4e34-9a0d-b9e0db308303, session_id: bc6a14b0-7427-4a2e-9f5b-e7289d18983f, gen: 1 } connected, version 4, task: proc
14257 rustc_semver: \"1.70.0\",\n rustc_channel: \"stable\",\nSep 22 23:14:27.656 INFO Wait for all three downstairs to come online
14258 rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: \"x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\n}","vThe guest has requested activation
14259 Sep 22 23:14:27.656 INFO Flush timeout: 0.5
142602023-09-22T23:14:27.656ZINFOcrucible: Upstairs starts
14261 Sep 22 23:14:27.656 INFO listening on 127.0.0.1:0, task: main
14262 {Sep 22 23:14:27.656 INFO Connection request from 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 with version 4, task: proc
14263 "msg":"Crucible Version: BuildInfo {\n version: \"Sep 22 23:14:27.656 INFO a2292a1d-0eb4-4a70-b0af-896ace339068 active request set
14264 The guest has requested activation
14265 0.0.1\",\n git_sha: \"Sep 22 23:14:27.656 INFO upstairs UpstairsConnection { upstairs_id: 44f3bfa1-82bf-4e34-9a0d-b9e0db308303, session_id: bc6a14b0-7427-4a2e-9f5b-e7289d18983f, gen: 1 } connected, version 4, task: proc
14266 ed48f294784d46ea7d4bb99336918b74358eca46\",\n git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\n git_branch: \"main\",\n rustc_semver: \"1.70.0\",\n rustc_channel: \"stable\",\n rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: \"Sep 22 23:14:27.656 INFO listening on 127.0.0.1:0, task: main
14267 x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\n}","v":0,"name":"crucible","level":30,"Sep 22 23:14:27.656 INFO listening on 127.0.0.1:0, task: main
14268 time":"2023-09-22T23:14:27.656277906Z"Sep 22 23:14:27.656 INFO current number of open files limit 65536 is already the maximum
14269 ,"hostname":Sep 22 23:14:27.656 INFO listening on 127.0.0.1:0, task: main
14270 "ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14271 Sep 22 23:14:27.656 INFO Opened existing region file "/tmp/downstairs-o4mV3CdV/region.json"
14272 {"msg":"Upstairs <-> Downstairs Message Version: 4","v":0,"name":"crucible","level":30Sep 22 23:14:27.656 INFO Database read version 1
14273 Sep 22 23:14:27.656 INFO listening on 127.0.0.1:0, task: main
14274 Sep 22 23:14:27.656 INFO Repair listens on 127.0.0.1:0, task: repair
14275 ,"time":"2023-09-22T23:14:27.656350432Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301Sep 22 23:14:27.656 INFO Database write version 1
14276 }
14277 {"msg"Sep 22 23:14:27.656 INFO accepted connection from 127.0.0.1:52309, task: main
14278 :"Crucible stats registered with UUID: f3c3fd03-7420-43e1-a477-c989612ec069","v":0,"name":"crucible","level":30Sep 22 23:14:27.656 INFO listening on 127.0.0.1:0, task: main
14279 {,"time":"2023-09-22T23:14:27.6564084Z","hostname"":"msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14280 [0] 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 (bc6a14b0-7427-4a2e-9f5b-e7289d18983f) New New New ds_transition to WaitActive","v":{0,"name":"crucible"","msglevel""::"30Crucible f3c3fd03-7420-43e1-a477-c989612ec069 has session id: 54bf6715-0ba8-499a-b96f-9e9970f9520b"Sep 22 23:14:27.656 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:43831, task: repair
14281 ,"v":0,"name":"crucible","level":30Sep 22 23:14:27.656 INFO accepted connection from 127.0.0.1:34139, task: main
14282 ,"time":"2023-09-22T23:14:27.65644981Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal,"","time"pid":":43012023-09-22T23:14:27.65646628Z"},
14283 "hostname":"ip-10-150-1-74.us-west-2.compute.internal",{"pid":4301Sep 22 23:14:27.656 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:43831, task: repair
14284 }"
14285 msg":"[0] Transition from New to WaitActive"{,"v":0,""name"msg"::""crucible"[0] connecting to 127.0.0.1:53663",",level""v"::030,"name":"crucible","level":30Sep 22 23:14:27.656 INFO accepted connection from 127.0.0.1:45771, task: main
14286 ,Sep 22 23:14:27.656 INFO listening, local_addr: 127.0.0.1:43831, task: repair
14287 "time":"2023-09-22T23:14:27.656530868Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14288 {"msg":"[0] client is_active_req TRUE, promote! session bc6a14b0-7427-4a2e-9f5b-e7289d18983f","v":0,"name":"crucible","level":30Sep 22 23:14:27.656 INFO [0] a2292a1d-0eb4-4a70-b0af-896ace339068 looper connected, looper: 0
14289 Sep 22 23:14:27.656 INFO [0] Proc runs for 127.0.0.1:36461 in state New
14290 ,"time":"2023-09-22T23:14:27.65653442Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,","time"looper:"":"0"2023-09-22T23:14:27.65658531Z"}
14291 ,Sep 22 23:14:27.656 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:43831, task: repair
14292 {"hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","[1] connecting to 127.0.0.1:49337"pid":,4301"v":0},
14293 "name":"crucible","level":30{"msg":"[1] 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 (bc6a14b0-7427-4a2e-9f5b-e7289d18983f) WaitActive New New ds_transition to WaitActive","v":0,"name":"crucible"Sep 22 23:14:27.656 INFO [1] a2292a1d-0eb4-4a70-b0af-896ace339068 looper connected, looper: 1
14294 ,,"Sep 22 23:14:27.656 INFO Using repair address: 127.0.0.1:43831, task: main
14295 level"":time30":"2023-09-22T23:14:27.656753341Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"1"}
14296 Sep 22 23:14:27.656 INFO UpstairsConnection { upstairs_id: 44f3bfa1-82bf-4e34-9a0d-b9e0db308303, session_id: bc6a14b0-7427-4a2e-9f5b-e7289d18983f, gen: 1 } is now active (read-write)
14297 ,{Sep 22 23:14:27.656 INFO No SSL acceptor configured, task: main
14298 "Sep 22 23:14:27.656 INFO [1] Proc runs for 127.0.0.1:48269 in state New
14299 "msg"time":":"[2] connecting to 127.0.0.1:61853"2023-09-22T23:14:27.65678526Z","v",:"0hostname,""name":":"crucible",ip-10-150-1-74.us-west-2.compute.internal"","levelpid""::430130}
14300 {"msg":"[1] Transition from New to WaitActive","v":0,","timename""::""crucible","2023-09-22T23:14:27.656838971Z"level":,30"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"2"}
14301 {,"time":""2023-09-22T23:14:27.65686539Zmsg":"",up_listen starts"",hostname"":v"":0,"nameip-10-150-1-74.us-west-2.compute.internal"":,""pid":crucible4301","}level
14302 ":30{"msg":"[1] client is_active_req TRUE, promote! session bc6a14b0-7427-4a2e-9f5b-e7289d18983f","v":0,"name":"Sep 22 23:14:27.656 INFO UpstairsConnection { upstairs_id: 44f3bfa1-82bf-4e34-9a0d-b9e0db308303, session_id: bc6a14b0-7427-4a2e-9f5b-e7289d18983f, gen: 1 } is now active (read-write)
14303 ,"Sep 22 23:14:27.656 INFO [2] a2292a1d-0eb4-4a70-b0af-896ace339068 looper connected, looper: 2
14304 crucible"timeSep 22 23:14:27.656 INFO accepted connection from 127.0.0.1:42001, task: main
14305 ":0,"name":"crucible","level":30Sep 22 23:14:27.656 INFO [2] Proc runs for 127.0.0.1:38443 in state New
14306 ,"time":"2023-09-22T23:14:27.656981316Z",,""hostname":level"":30ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14307 {"msg",":"time":"Upstairs <-> Downstairs Message Version: 4"2023-09-22T23:14:27.657016236Z","v",":hostname"Sep 22 23:14:27.657 INFO UpstairsConnection { upstairs_id: 44f3bfa1-82bf-4e34-9a0d-b9e0db308303, session_id: bc6a14b0-7427-4a2e-9f5b-e7289d18983f, gen: 1 } is now active (read-write)
14308 Sep 22 23:14:27.657 INFO current number of open files limit 65536 is already the maximum
14309 :"0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible",",pid"":level"4301:30}
14310 {"msg":","time"Sep 22 23:14:27.657 INFO accepted connection from 127.0.0.1:52789, task: main
14311 :[2] 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 (bc6a14b0-7427-4a2e-9f5b-e7289d18983f) WaitActive WaitActive New ds_transition to WaitActive"Sep 22 23:14:27.657 INFO accepted connection from 127.0.0.1:41119, task: main
14312 2023-09-22T23:14:27.657061489Z"",","hostname"v":":0,"ip-10-150-1-74.us-west-2.compute.internal"name":","crucible"pid",":level"4301:30}
14313 {"msg":"Crucible stats registered with UUID: 6478de22-510f-412e-afa6-6a9c47f1d3b8",","time"v"::"0,"name2023-09-22T23:14:27.657107143Z"":",crucible"","hostname"level:"":30ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14314 {"msg":"[2] Transition from New to WaitActive",,""time"v"::"0,"name"2023-09-22T23:14:27.657135435Z":",crucible"","hostnameSep 22 23:14:27.657 INFO accepted connection from 127.0.0.1:48885, task: main
14315 ":level"Sep 22 23:14:27.657 INFO Created new region file "/tmp/downstairs-9O3BNKFH/region.json"
14316 Sep 22 23:14:27.657 INFO accepted connection from 127.0.0.1:43369, task: main
14317 ":"2023-09-22T23:14:27.656907966Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"task":"up_listen"}
14318 {Sep 22 23:14:27.657 INFO Connection request from a2292a1d-0eb4-4a70-b0af-896ace339068 with version 4, task: proc
14319 "msg":"Wait for all three downstairs to come online","v":0,"name":"crucible","level":30Sep 22 23:14:27.657 INFO accepted connection from 127.0.0.1:54706, task: main
14320 Sep 22 23:14:27.657 INFO upstairs UpstairsConnection { upstairs_id: a2292a1d-0eb4-4a70-b0af-896ace339068, session_id: 6eb51b2f-ab23-44be-a134-dc7d267c1b0b, gen: 1 } connected, version 4, task: proc
14321 ,"time":"2023-09-22T23:14:27.657237122Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14322 {"msg":"Flush timeout: 0.5","v":0,"name":"crucible","level":30:30,"time":"2023-09-22T23:14:27.657283835Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301},
14323 "time":"{2023-09-22T23:14:27.65728835Z",""msg"hostname:"":"f3c3fd03-7420-43e1-a477-c989612ec069 active request set"ip-10-150-1-74.us-west-2.compute.internal",",v""pid"::04301,"name":"}crucible
14324 ","level":30{"msg":"[2] client is_active_req TRUE, promote! session bc6a14b0-7427-4a2e-9f5b-e7289d18983f","v":0,"name":"crucible"Sep 22 23:14:27.657 INFO UUID: 41cf1bdf-fb18-466b-8427-1ff781e7de45
14325 ,,"Sep 22 23:14:27.657 INFO Connection request from a2292a1d-0eb4-4a70-b0af-896ace339068 with version 4, task: proc
14326 level":"30time":"2023-09-22T23:14:27.657337669Z"Sep 22 23:14:27.657 INFO Blocks per extent:5 Total Extents: 2
14327 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14328 ,"time":"2023-09-22T23:14:27.657366672Z"{,"hostname":"Sep 22 23:14:27.657 INFO upstairs UpstairsConnection { upstairs_id: a2292a1d-0eb4-4a70-b0af-896ace339068, session_id: 6eb51b2f-ab23-44be-a134-dc7d267c1b0b, gen: 1 } connected, version 4, task: proc
14329 ip-10-150-1-74.us-west-2.compute.internal"",msg":""pid":4301[0] f3c3fd03-7420-43e1-a477-c989612ec069 looper connected"}
14330 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.657411145Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"0"}
14331 {"Sep 22 23:14:27.657 INFO Crucible Version: Crucible Version: 0.0.1
14332 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14333 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14334 rustc: 1.70.0 stable x86_64-unknown-illumos
14335 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14336 msg":"[0] Proc runs for 127.0.0.1:53663 in state New","v":0,"name":"crucible","level":30Sep 22 23:14:27.657 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14337 ,"time":"2023-09-22T23:14:27.65746288Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14338 Sep 22 23:14:27.657 INFO Connection request from a2292a1d-0eb4-4a70-b0af-896ace339068 with version 4, task: proc
14339 {Sep 22 23:14:27.657 INFO Using address: 127.0.0.1:52962, task: main
14340 "msg":"[1] f3c3fd03-7420-43e1-a477-c989612ec069 looper connected","v":0,"name":"crucible","level":30Sep 22 23:14:27.657 INFO upstairs UpstairsConnection { upstairs_id: a2292a1d-0eb4-4a70-b0af-896ace339068, session_id: 6eb51b2f-ab23-44be-a134-dc7d267c1b0b, gen: 1 } connected, version 4, task: proc
14341 ,"time":"2023-09-22T23:14:27.657511448Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"1"}
143422023-09-22T23:14:27.657ZINFOcrucible: [1] Proc runs for 127.0.0.1:49337 in state New
14343 {"msg":"[2] f3c3fd03-7420-43e1-a477-c989612ec069 looper connected","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.65760441Z","hostname{":"ip-10-150-1-74.us-west-2.compute.internal","pid"":4301msg":","looper":"2"}
14344 Sep 22 23:14:27.657 INFO Connection request from 6478de22-510f-412e-afa6-6a9c47f1d3b8 with version 4, task: proc
14345 {[0] downstairs client at 127.0.0.1:47517 has UUID ae16b993-7324-48a0-87ab-26e33b2b0247"Sep 22 23:14:27.657 INFO [0] a2292a1d-0eb4-4a70-b0af-896ace339068 (6eb51b2f-ab23-44be-a134-dc7d267c1b0b) New New New ds_transition to WaitActive
14346 ,""v"msg"::"0,"name":"[2] Proc runs for 127.0.0.1:61853 in state New"crucible",",v":"0level",":name":30Sep 22 23:14:27.657 INFO [0] Transition from New to WaitActive
14347 Sep 22 23:14:27.657 INFO upstairs UpstairsConnection { upstairs_id: 6478de22-510f-412e-afa6-6a9c47f1d3b8, session_id: 2614894d-39a8-4be9-92fd-93de9a377555, gen: 1 } connected, version 4, task: proc
14348 "crucible","level":30,"time":"2023-09-22T23:14:27.657664603Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301Sep 22 23:14:27.657 INFO [0] client is_active_req TRUE, promote! session 6eb51b2f-ab23-44be-a134-dc7d267c1b0b
14349 },"
14350 time":"2023-09-22T23:14:27.657677888Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal"","msgpid""::"Sep 22 23:14:27.657 INFO Connection request from f3c3fd03-7420-43e1-a477-c989612ec069 with version 4, task: proc
14351 4301}
14352 [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ae16b993-7324-48a0-87ab-26e33b2b0247, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","level":30Sep 22 23:14:27.657 INFO upstairs UpstairsConnection { upstairs_id: f3c3fd03-7420-43e1-a477-c989612ec069, session_id: fe77f777-565a-4e57-8434-1353313e888f, gen: 1 } connected, version 4, task: proc
14353 ,"time":"2023-09-22T23:14:27.657731955Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid"":4301}
14354 ip-10-150-1-74.us-west-2.compute.internal","pid":4301{Sep 22 23:14:27.657 INFO Connection request from 6478de22-510f-412e-afa6-6a9c47f1d3b8 with version 4, task: proc
14355 "}msg":"
14356 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 WaitActive WaitActive WaitActive","v":0,"{name":"crucible","level":30"msg":"Sep 22 23:14:27.657 INFO [1] a2292a1d-0eb4-4a70-b0af-896ace339068 (6eb51b2f-ab23-44be-a134-dc7d267c1b0b) WaitActive New New ds_transition to WaitActive
14357 Crucible 6478de22-510f-412e-afa6-6a9c47f1d3b8 has session id: d7d9f4d8-e572-4413-a570-f9bc320c45b5"Sep 22 23:14:27.657 INFO upstairs UpstairsConnection { upstairs_id: 6478de22-510f-412e-afa6-6a9c47f1d3b8, session_id: 2614894d-39a8-4be9-92fd-93de9a377555, gen: 1 } connected, version 4, task: proc
14358 ,"v":0,"name":","crucible"time",":"level":302023-09-22T23:14:27.657789133Z","hostname":"Sep 22 23:14:27.657 INFO [1] Transition from New to WaitActive
14359 ip-10-150-1-74.us-west-2.compute.internal","pid":4301Sep 22 23:14:27.657 INFO Repair listens on 127.0.0.1:0, task: repair
14360 }
14361 ,"time":"{2023-09-22T23:14:27.657809126Z","hostname"":msg""Sep 22 23:14:27.657 INFO [1] client is_active_req TRUE, promote! session 6eb51b2f-ab23-44be-a134-dc7d267c1b0b
14362 ip-10-150-1-74.us-west-2.compute.internal:"","pid":[1] downstairs client at 127.0.0.1:64593 has UUID 08d2b418-826a-41f9-a799-580b989070134301Sep 22 23:14:27.657 INFO Connection request from f3c3fd03-7420-43e1-a477-c989612ec069 with version 4, task: proc
14363 }
14364 ","v":0,"name":"{crucible","level":30"msg":"[0] connecting to 127.0.0.1:49699","v":0,"name":"crucible","level":30Sep 22 23:14:27.657 INFO upstairs UpstairsConnection { upstairs_id: f3c3fd03-7420-43e1-a477-c989612ec069, session_id: fe77f777-565a-4e57-8434-1353313e888f, gen: 1 } connected, version 4, task: proc
14365 ,"time":"2023-09-22T23:14:27.657860834Z","hostname":",ip-10-150-1-74.us-west-2.compute.internal"","time"pid"::4301"}
14366 2023-09-22T23:14:27.657869828Z"Sep 22 23:14:27.657 INFO Connection request from 6478de22-510f-412e-afa6-6a9c47f1d3b8 with version 4, task: proc
14367 {,""hostname"msg"::""ip-10-150-1-74.us-west-2.compute.internal","pid":4301,Sep 22 23:14:27.657 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52248, task: repair
14368 "[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 08d2b418-826a-41f9-a799-580b98907013, encrypted: true, database_read_version: 1, database_write_version: 1 }"looper":","0"v":}0
14369 ,"name":"Sep 22 23:14:27.657 INFO upstairs UpstairsConnection { upstairs_id: 6478de22-510f-412e-afa6-6a9c47f1d3b8, session_id: 2614894d-39a8-4be9-92fd-93de9a377555, gen: 1 } connected, version 4, task: proc
14370 {crucible"","msg"level":":30[1] connecting to 127.0.0.1:39986","v":0,"name":"crucible","level":30Sep 22 23:14:27.657 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52248, task: repair
14371 ,",time"Sep 22 23:14:27.657 INFO [2] a2292a1d-0eb4-4a70-b0af-896ace339068 (6eb51b2f-ab23-44be-a134-dc7d267c1b0b) WaitActive WaitActive New ds_transition to WaitActive
14372 Sep 22 23:14:27.657 INFO Connection request from f3c3fd03-7420-43e1-a477-c989612ec069 with version 4, task: proc
14373 time"":":"2023-09-22T23:14:27.657944974Z"2023-09-22T23:14:27.657938312Z",","hostname"hostname:":""ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",",pid"":pid4301"Sep 22 23:14:27.657 INFO upstairs UpstairsConnection { upstairs_id: f3c3fd03-7420-43e1-a477-c989612ec069, session_id: fe77f777-565a-4e57-8434-1353313e888f, gen: 1 } connected, version 4, task: proc
14374 ,"Sep 22 23:14:27.657 INFO listening, local_addr: 127.0.0.1:52248, task: repair
14375 :Sep 22 23:14:27.657 INFO [2] Transition from New to WaitActive
14376 4301looper":"}1"
14377 }
14378 {"{msg":""msg":"44f3bfa1-82bf-4e34-9a0d-b9e0db308303 WaitActive WaitActive WaitActive","[2] connecting to 127.0.0.1:38320"v":,0","vname"":Sep 22 23:14:27.658 INFO [2] client is_active_req TRUE, promote! session 6eb51b2f-ab23-44be-a134-dc7d267c1b0b
14379 :"0,"crucible"name":","crucible"level",":level"30:30,"time":"2023-09-22T23:14:27.658048863Z",,""time":hostname"":"2023-09-22T23:14:27.658049804Z"ip-10-150-1-74.us-west-2.compute.internal",,""hostname"pid":":4301}
14380 ip-10-150-1-74.us-west-2.compute.internal","pid":{4301","msg"looperSep 22 23:14:27.658 INFO Current flush_numbers [0..12]: [0, 0]
14381 :"":"2"[2] downstairs client at 127.0.0.1:46072 has UUID af195d7e-0034-4c88-a8c6-0f914578a66f"},"
14382 v":0,"name":"crucible",{"level":30"msg":"up_listen starts","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.658110004Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14383 ,"time":"2023-09-22T23:14:27.658119355Z"{,"hostname":""msg"ip-10-150-1-74.us-west-2.compute.internal":","pid":4301,"task":"up_listen"}
14384 [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: af195d7e-0034-4c88-a8c6-0f914578a66f, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"{name":"crucible",""levelmsg":"":30Wait for all three downstairs to come online","v":0,"name":"crucible","level":30Sep 22 23:14:27.658 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52248, task: repair
14385 ,"time":"2023-09-22T23:14:27.658163557Z"Sep 22 23:14:27.658 INFO UpstairsConnection { upstairs_id: a2292a1d-0eb4-4a70-b0af-896ace339068, session_id: 6eb51b2f-ab23-44be-a134-dc7d267c1b0b, gen: 1 } is now active (read-write)
14386 ,,""timehostname""::""2023-09-22T23:14:27.658170387Z"ip-10-150-1-74.us-west-2.compute.internal",,""hostname":"pid":4301ip-10-150-1-74.us-west-2.compute.internal}"
14387 ,"pid":Sep 22 23:14:27.658 INFO Using repair address: 127.0.0.1:52248, task: main
14388 {4301"msg"}:"
14389 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 WaitActive WaitActive WaitActive","v":0,"{name":"crucible"Sep 22 23:14:27.658 INFO No SSL acceptor configured, task: main
14390 ,""level"msg:"30:"Flush timeout: 0.5","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.658233155Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14391 ,"time":"2023-09-22T23:14:27.658240728Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14392 {"msg":"6478de22-510f-412e-afa6-6a9c47f1d3b8 active request set","v":0,"name":"crucible","level":30Sep 22 23:14:27.658 INFO Downstairs has completed Negotiation, task: proc
14393 Sep 22 23:14:27.658 INFO UpstairsConnection { upstairs_id: a2292a1d-0eb4-4a70-b0af-896ace339068, session_id: 6eb51b2f-ab23-44be-a134-dc7d267c1b0b, gen: 1 } is now active (read-write)
14394 ,"time":"2023-09-22T23:14:27.65828727Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
143952023-09-22T23:14:27.658ZINFOcrucible: [0] 6478de22-510f-412e-afa6-6a9c47f1d3b8 looper connected looper = 0
14396 Sep 22 23:14:27.658 INFO UpstairsConnection { upstairs_id: 6478de22-510f-412e-afa6-6a9c47f1d3b8, session_id: 2614894d-39a8-4be9-92fd-93de9a377555, gen: 1 } is now active (read-write)
14397 {Sep 22 23:14:27.658 INFO current number of open files limit 65536 is already the maximum
14398 "msg":"[0] Proc runs for 127.0.0.1:49699 in state New","v":0,"name":"crucible","level":30Sep 22 23:14:27.658 INFO UpstairsConnection { upstairs_id: a2292a1d-0eb4-4a70-b0af-896ace339068, session_id: 6eb51b2f-ab23-44be-a134-dc7d267c1b0b, gen: 1 } is now active (read-write)
14399 ,"time":"2023-09-22T23:14:27.658399038Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14400 {"msg":"Sep 22 23:14:27.658 INFO UpstairsConnection { upstairs_id: f3c3fd03-7420-43e1-a477-c989612ec069, session_id: fe77f777-565a-4e57-8434-1353313e888f, gen: 1 } is now active (read-write)
14401 [1] 6478de22-510f-412e-afa6-6a9c47f1d3b8 looper connected","v":0,"name":"crucible","level":30{"msg":"[0] f3c3fd03-7420-43e1-a477-c989612ec069 (fe77f777-565a-4e57-8434-1353313e888f) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.658460181Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"1Sep 22 23:14:27.658 INFO UpstairsConnection { upstairs_id: 6478de22-510f-412e-afa6-6a9c47f1d3b8, session_id: 2614894d-39a8-4be9-92fd-93de9a377555, gen: 1 } is now active (read-write)
14402 "}
14403 ,"time":"2023-09-22T23:14:27.658486758Z"{,"hostname"Sep 22 23:14:27.658 INFO Created new region file "/tmp/downstairs-8awGcOg4/region.json"
14404 ":msg"":"ip-10-150-1-74.us-west-2.compute.internal"[1] Proc runs for 127.0.0.1:39986 in state New",","pid"v"::04301,"name":"}crucible
14405 ","level":30{"msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.658544518Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14406 Sep 22 23:14:27.658 INFO UpstairsConnection { upstairs_id: f3c3fd03-7420-43e1-a477-c989612ec069, session_id: fe77f777-565a-4e57-8434-1353313e888f, gen: 1 } is now active (read-write)
14407 {,""timemsg":"":"2023-09-22T23:14:27.658556622Z"[2] 6478de22-510f-412e-afa6-6a9c47f1d3b8 looper connected",","hostname"v"::"0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible",","pidlevel":"30:4301}
14408 {"msg":","time":"[0] client is_active_req TRUE, promote! session fe77f777-565a-4e57-8434-1353313e888f"2023-09-22T23:14:27.65860208Z",","v"hostname"::"Sep 22 23:14:27.658 INFO UpstairsConnection { upstairs_id: 6478de22-510f-412e-afa6-6a9c47f1d3b8, session_id: 2614894d-39a8-4be9-92fd-93de9a377555, gen: 1 } is now active (read-write)
14409 ip-10-150-1-74.us-west-2.compute.internal"0,","pidname""::"4301crucible",,""level"looper":":2"30}
14410 {"msg":"[2] Proc runs for 127.0.0.1:38320 in state New","v":0,","name":time"":"crucible","level"2023-09-22T23:14:27.658645195Z":30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14411 {,""time"msg"::""2023-09-22T23:14:27.658669363Z","hostname":"[1] f3c3fd03-7420-43e1-a477-c989612ec069 (fe77f777-565a-4e57-8434-1353313e888f) WaitActive New New ds_transition to WaitActive"ip-10-150-1-74.us-west-2.compute.internal"Sep 22 23:14:27.658 INFO UpstairsConnection { upstairs_id: f3c3fd03-7420-43e1-a477-c989612ec069, session_id: fe77f777-565a-4e57-8434-1353313e888f, gen: 1 } is now active (read-write)
14412 ,","pidv""::04301,"name":"}crucible
14413 ","level":30{"msg":"[0] 6478de22-510f-412e-afa6-6a9c47f1d3b8 (2614894d-39a8-4be9-92fd-93de9a377555) New New New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.65871412Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14414 ,"time{":""2023-09-22T23:14:27.658729363Zmsg":"","[1] Transition from New to WaitActive"hostname",":v"":0,"name":"ip-10-150-1-74.us-west-2.compute.internal"crucible",","pid"level"::430130}
14415 {"msg":"[0] Transition from New to WaitActive",",time"":"v":02023-09-22T23:14:27.658764149Z,""name":","crucible"hostname":,""level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14416 {"msg":"[1] client is_active_req TRUE, promote! session fe77f777-565a-4e57-8434-1353313e888f",,""time":"v":02023-09-22T23:14:27.658791046Z","name,"":"hostname":crucible"","level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14417 ,"time":"{2023-09-22T23:14:27.658818964Z","hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}[0] client is_active_req TRUE, promote! session 2614894d-39a8-4be9-92fd-93de9a377555"
14418 ,"v":0,"name":{"crucible","level"":msg30":"[2] f3c3fd03-7420-43e1-a477-c989612ec069 (fe77f777-565a-4e57-8434-1353313e888f) WaitActive WaitActive New ds_transition to WaitActive","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.658855835Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,}"
14419 time":"2023-09-22T23:14:27.658867958Z"Sep 22 23:14:27.658 INFO [0] downstairs client at 127.0.0.1:36461 has UUID 6f874cfa-c3be-49c7-9256-3b7afc29b96b
14420 ,{""hostname"msg:"":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14421 [1] 6478de22-510f-412e-afa6-6a9c47f1d3b8 (2614894d-39a8-4be9-92fd-93de9a377555) WaitActive New New ds_transition to WaitActive","v":0{,"name":"crucible"","msglevel""::"30[2] Transition from New to WaitActive","v":0,"name":"crucible","level":30Sep 22 23:14:27.658 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6f874cfa-c3be-49c7-9256-3b7afc29b96b, encrypted: true, database_read_version: 1, database_write_version: 1 }
14422 ,"time":"2023-09-22T23:14:27.658919767Z",,""time"hostname"::""2023-09-22T23:14:27.658926828Z"ip-10-150-1-74.us-west-2.compute.internal",","hostnamepid":"":4301ip-10-150-1-74.us-west-2.compute.internal}"
14423 ,"pid":4301}
14424 {{"msg":""msg":"[1] Transition from New to WaitActive","v":0,"[2] client is_active_req TRUE, promote! session fe77f777-565a-4e57-8434-1353313e888f"name":","crucible"v",":level"0:,"30name":"crucible","level":30,"time":","2023-09-22T23:14:27.658979842Z"time":","hostname":"2023-09-22T23:14:27.658983028Z","hostname":ip-10-150-1-74.us-west-2.compute.internal"","Sep 22 23:14:27.658 INFO a2292a1d-0eb4-4a70-b0af-896ace339068 WaitActive WaitActive WaitActive
14425 pid":ip-10-150-1-74.us-west-2.compute.internal4301","pid":}4301
14426 }
144272023-09-22T23:14:27.659ZINFOcrucible: [1] client is_active_req TRUE, promote! session 2614894d-39a8-4be9-92fd-93de9a377555
14428 {"msg":"[2] 6478de22-510f-412e-afa6-6a9c47f1d3b8 (2614894d-39a8-4be9-92fd-93de9a377555) WaitActive WaitActive New ds_transition to WaitActive","v":0,"name":"Sep 22 23:14:27.659 INFO [1] downstairs client at 127.0.0.1:48269 has UUID 4c5d3d8c-93c2-4478-a576-d240d19aa24c
14429 crucible","level":30,"time":"2023-09-22T23:14:27.659095248Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":Sep 22 23:14:27.659 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4c5d3d8c-93c2-4478-a576-d240d19aa24c, encrypted: true, database_read_version: 1, database_write_version: 1 }
14430 4301}
144312023-09-22T23:14:27.659ZINFOcrucible: [2] Transition from New to WaitActive
14432 {"msg":Sep 22 23:14:27.659 INFO a2292a1d-0eb4-4a70-b0af-896ace339068 WaitActive WaitActive WaitActive
14433 "[2] client is_active_req TRUE, promote! session 2614894d-39a8-4be9-92fd-93de9a377555","v":0,"name":"crucible","level":30Sep 22 23:14:27.659 INFO Current flush_numbers [0..12]: [0, 0]
14434 ,"time":"2023-09-22T23:14:27.659192279Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14435 {"msg":"[0] downstairs client at 127.0.0.1:49699 has UUID 23321635-9e40-4165-b7e2-14f978a59fa9","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.659235444Z","Sep 22 23:14:27.659 INFO [2] downstairs client at 127.0.0.1:38443 has UUID 05359e3f-cf4a-4b15-afb8-9fe615a364f7
14436 hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14437 {"msg":"[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 23321635-9e40-4165-b7e2-14f978a59fa9, encrypted: true, database_read_version: 1, database_write_version: 1 }"Sep 22 23:14:27.659 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 05359e3f-cf4a-4b15-afb8-9fe615a364f7, encrypted: true, database_read_version: 1, database_write_version: 1 }
14438 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.659302336Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14439 {"msg":"6478de22-510f-412e-afa6-6a9c47f1d3b8 WaitActive WaitActive WaitActive","v":0,"name":"crucible","level":30Sep 22 23:14:27.659 INFO a2292a1d-0eb4-4a70-b0af-896ace339068 WaitActive WaitActive WaitActive
14440 ,"{time":"2023-09-22T23:14:27.659347229Z","hostname":""msg"ip-10-150-1-74.us-west-2.compute.internal":,""pid":4301}
14441 [0] downstairs client at 127.0.0.1:53663 has UUID 7300b28a-83f2-47df-a265-634df7adc55c","v":{0,"name":""cruciblemsg"":","level":30[1] downstairs client at 127.0.0.1:39986 has UUID 7423012d-517a-406f-808c-d47b1ac32a6d","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.659389568Z",","time":hostname":""2023-09-22T23:14:27.659396914Z"ip-10-150-1-74.us-west-2.compute.internal",","hostname":"pid":4301ip-10-150-1-74.us-west-2.compute.internal","}pid"
14442 :4301}
14443 {"{msg":""msg":"[0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 7300b28a-83f2-47df-a265-634df7adc55c, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 7423012d-517a-406f-808c-d47b1ac32a6d, encrypted: true, database_read_version: 1, database_write_version: 1 }"name":","crucible"v",:"0level",:"30name":"crucible","level":30,"time":",2023-09-22T23:14:27.65946005Z"",Sep 22 23:14:27.659 INFO Current flush_numbers [0..12]: [0, 0]
14444 "time":hostname"":"2023-09-22T23:14:27.659463286Z"ip-10-150-1-74.us-west-2.compute.internal",","hostname":"pid":4301ip-10-150-1-74.us-west-2.compute.internal"},
14445 "pid":4301}{
14446 "msg":"{f3c3fd03-7420-43e1-a477-c989612ec069 WaitActive WaitActive WaitActive","v":"0msg",":"name":"crucible","level6478de22-510f-412e-afa6-6a9c47f1d3b8 WaitActive WaitActive WaitActive"":,30"v":0,"name":"crucible","level":30Sep 22 23:14:27.659 INFO Current flush_numbers [0..12]: [0, 0]
14447 ,"time":","2023-09-22T23:14:27.65952904Z"time":","hostname":"2023-09-22T23:14:27.65953469Z","ip-10-150-1-74.us-west-2.compute.internal"hostname",":pid"":4301ip-10-150-1-74.us-west-2.compute.internal"},
14448 "pid":4301}
14449 {"{msg":""msg":"[1] downstairs client at 127.0.0.1:49337 has UUID 16a7ce45-bb2e-4212-a780-53352bc92fab","v":0[2] downstairs client at 127.0.0.1:38320 has UUID 24249434-76be-4dd4-92c1-0907ffa4ae22",",name""v":":crucible0",","namelevel":"":crucible"30,"level":30,"time":","2023-09-22T23:14:27.659596485Z"time":","hostname":2023-09-22T23:14:27.659598351Z"",Sep 22 23:14:27.659 INFO Current flush_numbers [0..12]: [0, 0]
14450 "hostname":"ip-10-150-1-74.us-west-2.compute.internal"Sep 22 23:14:27.659 INFO current number of open files limit 65536 is already the maximum
14451 ,"ip-10-150-1-74.us-west-2.compute.internalpid":"4301,"pid}"
14452 :4301}
14453 {"msg{"Sep 22 23:14:27.659 INFO Opened existing region file "/tmp/downstairs-9O3BNKFH/region.json"
14454 :""msg":"Sep 22 23:14:27.659 INFO Downstairs has completed Negotiation, task: proc
14455 [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 24249434-76be-4dd4-92c1-0907ffa4ae22, encrypted: true, database_read_version: 1, database_write_version: 1 }"[1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 16a7ce45-bb2e-4212-a780-53352bc92fab, encrypted: true, database_read_version: 1, database_write_version: 1 }Sep 22 23:14:27.659 INFO Database read version 1
14456 ",,""v"v"::00,","name":"name"crucibleSep 22 23:14:27.659 INFO Database write version 1
14457 :""crucible",","level"level"::3030,"time":"2023-09-22T23:14:27.659694452Z",,""time"hostname"::""2023-09-22T23:14:27.659694239Z"ip-10-150-1-74.us-west-2.compute.internal",,"Sep 22 23:14:27.659 INFO Downstairs has completed Negotiation, task: proc
14458 pid":4301}
14459 {"msg":"f3c3fd03-7420-43e1-a477-c989612ec069 WaitActive WaitActive WaitActive","v":0,"name":"crucible","level":30"hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""pid":time":"43012023-09-22T23:14:27.659820448Z","}hostname":"
14460 ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14461 {{""msg":"msg":"6478de22-510f-412e-afa6-6a9c47f1d3b8 WaitActive WaitActive WaitActive[2] downstairs client at 127.0.0.1:61853 has UUID 3a5e7f4a-366b-4efc-b559-fdf518e087fb"","v":,"0v",":name":"0crucible",",name":""crucible"level":,"30level":30,"time":"2023-09-22T23:14:27.659873806Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14462 ,"time":"2023-09-22T23:14:27.659872602Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
144632023-09-22T23:14:27.659ZINFOcrucible: [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 3a5e7f4a-366b-4efc-b559-fdf518e087fb, encrypted: true, database_read_version: 1, database_write_version: 1 }
144642023-09-22T23:14:27.659ZINFOcrucible: f3c3fd03-7420-43e1-a477-c989612ec069 WaitActive WaitActive WaitActive
14465 Sep 22 23:14:27.660 INFO Current flush_numbers [0..12]: [0, 0]
14466 Sep 22 23:14:27.660 INFO Downstairs has completed Negotiation, task: proc
14467 Sep 22 23:14:27.660 INFO Current flush_numbers [0..12]: [0, 0]
14468 Sep 22 23:14:27.660 INFO Downstairs has completed Negotiation, task: proc
14469 Sep 22 23:14:27.660 INFO Downstairs has completed Negotiation, task: proc
14470 Sep 22 23:14:27.660 INFO Current flush_numbers [0..12]: [0, 0]
14471 Sep 22 23:14:27.660 INFO Current flush_numbers [0..12]: [0, 0]
14472 Sep 22 23:14:27.660 INFO Downstairs has completed Negotiation, task: proc
14473 Sep 22 23:14:27.660 INFO Current flush_numbers [0..12]: [0, 0]
14474 Sep 22 23:14:27.660 INFO Downstairs has completed Negotiation, task: proc
14475 Sep 22 23:14:27.660 INFO Downstairs has completed Negotiation, task: proc
14476 Sep 22 23:14:27.660 INFO Current flush_numbers [0..12]: [0, 0]
14477 Sep 22 23:14:27.660 INFO Downstairs has completed Negotiation, task: proc
14478 Sep 22 23:14:27.660 INFO UUID: fa1fe4a9-5c4a-42db-ac50-6ea8a2af770b
14479 Sep 22 23:14:27.660 INFO Blocks per extent:5 Total Extents: 2
14480 Sep 22 23:14:27.660 INFO Crucible Version: Crucible Version: 0.0.1
14481 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14482 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14483 rustc: 1.70.0 stable x86_64-unknown-illumos
14484 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14485 Sep 22 23:14:27.660 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14486 Sep 22 23:14:27.660 INFO Using address: 127.0.0.1:54668, task: main
14487 {"msg":"Sep 22 23:14:27.660 INFO Current flush_numbers [0..12]: [0, 0]
14488 [0] 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 (bc6a14b0-7427-4a2e-9f5b-e7289d18983f) WaitActive WaitActive WaitActive ds_transition to WaitQuorumSep 22 23:14:27.660 INFO [0] a2292a1d-0eb4-4a70-b0af-896ace339068 (6eb51b2f-ab23-44be-a134-dc7d267c1b0b) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
14489 ","v":0,"name":"crucible","level":30Sep 22 23:14:27.660 INFO Downstairs has completed Negotiation, task: proc
14490 Sep 22 23:14:27.660 INFO [0] Transition from WaitActive to WaitQuorum
14491 ,"time":"2023-09-22T23:14:27.660835843Z"Sep 22 23:14:27.660 WARN [0] new RM replaced this: None
14492 ,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
144932023-09-22T23:14:27.660ZINFOcrucible: [0] Transition from WaitActive to WaitQuorum
144942023-09-22T23:14:27.660ZWARNcrucible: [0] new RM replaced this: None
14495 {"msg":"[0] Starts reconcile loop","v":0,"name":"crucible","level":30Sep 22 23:14:27.660 INFO [0] Starts reconcile loop
14496 ,"time":"2023-09-22T23:14:27.660968778Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14497 {"msg":"[1] 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 (bc6a14b0-7427-4a2e-9f5b-e7289d18983f) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:14:27.660 INFO Downstairs has completed Negotiation, task: proc
14498 ,"time":"2023-09-22T23:14:27.661010469Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14499 {"msg":"[1] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:14:27.661 INFO current number of open files limit 65536 is already the maximum
14500 ,"time":"2023-09-22T23:14:27.661043683Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14501 {"msg":"Sep 22 23:14:27.661 INFO [1] a2292a1d-0eb4-4a70-b0af-896ace339068 (6eb51b2f-ab23-44be-a134-dc7d267c1b0b) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
14502 Sep 22 23:14:27.661 INFO Opened existing region file "/tmp/downstairs-8awGcOg4/region.json"
14503 [1] new RM replaced this: None",Sep 22 23:14:27.661 INFO Repair listens on 127.0.0.1:0, task: repair
14504 "v":0,"name":"crucible","level":40Sep 22 23:14:27.661 INFO Database read version 1
14505 Sep 22 23:14:27.661 INFO [1] Transition from WaitActive to WaitQuorum
14506 ,"time":"2023-09-22T23:14:27.661098482Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14507 Sep 22 23:14:27.661 INFO Database write version 1
14508 Sep 22 23:14:27.661 WARN [1] new RM replaced this: None
14509 {"msg":"[1] Starts reconcile loop","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.661136477Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301Sep 22 23:14:27.661 INFO [1] Starts reconcile loop
14510 }
14511 {"msg":"[2] 44f3bfa1-82bf-4e34-9a0d-b9e0db308303 (bc6a14b0-7427-4a2e-9f5b-e7289d18983f) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:14:27.661 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44247, task: repair
14512 ,"time":"2023-09-22T23:14:27.661183502Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14513 {"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"Sep 22 23:14:27.661 INFO [2] a2292a1d-0eb4-4a70-b0af-896ace339068 (6eb51b2f-ab23-44be-a134-dc7d267c1b0b) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
14514 2023-09-22T23:14:27.661224965Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14515 {Sep 22 23:14:27.661 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44247, task: repair
14516 "Sep 22 23:14:27.661 INFO [2] Transition from WaitActive to WaitQuorum
14517 msg":"[2] new RM replaced this: None","v":0,"name":"crucible","level":40Sep 22 23:14:27.661 WARN [2] new RM replaced this: None
14518 ,"time":"2023-09-22T23:14:27.661278328Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14519 {"msg":"[2] Starts reconcile loop","v":0,"name":"crucible","level":30Sep 22 23:14:27.661 INFO [2] Starts reconcile loop
14520 Sep 22 23:14:27.661 INFO listening, local_addr: 127.0.0.1:44247, task: repair
14521 ,"time":"2023-09-22T23:14:27.661311568Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
145222023-09-22T23:14:27.661ZINFOcrucible: [0] 127.0.0.1:47517 task reports connection:true
14523 {"msg":"44f3bfa1-82bf-4e34-9a0d-b9e0db308303 WaitQuorum WaitQuorum WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:14:27.661 INFO [0] 127.0.0.1:36461 task reports connection:true
14524 ,"time":"2023-09-22T23:14:27.661382869Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14525 {"msg":"[0]R flush_numbers: [0, 0]","v":0,"name":"crucible","level":30Sep 22 23:14:27.661 INFO a2292a1d-0eb4-4a70-b0af-896ace339068 WaitQuorum WaitQuorum WaitQuorum
14526 ,"time":"2023-09-22T23:14:27.661417597Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14527 {"msg":"[0]R generation: [0, 0]","v":0,"name":"Sep 22 23:14:27.661 INFO [0]R flush_numbers: [0, 0]
14528 crucible","level":30,"time":"Sep 22 23:14:27.661 INFO [0]R generation: [0, 0]
14529 2023-09-22T23:14:27.661461953Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14530 {"msg":Sep 22 23:14:27.661 INFO [0]R dirty: [false, false]
14531 "[0]R dirty: [false, false]","v":0,"name":"crucible","level":30Sep 22 23:14:27.661 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44247, task: repair
14532 Sep 22 23:14:27.661 INFO [1]R flush_numbers: [0, 0]
14533 ,"time":"2023-09-22T23:14:27.661503049Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14534 Sep 22 23:14:27.661 INFO [1]R generation: [0, 0]
14535 {"msg":"[1]R flush_numbers: [0, 0]","v":0,"name":"crucible","level":30Sep 22 23:14:27.661 INFO Using repair address: 127.0.0.1:44247, task: main
14536 Sep 22 23:14:27.661 INFO [1]R dirty: [false, false]
14537 ,"time":"2023-09-22T23:14:27.661547214Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14538 {"msg":"Sep 22 23:14:27.661 INFO [2]R flush_numbers: [0, 0]
14539 [1]R generation: [0, 0]"Sep 22 23:14:27.661 INFO No SSL acceptor configured, task: main
14540 ,"v":0,"name":"crucible","level":30Sep 22 23:14:27.661 INFO [2]R generation: [0, 0]
14541 ,"time":"2023-09-22T23:14:27.66159248Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14542 Sep 22 23:14:27.661 INFO [2]R dirty: [false, false]
14543 {"msg":"[1]R dirty: [false, false]","v":0,"name":"crucible","level":30Sep 22 23:14:27.661 INFO Max found gen is 1
14544 ,"time":"2023-09-22T23:14:27.661627568Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14545 {"msg":"[2]R flush_numbers: [0, 0]",Sep 22 23:14:27.661 INFO Generation requested: 1 >= found:1
14546 "v":0,"name":"crucible","level":30The guest has finished waiting for activation
14547 Sep 22 23:14:27.661 INFO Next flush: 1
14548 ,"time":"2023-09-22T23:14:27.661664928Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14549 {"msg":"[2]R generation: [0, 0]","v":0,"name":"crucible","level":30Sep 22 23:14:27.661 INFO All extents match
14550 ,"time":"2023-09-22T23:14:27.661705216Z","hostname":The guest has finished waiting for activation
14551 "Sep 22 23:14:27.661 INFO No downstairs repair required
14552 ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14553 {"msg":"[2]R dirty: [false, false]","v":0,"name":"crucible","level":30Sep 22 23:14:27.661 INFO No initial repair work was required
14554 ,"time":"2023-09-22T23:14:27.661745572Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","Sep 22 23:14:27.661 INFO Set Downstairs and Upstairs active
14555 pid":4301}
14556 {"msg":"Max found gen is 1","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.661786075Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",Sep 22 23:14:27.661 INFO a2292a1d-0eb4-4a70-b0af-896ace339068 is now active with session: 6eb51b2f-ab23-44be-a134-dc7d267c1b0b
14557 "pid":4301}
14558 {"msg":"Generation requested: 1 >= found:1","v":0,"name":"crucible","level":30Sep 22 23:14:27.661 INFO a2292a1d-0eb4-4a70-b0af-896ace339068 Set Active after no repair
14559 ,"time":"2023-09-22T23:14:27.6618255Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14560 {"Sep 22 23:14:27.661 INFO Notify all downstairs, region set compare is done.
14561 msg":"Next flush: 1","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.661860884Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14562 {"msg":"All extents match"Sep 22 23:14:27.661 INFO Set check for repair
14563 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.661898903Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
145642023-09-22T23:14:27.661ZINFOcrucible: No downstairs repair required
14565 {"msg":"No initial repair work was required","v":0,"name":"crucible","level":30The guest has finished waiting for activation
14566 ,"time":"2023-09-22T23:14:27.661961543Z","Sep 22 23:14:27.661 INFO [1] 127.0.0.1:48269 task reports connection:true
14567 hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14568 {"msg":"Set Downstairs and Upstairs active","v":0,"name":"crucible","level"Sep 22 23:14:27.661 INFO Upstairs starts
14569 :30Sep 22 23:14:27.662 INFO a2292a1d-0eb4-4a70-b0af-896ace339068 Active Active Active
14570 ,"time":"2023-09-22T23:14:27.662015326Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14571 {Sep 22 23:14:27.662 INFO Set check for repair
14572 "msg":"44f3bfa1-82bf-4e34-9a0d-b9e0db308303 is now active with session: bc6a14b0-7427-4a2e-9f5b-e7289d18983f","v":0,"name":"crucibleSep 22 23:14:27.662 INFO Crucible Version: BuildInfo {
14573 version: "0.0.1",
14574 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
14575 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
14576 git_branch: "main",
14577 rustc_semver: "1.70.0",
14578 rustc_channel: "stable",
14579 rustc_host_triple: "x86_64-unknown-illumos",
14580 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
14581 cargo_triple: "x86_64-unknown-illumos",
14582 debug: true,
14583 opt_level: 0,
14584 }
14585 ","level":30,"time":"2023-09-22T23:14:27.662065939Z","hostname":"Sep 22 23:14:27.662 INFO Upstairs <-> Downstairs Message Version: 4
14586 ip-10-150-1-74.us-west-2.compute.internal"Sep 22 23:14:27.662 INFO [2] 127.0.0.1:38443 task reports connection:true
14587 ,"pid":4301}
14588 {"msg":"44f3bfa1-82bf-4e34-9a0d-b9e0db308303 Set Active after no repair","v":0,"name":"crucible","level":30Sep 22 23:14:27.662 INFO a2292a1d-0eb4-4a70-b0af-896ace339068 Active Active Active
14589 ,"Sep 22 23:14:27.662 INFO Crucible stats registered with UUID: 0cb33d2c-901d-4ca7-884a-5b8b12a112c2
14590 time":"2023-09-22T23:14:27.662112672Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14591 Sep 22 23:14:27.662 INFO Set check for repair
14592 {"msg":"Notify all downstairs, region set compare is done.","v":0,"name":"crucible","level":30Sep 22 23:14:27.662 INFO Crucible 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 has session id: 40298b4a-cb20-4101-85d3-564bfb0231b1
14593 ,"time":"2023-09-22T23:14:27.662159875Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
145942023-09-22T23:14:27.662ZINFOcrucible: Set check for repair
14595 {"msg":"[1] 127.0.0.1:64593 task reports connection:true","v":0,"name":"crucible","level":30Sep 22 23:14:27.662 INFO [0] received reconcile message
14596 ,"time":"2023-09-22T23:14:27.662223391Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14597 {"msg":"44f3bfa1-82bf-4e34-9a0d-b9e0db308303 Active Active Active","v":0,Sep 22 23:14:27.662 INFO [0] All repairs completed, exit
14598 "name":"crucible","level":30,"time":"2023-09-22T23:14:27.662261504Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":Sep 22 23:14:27.662 INFO [0] Starts cmd_loop
14599 4301}
14600 {"msg":"Set check for repair","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.662298418Z"{,"hostname":"ip-10-150-1-74.us-west-2.compute.internal",""pid":4301msg"}:
14601 "{"msg":"[0] 6478de22-510f-412e-afa6-6a9c47f1d3b8 (2614894d-39a8-4be9-92fd-93de9a377555) WaitActive WaitActive WaitActive ds_transition to WaitQuorum"[2] 127.0.0.1:46072 task reports connection:true",","vv""::00,",name"":"name"crucible":,""cruciblelevel"":30,"level":30,"time":"2023-09-22T23:14:27.662345695Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14602 ,"{time":""msg":"2023-09-22T23:14:27.662348487Z","hostname":"44f3bfa1-82bf-4e34-9a0d-b9e0db308303 Active Active Active"Sep 22 23:14:27.662 INFO listening on 127.0.0.1:0, task: main
14603 ,ip-10-150-1-74.us-west-2.compute.internal""v":,0",pid"":name"4301:"crucible"},
14604 "level":30{"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,","time"name"::""crucible"2023-09-22T23:14:27.66239733Z,"",level"":hostname"30:"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14605 {"msg":"Set check for repair","Sep 22 23:14:27.662 INFO listening on 127.0.0.1:0, task: main
14606 ,v"":time"0:,""name":"2023-09-22T23:14:27.662418216Z"crucible",","hostname"level":":30ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14607 ,"time{":""2023-09-22T23:14:27.662448997Z"msg",:""hostname":"[0] new RM replaced this: None","v":ip-10-150-1-74.us-west-2.compute.internal"0,",pid"":name4301":"}
14608 crucible","level"{Sep 22 23:14:27.662 INFO listening on 127.0.0.1:0, task: main
14609 :"40msg":"[0] received reconcile message","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.662491224Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal",,""pid"time"::"4301}
14610 Sep 22 23:14:27.662 INFO UUID: f00f8ef1-bf2e-4a59-9818-84a4661ee77c
14611 2023-09-22T23:14:27.662501059Z"{",msg":""hostname":[0] Starts reconcile loop"","v":0,"ip-10-150-1-74.us-west-2.compute.internal"name",":pid"":crucible"4301,"level"}:
14612 Sep 22 23:14:27.662 INFO Blocks per extent:5 Total Extents: 2
14613 30{Sep 22 23:14:27.662 INFO [0] connecting to 127.0.0.1:51538, looper: 0
14614 "msg":"[0] All repairs completed, exit",",time"":v"":0,"2023-09-22T23:14:27.662550694Z"name":,""crucible"hostname,"":"level":30ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14615 {"msg":","time":"2023-09-22T23:14:27.662575384Z","hostname":"[1] 6478de22-510f-412e-afa6-6a9c47f1d3b8 (2614894d-39a8-4be9-92fd-93de9a377555) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum","v":ip-10-150-1-74.us-west-2.compute.internal"0,","pid"name:"4301:"}crucible
14616 ","level":{30"msg":"[0] Starts cmd_loop","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.662608215Z",,""hostnametime"":":"Sep 22 23:14:27.662 INFO Crucible Version: Crucible Version: 0.0.1
14617 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14618 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14619 rustc: 1.70.0 stable x86_64-unknown-illumos
14620 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14621 2023-09-22T23:14:27.662616756Z"ip-10-150-1-74.us-west-2.compute.internal,"","hostname"pid"::"4301}
14622 ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14623 {{""msgmsg""::Sep 22 23:14:27.662 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14624 ""[1] Transition from WaitActive to WaitQuorum"[1] received reconcile message",,""v"v"::00,","namename":"":crucible""crucible,"",levelSep 22 23:14:27.662 INFO [1] connecting to 127.0.0.1:38499, looper: 1
14625 ""level:Sep 22 23:14:27.662 INFO Using address: 127.0.0.1:63966, task: main
14626 30":30,"time":"2023-09-22T23:14:27.662700006Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","pid"2023-09-22T23:14:27.662697809Z":4301,"}
14627 hostname":"{ip-10-150-1-74.us-west-2.compute.internal"","msgpid""::4301"}
14628 {"msg":"[1] new RM replaced this: None","v":0,"name":"crucible","level":40Sep 22 23:14:27.662 INFO [2] connecting to 127.0.0.1:54668, looper: 2
14629 Sep 22 23:14:27.662 INFO [1] received reconcile message
14630 ,"time":"2023-09-22T23:14:27.662830106Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14631 {"msg":"Sep 22 23:14:27.662 INFO [1] All repairs completed, exit
14632 [1] Starts reconcile loop","v":0,"name":"crucible","level":30Sep 22 23:14:27.662 INFO [1] Starts cmd_loop
14633 Sep 22 23:14:27.662 INFO [2] received reconcile message
14634 Sep 22 23:14:27.662 INFO up_listen starts, task: up_listen
14635 Sep 22 23:14:27.662 INFO [2] All repairs completed, exit
14636 Sep 22 23:14:27.662 INFO [2] Starts cmd_loop
14637 Sep 22 23:14:27.662 INFO Wait for all three downstairs to come online
14638 ,"time":"2023-09-22T23:14:27.662880474Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14639 Sep 22 23:14:27.663 INFO Flush timeout: 0.5
14640 {"msg":"[2] 6478de22-510f-412e-afa6-6a9c47f1d3b8 (2614894d-39a8-4be9-92fd-93de9a377555) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum","v":0Sep 22 23:14:27.663 INFO Repair listens on 127.0.0.1:0, task: repair
14641 ,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.663064681Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal"The guest has finished waiting for activation
14642 ,"pid":4301}
14643 {"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.663113964Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301Sep 22 23:14:27.663 INFO current number of open files limit 65536 is already the maximum
14644 }
14645 Sep 22 23:14:27.663 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36339, task: repair
14646 {"msg":"{[2] new RM replaced this: None","v":0,"name":"crucible"","msglevel""::40"[0] f3c3fd03-7420-43e1-a477-c989612ec069 (fe77f777-565a-4e57-8434-1353313e888f) WaitActive WaitActive WaitActive ds_transition to WaitQuorum","v":0,"name":"crucible","level,"":time":"302023-09-22T23:14:27.663183004Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14647 {"msg":"[2] Starts reconcile loop","v":0,"name":"crucible","level,"":time"30:"Sep 22 23:14:27.663 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36339, task: repair
14648 2023-09-22T23:14:27.663199942Z","hostname":","time"ip-10-150-1-74.us-west-2.compute.internal":","pid":2023-09-22T23:14:27.663224553Z"4301,"hostname":"}
14649 ip-10-150-1-74.us-west-2.compute.internal{"msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","Sep 22 23:14:27.663 INFO Created new region file "/tmp/downstairs-vsXtRvAk/region.json"
14650 level":30Sep 22 23:14:27.663 INFO listening, local_addr: 127.0.0.1:36339, task: repair
14651 ","pid":4301}
14652 {,"time"":msg"":"2023-09-22T23:14:27.663276982Z","[0] 127.0.0.1:49699 task reports connection:true"hostname":","v":0,ip-10-150-1-74.us-west-2.compute.internal"","namepid":4301}
14653 ":"crucible","level":{30"msg":"[0] new RM replaced this: None","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:14:27.663317164Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301[1] All repairs completed, exit}
14654 ","time":",{"v"2023-09-22T23:14:27.663327324Z:"0,,""hostnamename"":":"crucible"","msg"level"::"ip-10-150-1-74.us-west-2.compute.internal"30,"pid":6478de22-510f-412e-afa6-6a9c47f1d3b8 WaitQuorum WaitQuorum WaitQuorum"4301,"v"}:
14655 0,"name":"crucible"Sep 22 23:14:27.663 INFO [2] 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 looper connected, looper: 2
14656 {,,"""timelevel""::"30msg":"2023-09-22T23:14:27.663359456Z"[0] Starts reconcile loop",","hostnamev""::0","name":"crucible",ip-10-150-1-74.us-west-2.compute.internal""time",:""pid",:2023-09-22T23:14:27.663380304Z4301"",}"
14657 hostname":"level":30ip-10-150-1-74.us-west-2.compute.internal"{,"pid":4301}
14658 "msg":"{"msg":"[0]R flush_numbers: [0, 0]","v":0,"name":"crucible"[1] Starts cmd_loop,"",level,"":v30"":time"0:Sep 22 23:14:27.663 INFO [2] Proc runs for 127.0.0.1:54668 in state New
14659 "2023-09-22T23:14:27.663404514Z",",name""time:"":,crucible""","2023-09-22T23:14:27.663422775Zlevel""hostname",:"30:hostname"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",,""pid"pid"::43014301}
14660 ,}"
14661 time{":""2023-09-22T23:14:27.663442661Zmsg"":","{hostname[0]R generation: [0, 0]"":","v":0",ip-10-150-1-74.us-west-2.compute.internal""name,"":pid""Sep 22 23:14:27.663 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36339, task: repair
14662 :crucible4301"msg},
14663 ""{level"::""30msg":"[2] received reconcile message","v":0,"name":"[1] f3c3fd03-7420-43e1-a477-c989612ec069 (fe77f777-565a-4e57-8434-1353313e888f) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum"crucible","level,"":,30v"":time":0","name":"2023-09-22T23:14:27.663489559Z"crucible",","hostname"level":,"":time":"30ip-10-150-1-74.us-west-2.compute.internal",2023-09-22T23:14:27.663499846Z""pid",:"4301hostname":}"
14664 ip-10-150-1-74.us-west-2.compute.internal","pid":{4301}
14665 "msg":"{Sep 22 23:14:27.663 INFO Using repair address: 127.0.0.1:36339, task: main
14666 "[0]R dirty: [false, false]"msg":",",v"[2] All repairs completed, exit:"0,,""v":name0",:""name":crucible"""Sep 22 23:14:27.663 INFO accepted connection from 127.0.0.1:41168, task: main
14667 time":"crucible,""2023-09-22T23:14:27.66351222Z"level,"":level"30:,30Sep 22 23:14:27.663 INFO No SSL acceptor configured, task: main
14668 "hostname":"ip-10-150-1-74.us-west-2.compute.internal",,","timetime""::"""pid":2023-09-22T23:14:27.663551622Z2023-09-22T23:14:27.663553388Z""4301,,""hostnamehostname"}:"
14669 :""ip-10-150-1-74.us-west-2.compute.internalip-10-150-1-74.us-west-2.compute.internal"",,""pidpid""::43014301{}}
14670 
14671 {"msg{""":msgmsg""::"""[2] Starts cmd_loop"[1]R flush_numbers: [0, 0]",",v""[1] Transition from WaitActive to WaitQuorum":v"0:,0,,"""namename""::""v"cruciblecrucible"",:",0"levellevel""::3030,"name":"crucible","level":30Sep 22 23:14:27.663 INFO accepted connection from 127.0.0.1:53809, task: main
14672 ,,""timetime""::"",2023-09-22T23:14:27.663610259Z2023-09-22T23:14:27.663610163Z""",,"time"hostname""hostname:"":":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:14:27.663614674Z,ip-10-150-1-74.us-west-2.compute.internal""pid,"":4301,"}
14673 hostname":""pidip-10-150-1-74.us-west-2.compute.internal"",:"4301pid":}4301
14674 }
14675 {"msg":"{[1]R generation: [0, 0]","v":0","msg"name"::""crucible"[1] new RM replaced this: None",",level"":v"30:0,"name":"crucible","level":40,"time":"2023-09-22T23:14:27.663678835Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14676 ,"time":"2023-09-22T23:14:27.66368312Z"{,"hostname":""ip-10-150-1-74.us-west-2.compute.internal","pid":4301msg":"}
14677 [1]R dirty: [false, false]","v":{0,"name":""msg":"crucible"[1] Starts reconcile loop",",level""v"::300,"name":"crucible","level":30,"time":","2023-09-22T23:14:27.66372369Z"time":","hostname":"2023-09-22T23:14:27.663726236Z","hostname"ip-10-150-1-74.us-west-2.compute.internal":","pid":4301ip-10-150-1-74.us-west-2.compute.internal"},"
14678 pid":4301}
14679 {"msg":"[2]R flush_numbers: [0, 0]","v":0,"name":"crucible","level":30{"msg":","time":"2023-09-22T23:14:27.663755066Z","[2] f3c3fd03-7420-43e1-a477-c989612ec069 (fe77f777-565a-4e57-8434-1353313e888f) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum"hostname":","v":0,"ip-10-150-1-74.us-west-2.compute.internal"name",":"pid":crucible"4301,"}
14680 level":30{"msg":"[2]R generation: [0, 0]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.663782161Z","hostname":","timeip-10-150-1-74.us-west-2.compute.internal"":,""pid":4301}
14681 2023-09-22T23:14:27.663778084Z"{,""hostnamemsg":"":"[2]R dirty: [false, false]","v":0,"ip-10-150-1-74.us-west-2.compute.internal"name":,""crucible"pid,""level"::304301}
14682 ,"time":"2023-09-22T23:14:27.663811524Z","hostname"{:"ip-10-150-1-74.us-west-2.compute.internal","pid":4301"}
14683 msg":"{"msg":"[2] Transition from WaitActive to WaitQuorum"Max found gen is 1",","v":v"0:,0",name":""crucible"name",":level"":30crucible","level":30,"time":"2023-09-22T23:14:27.663842118Z","hostname":"Sep 22 23:14:27.663 INFO accepted connection from 127.0.0.1:51241, task: main
14684 ip-10-150-1-74.us-west-2.compute.internal",,""timepid""::4301"}
14685 2023-09-22T23:14:27.663844931Z","{hostname":""msgip-10-150-1-74.us-west-2.compute.internal":"","pid":Generation requested: 1 >= found:14301","}v"
14686 {"msg":"[2] new RM replaced this: None","v":0,"name":"crucible","level":40:0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.663984039Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14687 ,"time":"{"2023-09-22T23:14:27.663994283Z"msg":","[2] Starts reconcile loop"hostname":","v":0,"name":"ip-10-150-1-74.us-west-2.compute.internalcrucible"",",level":"30pid":4301}
14688 ,"time":"2023-09-22T23:14:27.664025309Z","{hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":"4301msg":}"
14689 Next flush: 1"{,"v":0,"name""msg":":"crucible"[0] 127.0.0.1:53663 task reports connection:true",","levelv":"0:,"30name":"crucible","level":30,"time":"2023-09-22T23:14:27.664064987Z",",hostname":""time":"ip-10-150-1-74.us-west-2.compute.internal","pid":43012023-09-22T23:14:27.664063158Z"}
14690 ,"hostname":"{"msg":"ip-10-150-1-74.us-west-2.compute.internal","pid":f3c3fd03-7420-43e1-a477-c989612ec069 WaitQuorum WaitQuorum WaitQuorum"4301,"v":0},"
14691 name":"crucible","level":30{"msg":"All extents match","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.664100369Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14692 ,"time":"{2023-09-22T23:14:27.664109623Z",""hostnamemsg""::""[0]R flush_numbers: [0, 0]"ip-10-150-1-74.us-west-2.compute.internal",,""v":pid":04301,"name":"}crucible"
14693 ,"level":30{"msg":"No downstairs repair required","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.664148387Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14694 Sep 22 23:14:27.664 INFO [0] 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 looper connected, looper: 0
14695 {"msg":"[0]R generation: [0, 0]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.664159204Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"time",":"pid":43012023-09-22T23:14:27.664193186Z",}"
14696 hostname":"ip-10-150-1-74.us-west-2.compute.internal","{pid":4301}
14697 Sep 22 23:14:27.664 INFO [0] Proc runs for 127.0.0.1:51538 in state New
14698 "{msg":""msg":"No initial repair work was required"[0]R dirty: [false, false]",","v"v:"0:,"0name",":name":"Sep 22 23:14:27.664 INFO Upstairs starts
14699 crucible"","crucible"level",:"30level":30,"time":"2023-09-22T23:14:27.664257297Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14700 ,"time":"Sep 22 23:14:27.664 INFO Crucible Version: BuildInfo {
14701 version: "0.0.1",
14702 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
14703 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
14704 git_branch: "main",
14705 rustc_semver: "1.70.0",
14706 rustc_channel: "stable",
14707 rustc_host_triple: "x86_64-unknown-illumos",
14708 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
14709 cargo_triple: "x86_64-unknown-illumos",
14710 debug: true,
14711 opt_level: 0,
14712 }
14713 {2023-09-22T23:14:27.664258694Z""msg",":"hostname":"[1]R flush_numbers: [0, 0]","ip-10-150-1-74.us-west-2.compute.internal"v",:"0pid",Sep 22 23:14:27.664 INFO Upstairs <-> Downstairs Message Version: 4
14714 :"4301name":"}crucible"
14715 ,"level":30{"msg":"Sep 22 23:14:27.664 INFO [1] 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 looper connected, looper: 1
14716 Set Downstairs and Upstairs active","v":0,,""time":name"":"crucible2023-09-22T23:14:27.66431883Z"",,""level"Sep 22 23:14:27.664 INFO Crucible stats registered with UUID: 6e979958-69ff-4fc0-a35a-d73365a539fc
14717 hostname"::"30ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14718 {,"time":""msg":2023-09-22T23:14:27.664348283Z"","Sep 22 23:14:27.664 INFO Crucible 6e979958-69ff-4fc0-a35a-d73365a539fc has session id: 5b9ef146-85d4-44fa-b242-97d4416ee69a
14719 hostname":"Sep 22 23:14:27.664 INFO [1] Proc runs for 127.0.0.1:38499 in state New
14720 ip-10-150-1-74.us-west-2.compute.internal","[1]R generation: [0, 0]pid"":4301},
14721 "v":0,"{name":"crucible",""msg"level"::"306478de22-510f-412e-afa6-6a9c47f1d3b8 is now active with session: 2614894d-39a8-4be9-92fd-93de9a377555","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.664394724Z","hostname":,""time":"ip-10-150-1-74.us-west-2.compute.internal"2023-09-22T23:14:27.664403891Z",","pid"hostname"::4301"}
14722 ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14723 {{"msg":""msg":"[1]R dirty: [false, false]","v":06478de22-510f-412e-afa6-6a9c47f1d3b8 Set Active after no repair,""name":","crucible"v",:"0level",:"30name":"crucible","level":30,"time",:""time":2023-09-22T23:14:27.664458488Z"","hostname"2023-09-22T23:14:27.664455612Z":","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":ip-10-150-1-74.us-west-2.compute.internal"4301,"pid}"
14724 :4301}
14725 {{"msg":""[2]R flush_numbers: [0, 0]msg"":","v":0,"Notify all downstairs, region set compare is done.name"":","crucible"v",:"0level",:"30name":"crucible","level":30,"time":",2023-09-22T23:14:27.664516162Z""time",:Sep 22 23:14:27.664 INFO listening on 127.0.0.1:0, task: main
14726 ""hostname":"2023-09-22T23:14:27.66451958Z","ip-10-150-1-74.us-west-2.compute.internal"hostname":","pid":4301ip-10-150-1-74.us-west-2.compute.internal","pid}"
14727 :4301}
14728 {"{msg":"[2]R generation: [0, 0]"",msg"":v"":0Set check for repair",",name"":"v"crucible":,0","level"name"::30"crucible","level":30Sep 22 23:14:27.664 INFO listening on 127.0.0.1:0, task: main
14729 ,,""time":"time":"2023-09-22T23:14:27.664593054Z"2023-09-22T23:14:27.664590144Z","hostname":","hostnameip-10-150-1-74.us-west-2.compute.internal"",:""pid":4301ip-10-150-1-74.us-west-2.compute.internal",}"
14730 pid":4301}{
14731 "msg":"[1] 127.0.0.1:39986 task reports connection:true"{,"v":0,"name":""cruciblemsg"":,""level":30[2]R dirty: [false, false]Sep 22 23:14:27.664 INFO listening on 127.0.0.1:0, task: main
14732 ","v":0,"name":"crucible",","time":level"":2023-09-22T23:14:27.664659907Z"30,"hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14733 {"msg":","time":"6478de22-510f-412e-afa6-6a9c47f1d3b8 Active Active Active","2023-09-22T23:14:27.664675877Z"v":,0","hostname":name":""crucible","level"ip-10-150-1-74.us-west-2.compute.internal":,"30pid":4301}
14734 {"msg":","Max found gen is 1time":"","2023-09-22T23:14:27.664707797Z"v"Sep 22 23:14:27.664 INFO [0] connecting to 127.0.0.1:42286, looper: 0
14735 ,:"0hostname",:""name":"crucible"ip-10-150-1-74.us-west-2.compute.internal",",level"":pid30":4301}
14736 {"msg":"Set check for repair","v":0,"name":"crucible","level",:"30time":"2023-09-22T23:14:27.664745288Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14737 Sep 22 23:14:27.664 INFO Connection request from 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 with version 4, task: proc
14738 {,""time"msg:"":"2023-09-22T23:14:27.664762557Z"Generation requested: 1 >= found:1,""hostname":,""v":0,"ip-10-150-1-74.us-west-2.compute.internalname"":,""pid":crucible"4301,"level"}:
14739 Sep 22 23:14:27.664 INFO upstairs UpstairsConnection { upstairs_id: 0cb33d2c-901d-4ca7-884a-5b8b12a112c2, session_id: 7c8684a1-73e7-46fa-b1fc-40fcde8c5854, gen: 1 } connected, version 4, task: proc
14740 30{"msg":"[2] 127.0.0.1:38320 task reports connection:true","v":0,"name":"crucible","level":,30"time":"2023-09-22T23:14:27.664807121Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14741 {"msg":"Next flush: 1","v":0,"name",":"time"crucible:"","level2023-09-22T23:14:27.664820657Z"":,"30hostname":"ip-10-150-1-74.us-west-2.compute.internal","Sep 22 23:14:27.664 INFO [1] connecting to 127.0.0.1:52962, looper: 1
14742 pid":4301}
14743 ,"time":"{2023-09-22T23:14:27.66485084Z","hostname""msg:":""ip-10-150-1-74.us-west-2.compute.internal"6478de22-510f-412e-afa6-6a9c47f1d3b8 Active Active Active,""pid":,"4301v":0},
14744 "name":"crucible","level"{:30"msg":"All extents match","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.664893057Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid,"":time4301":"}
14745 Sep 22 23:14:27.664 INFO Connection request from 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 with version 4, task: proc
14746 2023-09-22T23:14:27.664901902Z"{"msg":"Set check for repair",","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14747 Sep 22 23:14:27.664 INFO upstairs UpstairsConnection { upstairs_id: 0cb33d2c-901d-4ca7-884a-5b8b12a112c2, session_id: 7c8684a1-73e7-46fa-b1fc-40fcde8c5854, gen: 1 } connected, version 4, task: proc
14748 {"vmsg":"":0No downstairs repair required",","name"v"::"0crucible,"",name":""crucible"level",:"30level":30,"time":"2023-09-22T23:14:27.66497969Z","hostname":","time":"ip-10-150-1-74.us-west-2.compute.internal","pid2023-09-22T23:14:27.664981011Z"":4301,"hostname":}"
14749 ip-10-150-1-74.us-west-2.compute.internal","pid":4301{}
14750 "msg":"[0] received reconcile message"{,"v":0,"name"":"msg":crucible"","level"No initial repair work was required":30,"v":0,"name":"crucible","level":30Sep 22 23:14:27.665 INFO [2] connecting to 127.0.0.1:63966, looper: 2
14751 ,,Sep 22 23:14:27.665 INFO Connection request from 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 with version 4, task: proc
14752 "time"":"time":"2023-09-22T23:14:27.665035625Z"2023-09-22T23:14:27.665030198Z",","hostname"hostname:"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal",,""pid"pid"::43014301}}Sep 22 23:14:27.665 INFO upstairs UpstairsConnection { upstairs_id: 0cb33d2c-901d-4ca7-884a-5b8b12a112c2, session_id: 7c8684a1-73e7-46fa-b1fc-40fcde8c5854, gen: 1 } connected, version 4, task: proc
14753 
14754 
14755 {"{msg":""msgSet Downstairs and Upstairs active"":","v":0[0] All repairs completed, exit",",name":""crucible"v",:"0level",:"30name":"crucible","level":30,,""time"time":":"2023-09-22T23:14:27.665105579Z"2023-09-22T23:14:27.665108854Z",","hostname":hostname"":"ip-10-150-1-74.us-west-2.compute.internal"ip-10-150-1-74.us-west-2.compute.internal,"","pid"pid"::43014301}}
14756 
14757 {"{msg":""msg":"[0] Starts cmd_loop","v":0,"f3c3fd03-7420-43e1-a477-c989612ec069 is now active with session: fe77f777-565a-4e57-8434-1353313e888f"name":"crucible",,""v"level:"0:,30"name":"crucible","level":30,Sep 22 23:14:27.665 INFO up_listen starts, task: up_listen
14758 ","time"time"::""2023-09-22T23:14:27.665171897Z"2023-09-22T23:14:27.665167893Z",","hostname":"hostname":"ip-10-150-1-74.us-west-2.compute.internal","ip-10-150-1-74.us-west-2.compute.internalpid"":,"4301pid":4301}
14759 Sep 22 23:14:27.665 INFO Wait for all three downstairs to come online
14760 }
14761 {{"msg":""msg":"f3c3fd03-7420-43e1-a477-c989612ec069 Set Active after no repair"[1] received reconcile message",",v""Sep 22 23:14:27.665 INFO Flush timeout: 0.5
14762 :0,"vname""::"0crucible",","name":"level"crucible":,"30level":30,"time":"2023-09-22T23:14:27.665252337Z","hostname":","timeip-10-150-1-74.us-west-2.compute.internal"",":pid":"4301}
14763 2023-09-22T23:14:27.665251324Z","{hostname":""msg":"ip-10-150-1-74.us-west-2.compute.internal"[1] All repairs completed, exit",","pid"v":0:,"4301name":"crucible","}level":
14764 30{"msg":"Notify all downstairs, region set compare is done.","v":0,",name"":"time":"crucible",2023-09-22T23:14:27.665297265Z"","level"hostname":":30ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14765 {"msg":"[1] Starts cmd_loop","v":0,"name":"crucible","level":30,"time":","time":"2023-09-22T23:14:27.665318577Z"2023-09-22T23:14:27.665329829Z","hostname":","ip-10-150-1-74.us-west-2.compute.internal"hostname",":pid":"4301}
14766 ip-10-150-1-74.us-west-2.compute.internal",{"pid"":msg":"4301[2] received reconcile message","v"}:
14767 0,"name":"crucible","level":30{"msg":","time":"Set check for repair"2023-09-22T23:14:27.665370684Z",","v"hostname":":0ip-10-150-1-74.us-west-2.compute.internal",,""name":"pid":crucible"4301,"}level"
14768 :30{"msg":"[2] All repairs completed, exit","v":0,"name":"crucible","level":30,"time":","2023-09-22T23:14:27.665397252Z"time":","hostname"2023-09-22T23:14:27.665407229Z:"","hostname"ip-10-150-1-74.us-west-2.compute.internal":,""pid":4301ip-10-150-1-74.us-west-2.compute.internal","pid"}:
14769 4301}
14770 {{"Sep 22 23:14:27.665 INFO [2] 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 (7c8684a1-73e7-46fa-b1fc-40fcde8c5854) New New New ds_transition to WaitActive
14771 msg"":"msg":"[2] Starts cmd_loop"[1] 127.0.0.1:49337 task reports connection:true",",v"":v"0:,"0name",:""name"crucible"Sep 22 23:14:27.665 INFO [2] Transition from New to WaitActive
14772 :"Sep 22 23:14:27.665 INFO [0] 6e979958-69ff-4fc0-a35a-d73365a539fc looper connected, looper: 0
14773 ,"crucible"level",:"30level":30,"time":"2023-09-22T23:14:27.66548Z","hostname":","time":ip-10-150-1-74.us-west-2.compute.internal"","pid"2023-09-22T23:14:27.665478704Z":4301,"hostname}"
14774 :"ip-10-150-1-74.us-west-2.compute.internal","Sep 22 23:14:27.665 INFO [0] Proc runs for 127.0.0.1:42286 in state New
14775 {pid":"4301msg":"}
14776 f3c3fd03-7420-43e1-a477-c989612ec069 Active Active Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.665535593Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14777 {"msg":"Set check for repair","v":0,"name":"crucible","level":30Sep 22 23:14:27.665 INFO [0] 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 (7c8684a1-73e7-46fa-b1fc-40fcde8c5854) New New WaitActive ds_transition to WaitActive
14778 ,"time":"2023-09-22T23:14:27.665575592Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14779 {"Sep 22 23:14:27.665 INFO [0] Transition from New to WaitActive
14780 msg":"[2] 127.0.0.1:61853 task reports connection:true","v":0,"name":"crucible","level":Sep 22 23:14:27.665 INFO [1] 6e979958-69ff-4fc0-a35a-d73365a539fc looper connected, looper: 1
14781 30,"time":"2023-09-22T23:14:27.665635173Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14782 {"msg":"Sep 22 23:14:27.665 INFO [1] Proc runs for 127.0.0.1:52962 in state New
14783 f3c3fd03-7420-43e1-a477-c989612ec069 Active Active Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.665684497Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14784 Sep 22 23:14:27.665 INFO [1] 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 (7c8684a1-73e7-46fa-b1fc-40fcde8c5854) WaitActive New WaitActive ds_transition to WaitActive
14785 {"msg":"Set check for repair","v":0,"name":"crucible","level":30Sep 22 23:14:27.665 INFO [1] Transition from New to WaitActive
14786 ,"time":"2023-09-22T23:14:27.665723244Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14787 {Sep 22 23:14:27.665 INFO [2] 6e979958-69ff-4fc0-a35a-d73365a539fc looper connected, looper: 2
14788 "msg":"[0] received reconcile message","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.665770524Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14789 {"msg":"[0] All repairs completed, exit","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.665823274Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","Sep 22 23:14:27.665 INFO [2] Proc runs for 127.0.0.1:63966 in state New
14790 pid":4301}
14791 {"msg":"[0] Starts cmd_loop","v":0,"name":"crucible","level":30The guest has requested activation
14792 ,"time":"2023-09-22T23:14:27.665877784Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
147932023-09-22T23:14:27.665ZINFOcrucible: [1] received reconcile message
14794 Sep 22 23:14:27.665 INFO 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 active request set
147952023-09-22T23:14:27.665ZINFOcrucible: [1] All repairs completed, exit
147962023-09-22T23:14:27.665ZINFOcrucible: [1] Starts cmd_loop
14797 {"msg":"[2] received reconcile message","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.666047323Z","Sep 22 23:14:27.666 INFO [0] received activate with gen 1
14798 hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
14799 {"msg":"[2] All repairs completed, exit","v":Sep 22 23:14:27.666 INFO [0] client got ds_active_rx, promote! session 7c8684a1-73e7-46fa-b1fc-40fcde8c5854
14800 0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.666094111Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal"Sep 22 23:14:27.666 INFO accepted connection from 127.0.0.1:44001, task: main
14801 ,"pid":4301}
148022023-09-22T23:14:27.666ZINFOcrucible: [2] Starts cmd_loop
14803 Sep 22 23:14:27.666 INFO [1] received activate with gen 1
14804 Sep 22 23:14:27.666 INFO accepted connection from 127.0.0.1:36027, task: main
14805 Sep 22 23:14:27.666 INFO [1] client got ds_active_rx, promote! session 7c8684a1-73e7-46fa-b1fc-40fcde8c5854
14806 Sep 22 23:14:27.666 INFO [2] received activate with gen 1
14807 Sep 22 23:14:27.666 INFO [2] client got ds_active_rx, promote! session 7c8684a1-73e7-46fa-b1fc-40fcde8c5854
14808 Sep 22 23:14:27.666 INFO accepted connection from 127.0.0.1:57952, task: main
14809 Sep 22 23:14:27.666 INFO Connection request from 6e979958-69ff-4fc0-a35a-d73365a539fc with version 4, task: proc
14810 Sep 22 23:14:27.666 INFO [2] downstairs client at 127.0.0.1:54668 has UUID fa1fe4a9-5c4a-42db-ac50-6ea8a2af770b
14811 Sep 22 23:14:27.666 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: fa1fe4a9-5c4a-42db-ac50-6ea8a2af770b, encrypted: true, database_read_version: 1, database_write_version: 1 }
14812 Sep 22 23:14:27.666 INFO upstairs UpstairsConnection { upstairs_id: 6e979958-69ff-4fc0-a35a-d73365a539fc, session_id: 9abbc7cc-1c99-4a3a-9498-4633122695a8, gen: 1 } connected, version 4, task: proc
14813 Sep 22 23:14:27.666 INFO 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 WaitActive WaitActive WaitActive
14814 Sep 22 23:14:27.666 INFO [0] downstairs client at 127.0.0.1:51538 has UUID 5ebd7851-0a07-43d6-ba5e-89e1c5b26e32
14815 test test::integration_test_guest_zero_length_io ... ok
14816 Sep 22 23:14:27.666 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 5ebd7851-0a07-43d6-ba5e-89e1c5b26e32, encrypted: true, database_read_version: 1, database_write_version: 1 }
14817 Sep 22 23:14:27.666 INFO Connection request from 6e979958-69ff-4fc0-a35a-d73365a539fc with version 4, task: proc
14818 Sep 22 23:14:27.667 INFO upstairs UpstairsConnection { upstairs_id: 6e979958-69ff-4fc0-a35a-d73365a539fc, session_id: 9abbc7cc-1c99-4a3a-9498-4633122695a8, gen: 1 } connected, version 4, task: proc
14819 Sep 22 23:14:27.667 INFO 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 WaitActive WaitActive WaitActive
14820 Sep 22 23:14:27.667 INFO Connection request from 6e979958-69ff-4fc0-a35a-d73365a539fc with version 4, task: proc
14821 Sep 22 23:14:27.667 INFO upstairs UpstairsConnection { upstairs_id: 6e979958-69ff-4fc0-a35a-d73365a539fc, session_id: 9abbc7cc-1c99-4a3a-9498-4633122695a8, gen: 1 } connected, version 4, task: proc
14822 Sep 22 23:14:27.667 INFO [1] downstairs client at 127.0.0.1:38499 has UUID 189bf2c8-c3ff-478c-9ad5-fadafbe3e365
14823 Sep 22 23:14:27.667 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 189bf2c8-c3ff-478c-9ad5-fadafbe3e365, encrypted: true, database_read_version: 1, database_write_version: 1 }
14824 Sep 22 23:14:27.667 INFO 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 WaitActive WaitActive WaitActive
14825 Sep 22 23:14:27.667 INFO [0] 6e979958-69ff-4fc0-a35a-d73365a539fc (9abbc7cc-1c99-4a3a-9498-4633122695a8) New New New ds_transition to WaitActive
14826 Sep 22 23:14:27.667 INFO [0] Transition from New to WaitActive
14827 Sep 22 23:14:27.667 INFO current number of open files limit 65536 is already the maximum
14828 Sep 22 23:14:27.667 INFO Current flush_numbers [0..12]: [0, 0]
14829 Sep 22 23:14:27.667 INFO [1] 6e979958-69ff-4fc0-a35a-d73365a539fc (9abbc7cc-1c99-4a3a-9498-4633122695a8) WaitActive New New ds_transition to WaitActive
14830 Sep 22 23:14:27.667 INFO [1] Transition from New to WaitActive
14831 Sep 22 23:14:27.667 INFO Created new region file "/tmp/downstairs-n8rm3T5A/region.json"
14832 Sep 22 23:14:27.667 INFO [2] 6e979958-69ff-4fc0-a35a-d73365a539fc (9abbc7cc-1c99-4a3a-9498-4633122695a8) WaitActive WaitActive New ds_transition to WaitActive
14833 Sep 22 23:14:27.667 INFO [2] Transition from New to WaitActive
14834 Sep 22 23:14:27.667 INFO Downstairs has completed Negotiation, task: proc
14835 The guest has requested activation
14836 Sep 22 23:14:27.667 INFO 6e979958-69ff-4fc0-a35a-d73365a539fc active request set
14837 Sep 22 23:14:27.667 INFO [0] received activate with gen 1
14838 Sep 22 23:14:27.667 INFO Current flush_numbers [0..12]: [0, 0]
14839 Sep 22 23:14:27.668 INFO [0] client got ds_active_rx, promote! session 9abbc7cc-1c99-4a3a-9498-4633122695a8
14840 test test::integration_test_io_span_out_of_range ... ok
14841 Sep 22 23:14:27.668 INFO [1] received activate with gen 1
14842 test test::integration_test_io_out_of_range ... ok
14843 Sep 22 23:14:27.668 INFO [1] client got ds_active_rx, promote! session 9abbc7cc-1c99-4a3a-9498-4633122695a8
14844 Sep 22 23:14:27.668 INFO [2] received activate with gen 1
14845 Sep 22 23:14:27.668 INFO [2] client got ds_active_rx, promote! session 9abbc7cc-1c99-4a3a-9498-4633122695a8
14846 Sep 22 23:14:27.668 INFO Downstairs has completed Negotiation, task: proc
14847 Sep 22 23:14:27.668 INFO current number of open files limit 65536 is already the maximum
14848 Sep 22 23:14:27.668 INFO current number of open files limit 65536 is already the maximum
14849 Sep 22 23:14:27.668 INFO Created new region file "/tmp/downstairs-IYCeuQqy/region.json"
14850 Sep 22 23:14:27.668 INFO Current flush_numbers [0..12]: [0, 0]
14851 Sep 22 23:14:27.668 INFO Created new region file "/tmp/downstairs-IS2nyztY/region.json"
14852 Sep 22 23:14:27.668 INFO [0] downstairs client at 127.0.0.1:42286 has UUID 892b80dc-b0fe-4fd6-9a73-88aa72d6d43b
14853 Sep 22 23:14:27.668 INFO Downstairs has completed Negotiation, task: proc
14854 Sep 22 23:14:27.668 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 892b80dc-b0fe-4fd6-9a73-88aa72d6d43b, encrypted: true, database_read_version: 1, database_write_version: 1 }
14855 Sep 22 23:14:27.669 INFO 6e979958-69ff-4fc0-a35a-d73365a539fc WaitActive WaitActive WaitActive
14856 Sep 22 23:14:27.669 INFO [1] downstairs client at 127.0.0.1:52962 has UUID 41cf1bdf-fb18-466b-8427-1ff781e7de45
14857 Sep 22 23:14:27.669 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 41cf1bdf-fb18-466b-8427-1ff781e7de45, encrypted: true, database_read_version: 1, database_write_version: 1 }
14858 Sep 22 23:14:27.669 INFO [2] 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 (7c8684a1-73e7-46fa-b1fc-40fcde8c5854) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
14859 Sep 22 23:14:27.669 INFO [2] Transition from WaitActive to WaitQuorum
14860 Sep 22 23:14:27.669 WARN [2] new RM replaced this: None
14861 Sep 22 23:14:27.669 INFO 6e979958-69ff-4fc0-a35a-d73365a539fc WaitActive WaitActive WaitActive
14862 Sep 22 23:14:27.669 INFO [2] Starts reconcile loop
14863 Sep 22 23:14:27.669 INFO [2] downstairs client at 127.0.0.1:63966 has UUID f00f8ef1-bf2e-4a59-9818-84a4661ee77c
14864 Sep 22 23:14:27.669 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f00f8ef1-bf2e-4a59-9818-84a4661ee77c, encrypted: true, database_read_version: 1, database_write_version: 1 }
14865 Sep 22 23:14:27.669 INFO [0] 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 (7c8684a1-73e7-46fa-b1fc-40fcde8c5854) WaitActive WaitActive WaitQuorum ds_transition to WaitQuorum
14866 Sep 22 23:14:27.669 INFO [0] Transition from WaitActive to WaitQuorum
14867 Sep 22 23:14:27.669 INFO 6e979958-69ff-4fc0-a35a-d73365a539fc WaitActive WaitActive WaitActive
14868 Sep 22 23:14:27.669 WARN [0] new RM replaced this: None
14869 Sep 22 23:14:27.669 INFO [0] Starts reconcile loop
14870 Sep 22 23:14:27.669 INFO [1] 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 (7c8684a1-73e7-46fa-b1fc-40fcde8c5854) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum
14871 Sep 22 23:14:27.669 INFO [1] Transition from WaitActive to WaitQuorum
14872 Sep 22 23:14:27.669 WARN [1] new RM replaced this: None
14873 Sep 22 23:14:27.669 INFO [1] Starts reconcile loop
14874 Sep 22 23:14:27.669 INFO Current flush_numbers [0..12]: [0, 0]
14875 Sep 22 23:14:27.669 INFO [2] 127.0.0.1:54668 task reports connection:true
14876 Sep 22 23:14:27.669 INFO 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 WaitQuorum WaitQuorum WaitQuorum
14877 Sep 22 23:14:27.669 INFO [0]R flush_numbers: [0, 0]
14878 Sep 22 23:14:27.669 INFO [0]R generation: [0, 0]
14879 Sep 22 23:14:27.669 INFO [0]R dirty: [false, false]
14880 Sep 22 23:14:27.669 INFO [1]R flush_numbers: [0, 0]
14881 Sep 22 23:14:27.669 INFO [1]R generation: [0, 0]
14882 Sep 22 23:14:27.669 INFO Downstairs has completed Negotiation, task: proc
14883 Sep 22 23:14:27.669 INFO [1]R dirty: [false, false]
14884 Sep 22 23:14:27.669 INFO [2]R flush_numbers: [0, 0]
14885 Sep 22 23:14:27.669 INFO [2]R generation: [0, 0]
14886 Sep 22 23:14:27.669 INFO [2]R dirty: [false, false]
14887 Sep 22 23:14:27.669 INFO Max found gen is 1
14888 Sep 22 23:14:27.669 INFO Generation requested: 1 >= found:1
14889 Sep 22 23:14:27.669 INFO Next flush: 1
14890 Sep 22 23:14:27.669 INFO All extents match
14891 Sep 22 23:14:27.669 INFO current number of open files limit 65536 is already the maximum
14892 Sep 22 23:14:27.669 INFO No downstairs repair required
14893 Sep 22 23:14:27.669 INFO No initial repair work was required
14894 Sep 22 23:14:27.669 INFO Opened existing region file "/tmp/downstairs-vsXtRvAk/region.json"
14895 Sep 22 23:14:27.669 INFO Set Downstairs and Upstairs active
14896 Sep 22 23:14:27.669 INFO Database read version 1
14897 Sep 22 23:14:27.669 INFO Current flush_numbers [0..12]: [0, 0]
14898 Sep 22 23:14:27.669 INFO Database write version 1
14899 Sep 22 23:14:27.669 INFO 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 is now active with session: 7c8684a1-73e7-46fa-b1fc-40fcde8c5854
14900 Sep 22 23:14:27.670 INFO 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 Set Active after no repair
14901 Sep 22 23:14:27.670 INFO Notify all downstairs, region set compare is done.
14902 Sep 22 23:14:27.670 INFO Set check for repair
14903 Sep 22 23:14:27.670 INFO [0] 127.0.0.1:51538 task reports connection:true
14904 Sep 22 23:14:27.670 INFO 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 Active Active Active
14905 Sep 22 23:14:27.670 INFO Downstairs has completed Negotiation, task: proc
14906 Sep 22 23:14:27.670 INFO Set check for repair
14907 Sep 22 23:14:27.670 INFO [1] 127.0.0.1:38499 task reports connection:true
14908 Sep 22 23:14:27.670 INFO 0cb33d2c-901d-4ca7-884a-5b8b12a112c2 Active Active Active
14909 Sep 22 23:14:27.670 INFO Set check for repair
14910 Sep 22 23:14:27.670 INFO [0] received reconcile message
14911 Sep 22 23:14:27.670 INFO [0] All repairs completed, exit
14912 Sep 22 23:14:27.670 INFO [0] Starts cmd_loop
14913 Sep 22 23:14:27.670 INFO Current flush_numbers [0..12]: [0, 0]
14914 Sep 22 23:14:27.670 INFO [1] received reconcile message
14915 Sep 22 23:14:27.670 INFO [1] All repairs completed, exit
14916 Sep 22 23:14:27.670 INFO [1] Starts cmd_loop
14917 Sep 22 23:14:27.670 INFO [2] received reconcile message
14918 Sep 22 23:14:27.670 INFO [2] All repairs completed, exit
14919 Sep 22 23:14:27.670 INFO [2] Starts cmd_loop
14920 Sep 22 23:14:27.670 INFO Downstairs has completed Negotiation, task: proc
14921 The guest has finished waiting for activation
14922 Sep 22 23:14:27.670 INFO [0] 6e979958-69ff-4fc0-a35a-d73365a539fc (9abbc7cc-1c99-4a3a-9498-4633122695a8) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
14923 Sep 22 23:14:27.670 INFO [0] Transition from WaitActive to WaitQuorum
14924 Sep 22 23:14:27.670 WARN [0] new RM replaced this: None
14925 Sep 22 23:14:27.670 INFO [0] Starts reconcile loop
14926 Sep 22 23:14:27.670 DEBG IO Read 1000 has deps []
14927 Sep 22 23:14:27.670 INFO [1] 6e979958-69ff-4fc0-a35a-d73365a539fc (9abbc7cc-1c99-4a3a-9498-4633122695a8) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
14928 Sep 22 23:14:27.670 INFO [1] Transition from WaitActive to WaitQuorum
14929 Sep 22 23:14:27.670 WARN [1] new RM replaced this: None
14930 Sep 22 23:14:27.670 INFO [1] Starts reconcile loop
14931 Sep 22 23:14:27.670 DEBG Write :1004 deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
14932 Sep 22 23:14:27.671 INFO [2] 6e979958-69ff-4fc0-a35a-d73365a539fc (9abbc7cc-1c99-4a3a-9498-4633122695a8) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
14933 Sep 22 23:14:27.671 INFO [2] Transition from WaitActive to WaitQuorum
14934 Sep 22 23:14:27.671 WARN [2] new RM replaced this: None
14935 Sep 22 23:14:27.671 INFO [2] Starts reconcile loop
14936 Sep 22 23:14:27.671 INFO [0] 127.0.0.1:42286 task reports connection:true
14937 Sep 22 23:14:27.671 INFO 6e979958-69ff-4fc0-a35a-d73365a539fc WaitQuorum WaitQuorum WaitQuorum
14938 Sep 22 23:14:27.671 INFO [0]R flush_numbers: [0, 0]
14939 Sep 22 23:14:27.671 INFO [0]R generation: [0, 0]
14940 Sep 22 23:14:27.671 INFO [0]R dirty: [false, false]
14941 Sep 22 23:14:27.671 INFO [1]R flush_numbers: [0, 0]
14942 Sep 22 23:14:27.671 INFO [1]R generation: [0, 0]
14943 Sep 22 23:14:27.671 INFO [1]R dirty: [false, false]
14944 Sep 22 23:14:27.671 INFO [2]R flush_numbers: [0, 0]
14945 Sep 22 23:14:27.671 INFO [2]R generation: [0, 0]
14946 Sep 22 23:14:27.671 INFO [2]R dirty: [false, false]
14947 Sep 22 23:14:27.671 INFO Max found gen is 1
14948 Sep 22 23:14:27.671 INFO Generation requested: 1 >= found:1
14949 Sep 22 23:14:27.671 INFO UUID: 9de06cf3-fc58-4960-af68-27205e75c2e5
14950 Sep 22 23:14:27.671 INFO Next flush: 1
14951 Sep 22 23:14:27.671 INFO Blocks per extent:5 Total Extents: 2
14952 Sep 22 23:14:27.671 INFO All extents match
14953 Sep 22 23:14:27.671 DEBG Read :1000 deps:[] res:true
14954 Sep 22 23:14:27.671 INFO No downstairs repair required
14955 Sep 22 23:14:27.671 INFO No initial repair work was required
14956 Sep 22 23:14:27.671 INFO Set Downstairs and Upstairs active
14957 Sep 22 23:14:27.671 INFO Crucible Version: Crucible Version: 0.0.1
14958 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
14959 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
14960 rustc: 1.70.0 stable x86_64-unknown-illumos
14961 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
14962 Sep 22 23:14:27.671 INFO Upstairs <-> Downstairs Message Version: 4, task: main
14963 Sep 22 23:14:27.671 INFO 6e979958-69ff-4fc0-a35a-d73365a539fc is now active with session: 9abbc7cc-1c99-4a3a-9498-4633122695a8
14964 Sep 22 23:14:27.671 INFO Using address: 127.0.0.1:49734, task: main
14965 Sep 22 23:14:27.671 INFO 6e979958-69ff-4fc0-a35a-d73365a539fc Set Active after no repair
14966 Sep 22 23:14:27.671 INFO Notify all downstairs, region set compare is done.
14967 Sep 22 23:14:27.671 INFO Set check for repair
14968 Sep 22 23:14:27.671 DEBG Read :1000 deps:[] res:true
14969 Sep 22 23:14:27.671 INFO [1] 127.0.0.1:52962 task reports connection:true
14970 Sep 22 23:14:27.671 INFO 6e979958-69ff-4fc0-a35a-d73365a539fc Active Active Active
14971 Sep 22 23:14:27.671 INFO Set check for repair
14972 Sep 22 23:14:27.671 DEBG Write :1004 deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
14973 Sep 22 23:14:27.671 INFO [2] 127.0.0.1:63966 task reports connection:true
14974 Sep 22 23:14:27.671 INFO 6e979958-69ff-4fc0-a35a-d73365a539fc Active Active Active
14975 Sep 22 23:14:27.671 DEBG Read :1000 deps:[] res:true
14976 Sep 22 23:14:27.671 INFO Set check for repair
14977 Sep 22 23:14:27.671 INFO [0] received reconcile message
14978 Sep 22 23:14:27.671 INFO Repair listens on 127.0.0.1:0, task: repair
14979 Sep 22 23:14:27.671 INFO [0] All repairs completed, exit
14980 Sep 22 23:14:27.671 INFO [0] Starts cmd_loop
14981 Sep 22 23:14:27.671 INFO [1] received reconcile message
14982 Sep 22 23:14:27.671 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:43773, task: repair
14983 Sep 22 23:14:27.671 INFO [1] All repairs completed, exit
14984 Sep 22 23:14:27.671 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:43773, task: repair
14985 Sep 22 23:14:27.671 INFO [1] Starts cmd_loop
14986 Sep 22 23:14:27.672 INFO listening, local_addr: 127.0.0.1:43773, task: repair
14987 Sep 22 23:14:27.672 DEBG [2] Read AckReady 1000, : downstairs
14988 Sep 22 23:14:27.672 INFO [2] received reconcile message
14989 Sep 22 23:14:27.672 INFO [2] All repairs completed, exit
14990 Sep 22 23:14:27.672 INFO [2] Starts cmd_loop
14991 Sep 22 23:14:27.672 DEBG [0] Read already AckReady 1000, : downstairs
14992 The guest has finished waiting for activation
14993 Sep 22 23:14:27.672 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:43773, task: repair
14994 Sep 22 23:14:27.672 DEBG [1] Read already AckReady 1000, : downstairs
14995 Sep 22 23:14:27.672 INFO current number of open files limit 65536 is already the maximum
14996 Sep 22 23:14:27.672 INFO Using repair address: 127.0.0.1:43773, task: main
14997 Sep 22 23:14:27.672 INFO Opened existing region file "/tmp/downstairs-IYCeuQqy/region.json"
14998 Sep 22 23:14:27.672 INFO No SSL acceptor configured, task: main
14999 Sep 22 23:14:27.672 DEBG up_ds_listen was notified
15000 Sep 22 23:14:27.672 INFO current number of open files limit 65536 is already the maximum
15001 Sep 22 23:14:27.672 INFO Database read version 1
15002 Sep 22 23:14:27.672 DEBG Write :1004 deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
15003 Sep 22 23:14:27.672 INFO Database write version 1
15004 Sep 22 23:14:27.672 INFO Opened existing region file "/tmp/downstairs-IS2nyztY/region.json"
15005 Sep 22 23:14:27.672 DEBG up_ds_listen process 1000
15006 Sep 22 23:14:27.672 INFO Database read version 1
15007 Sep 22 23:14:27.672 INFO Database write version 1
15008 Sep 22 23:14:27.672 DEBG [A] ack job 1000:1, : downstairs
15009 Sep 22 23:14:27.672 INFO current number of open files limit 65536 is already the maximum
15010 Sep 22 23:14:27.672 DEBG up_ds_listen checked 1 jobs, back to waiting
15011 Sep 22 23:14:27.672 INFO Upstairs starts
15012 Sep 22 23:14:27.672 INFO Crucible Version: BuildInfo {
15013 version: "0.0.1",
15014 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
15015 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
15016 git_branch: "main",
15017 rustc_semver: "1.70.0",
15018 rustc_channel: "stable",
15019 rustc_host_triple: "x86_64-unknown-illumos",
15020 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
15021 cargo_triple: "x86_64-unknown-illumos",
15022 debug: true,
15023 opt_level: 0,
15024 }
15025 Sep 22 23:14:27.672 INFO Created new region file "/tmp/downstairs-LvUMdXVd/region.json"
15026 Sep 22 23:14:27.672 INFO Upstairs <-> Downstairs Message Version: 4
15027 Sep 22 23:14:27.672 INFO Crucible stats registered with UUID: 432c17f3-ca99-4afa-891c-25e95b3f728b
15028 Sep 22 23:14:27.672 INFO Crucible 432c17f3-ca99-4afa-891c-25e95b3f728b has session id: 364115f9-176c-480d-86ab-3fccc63267b3
15029 Sep 22 23:14:27.672 INFO [0] connecting to 127.0.0.1:42286, looper: 0
15030 Sep 22 23:14:27.672 INFO [1] connecting to 127.0.0.1:52962, looper: 1
15031 Sep 22 23:14:27.673 INFO [2] connecting to 127.0.0.1:63966, looper: 2
15032 Sep 22 23:14:27.673 INFO up_listen starts, task: up_listen
15033 Sep 22 23:14:27.673 INFO Wait for all three downstairs to come online
15034 Sep 22 23:14:27.673 INFO Flush timeout: 0.5
15035 Sep 22 23:14:27.673 INFO UUID: c160bb67-b4be-4c85-a112-7924757e1ad4
15036 Sep 22 23:14:27.673 INFO UUID: a57d7c84-72de-4915-b41c-7b731748ffdd
15037 Sep 22 23:14:27.673 INFO [1] 432c17f3-ca99-4afa-891c-25e95b3f728b looper connected, looper: 1
15038 Sep 22 23:14:27.673 INFO Blocks per extent:5 Total Extents: 2
15039 Sep 22 23:14:27.673 INFO Blocks per extent:5 Total Extents: 2
15040 Sep 22 23:14:27.673 INFO [1] Proc runs for 127.0.0.1:52962 in state New
15041 Sep 22 23:14:27.673 INFO Crucible Version: Crucible Version: 0.0.1
15042 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15043 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15044 rustc: 1.70.0 stable x86_64-unknown-illumos
15045 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15046 Sep 22 23:14:27.673 INFO Crucible Version: Crucible Version: 0.0.1
15047 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15048 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15049 rustc: 1.70.0 stable x86_64-unknown-illumos
15050 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15051 Sep 22 23:14:27.673 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15052 Sep 22 23:14:27.673 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15053 Sep 22 23:14:27.673 INFO [0] 432c17f3-ca99-4afa-891c-25e95b3f728b looper connected, looper: 0
15054 Sep 22 23:14:27.673 INFO Using address: 127.0.0.1:63965, task: main
15055 Sep 22 23:14:27.673 INFO Using address: 127.0.0.1:48094, task: main
15056 Sep 22 23:14:27.673 INFO [0] Proc runs for 127.0.0.1:42286 in state New
15057 Sep 22 23:14:27.673 INFO [2] 432c17f3-ca99-4afa-891c-25e95b3f728b looper connected, looper: 2
15058 Sep 22 23:14:27.673 INFO [2] Proc runs for 127.0.0.1:63966 in state New
15059 Sep 22 23:14:27.673 INFO accepted connection from 127.0.0.1:48180, task: main
15060 Sep 22 23:14:27.673 DEBG Read :1005 deps:[JobId(1004), JobId(1002), JobId(1000)] res:true
15061 Sep 22 23:14:27.674 INFO accepted connection from 127.0.0.1:39314, task: main
15062 Sep 22 23:14:27.674 INFO accepted connection from 127.0.0.1:43895, task: main
15063 Sep 22 23:14:27.674 INFO Repair listens on 127.0.0.1:0, task: repair
15064 Sep 22 23:14:27.674 INFO Repair listens on 127.0.0.1:0, task: repair
15065 Sep 22 23:14:27.674 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:62668, task: repair
15066 Sep 22 23:14:27.674 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:54063, task: repair
15067 Sep 22 23:14:27.674 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:54063, task: repair
15068 Sep 22 23:14:27.674 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:62668, task: repair
15069 Sep 22 23:14:27.674 INFO listening, local_addr: 127.0.0.1:62668, task: repair
15070 Sep 22 23:14:27.674 INFO listening, local_addr: 127.0.0.1:54063, task: repair
15071 Sep 22 23:14:27.674 INFO Connection request from 432c17f3-ca99-4afa-891c-25e95b3f728b with version 4, task: proc
15072 Sep 22 23:14:27.674 INFO upstairs UpstairsConnection { upstairs_id: 432c17f3-ca99-4afa-891c-25e95b3f728b, session_id: dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54, gen: 1 } connected, version 4, task: proc
15073 Sep 22 23:14:27.674 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:62668, task: repair
15074 Sep 22 23:14:27.674 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:54063, task: repair
15075 Sep 22 23:14:27.674 INFO Using repair address: 127.0.0.1:62668, task: main
15076 Sep 22 23:14:27.674 DEBG Read :1005 deps:[JobId(1004), JobId(1002), JobId(1000)] res:true
15077 Sep 22 23:14:27.674 INFO Using repair address: 127.0.0.1:54063, task: main
15078 Sep 22 23:14:27.674 INFO No SSL acceptor configured, task: main
15079 Sep 22 23:14:27.674 INFO No SSL acceptor configured, task: main
15080 Sep 22 23:14:27.674 INFO Connection request from 432c17f3-ca99-4afa-891c-25e95b3f728b with version 4, task: proc
15081 Sep 22 23:14:27.674 INFO upstairs UpstairsConnection { upstairs_id: 432c17f3-ca99-4afa-891c-25e95b3f728b, session_id: dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54, gen: 1 } connected, version 4, task: proc
15082 Sep 22 23:14:27.674 INFO Connection request from 432c17f3-ca99-4afa-891c-25e95b3f728b with version 4, task: proc
15083 Sep 22 23:14:27.674 INFO upstairs UpstairsConnection { upstairs_id: 432c17f3-ca99-4afa-891c-25e95b3f728b, session_id: dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54, gen: 1 } connected, version 4, task: proc
15084 Sep 22 23:14:27.675 INFO [1] 432c17f3-ca99-4afa-891c-25e95b3f728b (dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54) New New New ds_transition to WaitActive
15085 Sep 22 23:14:27.675 INFO [1] Transition from New to WaitActive
15086 Sep 22 23:14:27.675 INFO [0] 432c17f3-ca99-4afa-891c-25e95b3f728b (dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54) New WaitActive New ds_transition to WaitActive
15087 Sep 22 23:14:27.675 INFO current number of open files limit 65536 is already the maximum
15088 Sep 22 23:14:27.675 INFO current number of open files limit 65536 is already the maximum
15089 Sep 22 23:14:27.675 INFO [0] Transition from New to WaitActive
15090 Sep 22 23:14:27.675 INFO [2] 432c17f3-ca99-4afa-891c-25e95b3f728b (dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54) WaitActive WaitActive New ds_transition to WaitActive
15091 Sep 22 23:14:27.675 INFO [2] Transition from New to WaitActive
15092 Sep 22 23:14:27.675 INFO Created new region file "/tmp/downstairs-nP07tIRt/region.json"
15093 Sep 22 23:14:27.675 INFO Created new region file "/tmp/downstairs-7rayLe79/region.json"
15094 The guest has requested activation
15095 Sep 22 23:14:27.675 INFO 432c17f3-ca99-4afa-891c-25e95b3f728b active request set
15096 Sep 22 23:14:27.675 INFO [0] received activate with gen 1
15097 Sep 22 23:14:27.675 INFO [0] client got ds_active_rx, promote! session dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54
15098 Sep 22 23:14:27.675 INFO [1] received activate with gen 1
15099 Sep 22 23:14:27.675 INFO [1] client got ds_active_rx, promote! session dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54
15100 Sep 22 23:14:27.675 INFO [2] received activate with gen 1
15101 Sep 22 23:14:27.675 INFO [2] client got ds_active_rx, promote! session dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54
15102 test test::integration_test_just_read ... ok
15103 Sep 22 23:14:27.676 INFO [1] downstairs client at 127.0.0.1:52962 has UUID 41cf1bdf-fb18-466b-8427-1ff781e7de45
15104 Sep 22 23:14:27.676 DEBG Read :1005 deps:[JobId(1004), JobId(1002), JobId(1000)] res:true
15105 Sep 22 23:14:27.676 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 41cf1bdf-fb18-466b-8427-1ff781e7de45, encrypted: true, database_read_version: 1, database_write_version: 1 }
15106 Sep 22 23:14:27.676 INFO 432c17f3-ca99-4afa-891c-25e95b3f728b WaitActive WaitActive WaitActive
15107 Sep 22 23:14:27.676 INFO [0] downstairs client at 127.0.0.1:42286 has UUID 892b80dc-b0fe-4fd6-9a73-88aa72d6d43b
15108 Sep 22 23:14:27.676 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 892b80dc-b0fe-4fd6-9a73-88aa72d6d43b, encrypted: true, database_read_version: 1, database_write_version: 1 }
15109 Sep 22 23:14:27.676 INFO 432c17f3-ca99-4afa-891c-25e95b3f728b WaitActive WaitActive WaitActive
15110 Sep 22 23:14:27.676 INFO [2] downstairs client at 127.0.0.1:63966 has UUID f00f8ef1-bf2e-4a59-9818-84a4661ee77c
15111 Sep 22 23:14:27.676 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f00f8ef1-bf2e-4a59-9818-84a4661ee77c, encrypted: true, database_read_version: 1, database_write_version: 1 }
15112 Sep 22 23:14:27.676 INFO 432c17f3-ca99-4afa-891c-25e95b3f728b WaitActive WaitActive WaitActive
15113 Sep 22 23:14:27.676 INFO current number of open files limit 65536 is already the maximum
15114 Sep 22 23:14:27.676 INFO Created new region file "/tmp/downstairs-ahT0SB6c/region.json"
15115 Sep 22 23:14:27.676 INFO Current flush_numbers [0..12]: [0, 0]
15116 Sep 22 23:14:27.677 INFO Downstairs has completed Negotiation, task: proc
15117 Sep 22 23:14:27.677 INFO current number of open files limit 65536 is already the maximum
15118 Sep 22 23:14:27.677 INFO Opened existing region file "/tmp/downstairs-LvUMdXVd/region.json"
15119 Sep 22 23:14:27.677 INFO Database read version 1
15120 Sep 22 23:14:27.677 INFO Database write version 1
15121 Sep 22 23:14:27.677 INFO Current flush_numbers [0..12]: [0, 0]
15122 Sep 22 23:14:27.677 INFO Downstairs has completed Negotiation, task: proc
15123 Sep 22 23:14:27.677 INFO Current flush_numbers [0..12]: [0, 0]
15124 Sep 22 23:14:27.678 INFO Downstairs has completed Negotiation, task: proc
15125 Sep 22 23:14:27.678 INFO current number of open files limit 65536 is already the maximum
15126 Sep 22 23:14:27.678 INFO Opened existing region file "/tmp/downstairs-nP07tIRt/region.json"
15127 Sep 22 23:14:27.678 INFO Database read version 1
15128 Sep 22 23:14:27.678 INFO Database write version 1
15129 Sep 22 23:14:27.678 INFO [1] 432c17f3-ca99-4afa-891c-25e95b3f728b (dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
15130 Sep 22 23:14:27.678 INFO [1] Transition from WaitActive to WaitQuorum
15131 Sep 22 23:14:27.678 WARN [1] new RM replaced this: None
15132 Sep 22 23:14:27.678 INFO [1] Starts reconcile loop
15133 Sep 22 23:14:27.678 INFO [0] 432c17f3-ca99-4afa-891c-25e95b3f728b (dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54) WaitActive WaitQuorum WaitActive ds_transition to WaitQuorum
15134 Sep 22 23:14:27.678 INFO [0] Transition from WaitActive to WaitQuorum
15135 Sep 22 23:14:27.678 WARN [0] new RM replaced this: None
15136 Sep 22 23:14:27.678 INFO [0] Starts reconcile loop
15137 Sep 22 23:14:27.678 INFO [2] 432c17f3-ca99-4afa-891c-25e95b3f728b (dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
15138 Sep 22 23:14:27.678 INFO [2] Transition from WaitActive to WaitQuorum
15139 Sep 22 23:14:27.678 WARN [2] new RM replaced this: None
15140 Sep 22 23:14:27.678 INFO [2] Starts reconcile loop
15141 Sep 22 23:14:27.678 INFO [1] 127.0.0.1:52962 task reports connection:true
15142 Sep 22 23:14:27.678 INFO 432c17f3-ca99-4afa-891c-25e95b3f728b WaitQuorum WaitQuorum WaitQuorum
15143 Sep 22 23:14:27.678 INFO [0]R flush_numbers: [0, 0]
15144 Sep 22 23:14:27.678 INFO [0]R generation: [0, 0]
15145 Sep 22 23:14:27.678 INFO [0]R dirty: [false, false]
15146 Sep 22 23:14:27.678 INFO [1]R flush_numbers: [0, 0]
15147 Sep 22 23:14:27.678 INFO [1]R generation: [0, 0]
15148 Sep 22 23:14:27.678 INFO [1]R dirty: [false, false]
15149 Sep 22 23:14:27.678 INFO [2]R flush_numbers: [0, 0]
15150 Sep 22 23:14:27.678 INFO [2]R generation: [0, 0]
15151 Sep 22 23:14:27.678 INFO [2]R dirty: [false, false]
15152 Sep 22 23:14:27.678 INFO Max found gen is 1
15153 Sep 22 23:14:27.678 INFO Generation requested: 1 >= found:1
15154 Sep 22 23:14:27.678 INFO Next flush: 1
15155 Sep 22 23:14:27.678 INFO All extents match
15156 Sep 22 23:14:27.678 INFO No downstairs repair required
15157 Sep 22 23:14:27.678 INFO No initial repair work was required
15158 Sep 22 23:14:27.678 INFO Set Downstairs and Upstairs active
15159 Sep 22 23:14:27.679 INFO 432c17f3-ca99-4afa-891c-25e95b3f728b is now active with session: dfdb5e67-2a1d-4dc7-96ad-46ab7166dc54
15160 Sep 22 23:14:27.679 INFO 432c17f3-ca99-4afa-891c-25e95b3f728b Set Active after no repair
15161 Sep 22 23:14:27.679 INFO Notify all downstairs, region set compare is done.
15162 Sep 22 23:14:27.679 INFO Set check for repair
15163 Sep 22 23:14:27.679 INFO [0] 127.0.0.1:42286 task reports connection:true
15164 Sep 22 23:14:27.679 INFO 432c17f3-ca99-4afa-891c-25e95b3f728b Active Active Active
15165 Sep 22 23:14:27.679 INFO Set check for repair
15166 Sep 22 23:14:27.679 INFO [2] 127.0.0.1:63966 task reports connection:true
15167 Sep 22 23:14:27.679 INFO 432c17f3-ca99-4afa-891c-25e95b3f728b Active Active Active
15168 Sep 22 23:14:27.679 INFO Set check for repair
15169 Sep 22 23:14:27.679 INFO current number of open files limit 65536 is already the maximum
15170 Sep 22 23:14:27.679 INFO [0] received reconcile message
15171 Sep 22 23:14:27.679 INFO Opened existing region file "/tmp/downstairs-7rayLe79/region.json"
15172 Sep 22 23:14:27.679 INFO Database read version 1
15173 Sep 22 23:14:27.679 INFO [0] All repairs completed, exit
15174 Sep 22 23:14:27.679 INFO Database write version 1
15175 Sep 22 23:14:27.679 INFO [0] Starts cmd_loop
15176 Sep 22 23:14:27.679 INFO [1] received reconcile message
15177 Sep 22 23:14:27.679 INFO [1] All repairs completed, exit
15178 Sep 22 23:14:27.679 INFO [1] Starts cmd_loop
15179 Sep 22 23:14:27.679 INFO [2] received reconcile message
15180 Sep 22 23:14:27.679 INFO [2] All repairs completed, exit
15181 Sep 22 23:14:27.679 INFO [2] Starts cmd_loop
15182 The guest has finished waiting for activation
15183 Sep 22 23:14:27.679 INFO UUID: 7296d75f-0499-4277-a0c4-39bde4c94ac5
15184 Sep 22 23:14:27.679 INFO Blocks per extent:5 Total Extents: 2
15185 Sep 22 23:14:27.679 INFO Crucible Version: Crucible Version: 0.0.1
15186 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15187 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15188 rustc: 1.70.0 stable x86_64-unknown-illumos
15189 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15190 Sep 22 23:14:27.679 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15191 Sep 22 23:14:27.679 DEBG IO Read 1000 has deps []
15192 Sep 22 23:14:27.679 INFO Using address: 127.0.0.1:46304, task: main
15193 Sep 22 23:14:27.680 INFO Repair listens on 127.0.0.1:0, task: repair
15194 Sep 22 23:14:27.680 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:64884, task: repair
15195 Sep 22 23:14:27.680 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:64884, task: repair
15196 Sep 22 23:14:27.680 INFO listening, local_addr: 127.0.0.1:64884, task: repair
15197 Sep 22 23:14:27.680 DEBG Read :1000 deps:[] res:true
15198 Sep 22 23:14:27.680 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:64884, task: repair
15199 Sep 22 23:14:27.680 INFO Using repair address: 127.0.0.1:64884, task: main
15200 Sep 22 23:14:27.680 INFO No SSL acceptor configured, task: main
15201 Sep 22 23:14:27.680 INFO UUID: 287a3e0c-04cc-4f00-8245-e0cea8ea9ef0
15202 Sep 22 23:14:27.680 INFO Blocks per extent:5 Total Extents: 2
15203 Sep 22 23:14:27.680 INFO Crucible Version: Crucible Version: 0.0.1
15204 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15205 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15206 rustc: 1.70.0 stable x86_64-unknown-illumos
15207 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15208 Sep 22 23:14:27.680 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15209 Sep 22 23:14:27.680 DEBG Read :1000 deps:[] res:true
15210 Sep 22 23:14:27.680 INFO Using address: 127.0.0.1:41031, task: main
15211 Sep 22 23:14:27.680 INFO listening on 127.0.0.1:0, task: main
15212 Sep 22 23:14:27.681 INFO listening on 127.0.0.1:0, task: main
15213 Sep 22 23:14:27.681 WARN a2292a1d-0eb4-4a70-b0af-896ace339068 request to replace downstairs 127.0.0.1:36461 with 127.0.0.1:49734
15214 Sep 22 23:14:27.681 INFO a2292a1d-0eb4-4a70-b0af-896ace339068 found old target: 127.0.0.1:36461 at 0
15215 Sep 22 23:14:27.681 INFO a2292a1d-0eb4-4a70-b0af-896ace339068 replacing old: 127.0.0.1:36461 at 0
15216 Sep 22 23:14:27.681 INFO [0] client skip 0 in process jobs because fault, : downstairs
15217 Sep 22 23:14:27.681 INFO [0] changed 0 jobs to fault skipped, : downstairs
15218 Sep 22 23:14:27.681 DEBG Read :1000 deps:[] res:true
15219 Sep 22 23:14:27.681 INFO [0] a2292a1d-0eb4-4a70-b0af-896ace339068 (6eb51b2f-ab23-44be-a134-dc7d267c1b0b) Active Active Active ds_transition to Replacing
15220 Sep 22 23:14:27.681 INFO [0] Transition from Active to Replacing
15221 Sep 22 23:14:27.681 INFO Repair listens on 127.0.0.1:0, task: repair
15222 Sep 22 23:14:27.681 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:59163, task: repair
15223 Sep 22 23:14:27.681 WARN a2292a1d-0eb4-4a70-b0af-896ace339068 request to replace downstairs 127.0.0.1:48269 with 127.0.0.1:46304
15224 Sep 22 23:14:27.681 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:59163, task: repair
15225 Sep 22 23:14:27.681 INFO a2292a1d-0eb4-4a70-b0af-896ace339068 found old target: 127.0.0.1:48269 at 1
15226 Sep 22 23:14:27.681 INFO listening, local_addr: 127.0.0.1:59163, task: repair
15227 Sep 22 23:14:27.681 DEBG [0] Read AckReady 1000, : downstairs
15228 Sep 22 23:14:27.681 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:59163, task: repair
15229 Sep 22 23:14:27.681 INFO Using repair address: 127.0.0.1:59163, task: main
15230 Sep 22 23:14:27.681 INFO No SSL acceptor configured, task: main
15231 Sep 22 23:14:27.681 DEBG [1] Read already AckReady 1000, : downstairs
15232 Sep 22 23:14:27.681 DEBG [2] Read already AckReady 1000, : downstairs
15233 Sep 22 23:14:27.681 DEBG up_ds_listen was notified
15234 Sep 22 23:14:27.681 DEBG up_ds_listen process 1000
15235 Sep 22 23:14:27.681 DEBG [A] ack job 1000:1, : downstairs
15236 Sep 22 23:14:27.681 DEBG up_ds_listen checked 1 jobs, back to waiting
15237 Sep 22 23:14:27.682 INFO current number of open files limit 65536 is already the maximum
15238 Sep 22 23:14:27.682 DEBG IO Read 1000 has deps []
15239 Sep 22 23:14:27.682 INFO Created new region file "/tmp/downstairs-lhV1HVSC/region.json"
15240 Sep 22 23:14:27.682 INFO current number of open files limit 65536 is already the maximum
15241 Sep 22 23:14:27.682 INFO Opened existing region file "/tmp/downstairs-ahT0SB6c/region.json"
15242 Sep 22 23:14:27.682 INFO Database read version 1
15243 Sep 22 23:14:27.682 INFO Database write version 1
15244 Sep 22 23:14:27.682 DEBG Read :1000 deps:[] res:true
15245 Sep 22 23:14:27.682 INFO UUID: af190b5e-a849-4b2d-90bf-b82d85ee2543
15246 Sep 22 23:14:27.682 INFO Blocks per extent:5 Total Extents: 2
15247 Sep 22 23:14:27.683 DEBG Read :1000 deps:[] res:true
15248 Sep 22 23:14:27.683 INFO Crucible Version: Crucible Version: 0.0.1
15249 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15250 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15251 rustc: 1.70.0 stable x86_64-unknown-illumos
15252 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15253 Sep 22 23:14:27.683 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15254 Sep 22 23:14:27.683 INFO Using address: 127.0.0.1:34981, task: main
15255 Sep 22 23:14:27.683 DEBG Read :1000 deps:[] res:true
15256 Sep 22 23:14:27.683 INFO Repair listens on 127.0.0.1:0, task: repair
15257 Sep 22 23:14:27.683 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51334, task: repair
15258 Sep 22 23:14:27.683 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51334, task: repair
15259 Sep 22 23:14:27.683 INFO listening, local_addr: 127.0.0.1:51334, task: repair
15260 Sep 22 23:14:27.683 DEBG [1] Read AckReady 1000, : downstairs
15261 Sep 22 23:14:27.683 DEBG [0] Read already AckReady 1000, : downstairs
15262 Sep 22 23:14:27.683 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51334, task: repair
15263 Sep 22 23:14:27.683 DEBG [2] Read already AckReady 1000, : downstairs
15264 Sep 22 23:14:27.683 INFO Using repair address: 127.0.0.1:51334, task: main
15265 Sep 22 23:14:27.683 INFO No SSL acceptor configured, task: main
15266 Sep 22 23:14:27.683 DEBG up_ds_listen was notified
15267 Sep 22 23:14:27.683 DEBG up_ds_listen process 1000
15268 Sep 22 23:14:27.683 DEBG [A] ack job 1000:1, : downstairs
15269 Sep 22 23:14:27.684 DEBG up_ds_listen checked 1 jobs, back to waiting
15270 Sep 22 23:14:27.684 INFO current number of open files limit 65536 is already the maximum
15271 Sep 22 23:14:27.684 INFO Created new region file "/tmp/downstairs-9K0Zt4dt/region.json"
15272 Sep 22 23:14:27.685 INFO UUID: e624c1bd-6ca3-47e5-9bce-8c13d262fb0f
15273 Sep 22 23:14:27.685 INFO Blocks per extent:5 Total Extents: 2
15274 Sep 22 23:14:27.685 INFO Crucible Version: Crucible Version: 0.0.1
15275 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15276 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15277 rustc: 1.70.0 stable x86_64-unknown-illumos
15278 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15279 Sep 22 23:14:27.685 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15280 Sep 22 23:14:27.685 INFO Using address: 127.0.0.1:41733, task: main
15281 Sep 22 23:14:27.686 INFO Repair listens on 127.0.0.1:0, task: repair
15282 Sep 22 23:14:27.686 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50935, task: repair
15283 Sep 22 23:14:27.686 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50935, task: repair
15284 Sep 22 23:14:27.686 INFO listening, local_addr: 127.0.0.1:50935, task: repair
15285 Sep 22 23:14:27.686 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50935, task: repair
15286 test test::integration_test_guest_downstairs_unwritten ... ok
15287 Sep 22 23:14:27.686 INFO Using repair address: 127.0.0.1:50935, task: main
15288 Sep 22 23:14:27.686 INFO No SSL acceptor configured, task: main
15289 Sep 22 23:14:27.687 INFO current number of open files limit 65536 is already the maximum
15290 Sep 22 23:14:27.687 INFO Created new region file "/tmp/downstairs-GF5EnqWQ/region.json"
15291 Sep 22 23:14:27.687 INFO current number of open files limit 65536 is already the maximum
15292 Sep 22 23:14:27.688 INFO Created new region file "/tmp/downstairs-lYcgzWeT/region.json"
15293 Sep 22 23:14:27.688 INFO current number of open files limit 65536 is already the maximum
15294 test test::integration_test_multi_read_only ... ok
15295 Sep 22 23:14:27.688 INFO Opened existing region file "/tmp/downstairs-lhV1HVSC/region.json"
15296 Sep 22 23:14:27.688 INFO Database read version 1
15297 Sep 22 23:14:27.688 INFO Database write version 1
15298 Sep 22 23:14:27.689 INFO current number of open files limit 65536 is already the maximum
15299 Sep 22 23:14:27.689 INFO Created new region file "/tmp/downstairs-VxCr6wAp/region.json"
15300 test test::integration_test_guest_replace_many_downstairs ... ok
15301 Sep 22 23:14:27.690 INFO current number of open files limit 65536 is already the maximum
15302 Sep 22 23:14:27.690 INFO Created new region file "/tmp/downstairs-w7fd116Q/region.json"
15303 Sep 22 23:14:27.690 INFO UUID: 3e5c0f56-3bd3-4ecd-befe-102504a91370
15304 Sep 22 23:14:27.690 INFO Blocks per extent:5 Total Extents: 2
15305 Sep 22 23:14:27.690 INFO Crucible Version: Crucible Version: 0.0.1
15306 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15307 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15308 rustc: 1.70.0 stable x86_64-unknown-illumos
15309 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15310 Sep 22 23:14:27.690 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15311 Sep 22 23:14:27.690 INFO current number of open files limit 65536 is already the maximum
15312 Sep 22 23:14:27.690 INFO Using address: 127.0.0.1:40988, task: main
15313 Sep 22 23:14:27.690 INFO Opened existing region file "/tmp/downstairs-9K0Zt4dt/region.json"
15314 Sep 22 23:14:27.690 INFO Database read version 1
15315 Sep 22 23:14:27.690 INFO Database write version 1
15316 Sep 22 23:14:27.691 INFO Repair listens on 127.0.0.1:0, task: repair
15317 Sep 22 23:14:27.691 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:40770, task: repair
15318 Sep 22 23:14:27.691 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:40770, task: repair
15319 Sep 22 23:14:27.691 INFO listening, local_addr: 127.0.0.1:40770, task: repair
15320 Sep 22 23:14:27.691 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:40770, task: repair
15321 Sep 22 23:14:27.691 INFO Using repair address: 127.0.0.1:40770, task: main
15322 Sep 22 23:14:27.691 INFO No SSL acceptor configured, task: main
15323 Sep 22 23:14:27.691 INFO current number of open files limit 65536 is already the maximum
15324 Sep 22 23:14:27.691 INFO Opened existing region file "/tmp/downstairs-lYcgzWeT/region.json"
15325 Sep 22 23:14:27.691 INFO Database read version 1
15326 Sep 22 23:14:27.691 INFO Database write version 1
15327 Sep 22 23:14:27.691 INFO current number of open files limit 65536 is already the maximum
15328 Sep 22 23:14:27.691 INFO Opened existing region file "/tmp/downstairs-GF5EnqWQ/region.json"
15329 Sep 22 23:14:27.692 INFO Database read version 1
15330 Sep 22 23:14:27.692 INFO Database write version 1
15331 Sep 22 23:14:27.692 INFO UUID: 12d88dad-09b4-4e8c-bc38-a95145cf057c
15332 Sep 22 23:14:27.692 INFO Blocks per extent:5 Total Extents: 2
15333 Sep 22 23:14:27.692 INFO Crucible Version: Crucible Version: 0.0.1
15334 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15335 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15336 rustc: 1.70.0 stable x86_64-unknown-illumos
15337 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15338 Sep 22 23:14:27.692 INFO Upstairs starts
15339 Sep 22 23:14:27.692 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15340 Sep 22 23:14:27.692 INFO Using address: 127.0.0.1:39833, task: main
15341 Sep 22 23:14:27.692 INFO Crucible Version: BuildInfo {
15342 version: "0.0.1",
15343 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
15344 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
15345 git_branch: "main",
15346 rustc_semver: "1.70.0",
15347 rustc_channel: "stable",
15348 rustc_host_triple: "x86_64-unknown-illumos",
15349 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
15350 cargo_triple: "x86_64-unknown-illumos",
15351 debug: true,
15352 opt_level: 0,
15353 }
15354 Sep 22 23:14:27.692 INFO Upstairs <-> Downstairs Message Version: 4
15355 Sep 22 23:14:27.692 INFO Crucible stats registered with UUID: ce9a8163-4db1-4374-b6d0-604bca1e70ae
15356 Sep 22 23:14:27.692 INFO Crucible ce9a8163-4db1-4374-b6d0-604bca1e70ae has session id: bdade29d-3285-4c6a-be66-c4bc0b33d01d
15357 Sep 22 23:14:27.693 INFO listening on 127.0.0.1:0, task: main
15358 Sep 22 23:14:27.693 INFO UUID: c415f140-2b86-4861-a661-215ad49ee0b4
15359 Sep 22 23:14:27.693 INFO Blocks per extent:5 Total Extents: 2
15360 Sep 22 23:14:27.693 INFO listening on 127.0.0.1:0, task: main
15361 Sep 22 23:14:27.693 INFO Crucible Version: Crucible Version: 0.0.1
15362 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15363 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15364 rustc: 1.70.0 stable x86_64-unknown-illumos
15365 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15366 Sep 22 23:14:27.693 INFO listening on 127.0.0.1:0, task: main
15367 Sep 22 23:14:27.693 INFO Repair listens on 127.0.0.1:0, task: repair
15368 Sep 22 23:14:27.693 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15369 Sep 22 23:14:27.693 INFO Using address: 127.0.0.1:49579, task: main
15370 Sep 22 23:14:27.693 INFO [0] connecting to 127.0.0.1:48094, looper: 0
15371 Sep 22 23:14:27.693 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:55042, task: repair
15372 Sep 22 23:14:27.693 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:55042, task: repair
15373 Sep 22 23:14:27.693 INFO listening, local_addr: 127.0.0.1:55042, task: repair
15374 Sep 22 23:14:27.693 INFO [1] connecting to 127.0.0.1:41031, looper: 1
15375 Sep 22 23:14:27.693 INFO current number of open files limit 65536 is already the maximum
15376 Sep 22 23:14:27.693 INFO Repair listens on 127.0.0.1:0, task: repair
15377 Sep 22 23:14:27.693 INFO Opened existing region file "/tmp/downstairs-VxCr6wAp/region.json"
15378 Sep 22 23:14:27.693 INFO Database read version 1
15379 Sep 22 23:14:27.693 INFO Database write version 1
15380 Sep 22 23:14:27.693 INFO [2] connecting to 127.0.0.1:40988, looper: 2
15381 Sep 22 23:14:27.693 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:64895, task: repair
15382 Sep 22 23:14:27.693 INFO up_listen starts, task: up_listen
15383 Sep 22 23:14:27.693 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:55042, task: repair
15384 Sep 22 23:14:27.693 INFO Wait for all three downstairs to come online
15385 Sep 22 23:14:27.693 INFO Flush timeout: 0.5
15386 Sep 22 23:14:27.693 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:64895, task: repair
15387 Sep 22 23:14:27.693 INFO Using repair address: 127.0.0.1:55042, task: main
15388 Sep 22 23:14:27.693 INFO No SSL acceptor configured, task: main
15389 Sep 22 23:14:27.693 INFO listening, local_addr: 127.0.0.1:64895, task: repair
15390 Sep 22 23:14:27.693 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:64895, task: repair
15391 Sep 22 23:14:27.693 INFO accepted connection from 127.0.0.1:50420, task: main
15392 Sep 22 23:14:27.693 INFO Using repair address: 127.0.0.1:64895, task: main
15393 Sep 22 23:14:27.693 INFO No SSL acceptor configured, task: main
15394 Sep 22 23:14:27.694 INFO accepted connection from 127.0.0.1:38244, task: main
15395 Sep 22 23:14:27.694 INFO accepted connection from 127.0.0.1:45738, task: main
15396 Sep 22 23:14:27.694 INFO [0] ce9a8163-4db1-4374-b6d0-604bca1e70ae looper connected, looper: 0
15397 Sep 22 23:14:27.694 INFO UUID: d7e92243-263d-4c5e-b8c0-b3bf2546a302
15398 Sep 22 23:14:27.694 INFO [0] Proc runs for 127.0.0.1:48094 in state New
15399 Sep 22 23:14:27.694 INFO Blocks per extent:5 Total Extents: 2
15400 Sep 22 23:14:27.694 INFO Crucible Version: Crucible Version: 0.0.1
15401 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15402 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15403 rustc: 1.70.0 stable x86_64-unknown-illumos
15404 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15405 Sep 22 23:14:27.694 INFO [1] ce9a8163-4db1-4374-b6d0-604bca1e70ae looper connected, looper: 1
15406 Sep 22 23:14:27.694 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15407 Sep 22 23:14:27.694 INFO Using address: 127.0.0.1:59250, task: main
15408 Sep 22 23:14:27.694 INFO [1] Proc runs for 127.0.0.1:41031 in state New
15409 Sep 22 23:14:27.694 INFO [2] ce9a8163-4db1-4374-b6d0-604bca1e70ae looper connected, looper: 2
15410 Sep 22 23:14:27.694 INFO [2] Proc runs for 127.0.0.1:40988 in state New
15411 Sep 22 23:14:27.694 INFO Repair listens on 127.0.0.1:0, task: repair
15412 Sep 22 23:14:27.694 INFO current number of open files limit 65536 is already the maximum
15413 Sep 22 23:14:27.694 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44662, task: repair
15414 Sep 22 23:14:27.694 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44662, task: repair
15415 Sep 22 23:14:27.694 INFO listening, local_addr: 127.0.0.1:44662, task: repair
15416 Sep 22 23:14:27.694 INFO Created new region file "/tmp/downstairs-6CnpwJQF/region.json"
15417 Sep 22 23:14:27.694 INFO current number of open files limit 65536 is already the maximum
15418 Sep 22 23:14:27.694 INFO Opened existing region file "/tmp/downstairs-w7fd116Q/region.json"
15419 Sep 22 23:14:27.694 INFO Database read version 1
15420 Sep 22 23:14:27.694 INFO Database write version 1
15421 Sep 22 23:14:27.694 INFO Connection request from ce9a8163-4db1-4374-b6d0-604bca1e70ae with version 4, task: proc
15422 Sep 22 23:14:27.694 INFO Upstairs starts
15423 Sep 22 23:14:27.694 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44662, task: repair
15424 Sep 22 23:14:27.694 INFO upstairs UpstairsConnection { upstairs_id: ce9a8163-4db1-4374-b6d0-604bca1e70ae, session_id: 7f38a194-f08c-4be4-9249-679cb6202428, gen: 1 } connected, version 4, task: proc
15425 Sep 22 23:14:27.694 INFO Using repair address: 127.0.0.1:44662, task: main
15426 Sep 22 23:14:27.694 INFO Crucible Version: BuildInfo {
15427 version: "0.0.1",
15428 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
15429 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
15430 git_branch: "main",
15431 rustc_semver: "1.70.0",
15432 rustc_channel: "stable",
15433 rustc_host_triple: "x86_64-unknown-illumos",
15434 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
15435 cargo_triple: "x86_64-unknown-illumos",
15436 debug: true,
15437 opt_level: 0,
15438 }
15439 Sep 22 23:14:27.694 INFO No SSL acceptor configured, task: main
15440 Sep 22 23:14:27.694 INFO Upstairs <-> Downstairs Message Version: 4
15441 Sep 22 23:14:27.694 INFO Crucible stats registered with UUID: acc4dcd4-ace9-4660-952a-342aafe19661
15442 Sep 22 23:14:27.694 INFO Connection request from ce9a8163-4db1-4374-b6d0-604bca1e70ae with version 4, task: proc
15443 Sep 22 23:14:27.694 INFO Crucible acc4dcd4-ace9-4660-952a-342aafe19661 has session id: eee6ab7f-065d-44a7-b1e0-38dcf266c152
15444 Sep 22 23:14:27.694 INFO upstairs UpstairsConnection { upstairs_id: ce9a8163-4db1-4374-b6d0-604bca1e70ae, session_id: 7f38a194-f08c-4be4-9249-679cb6202428, gen: 1 } connected, version 4, task: proc
15445 Sep 22 23:14:27.695 INFO Connection request from ce9a8163-4db1-4374-b6d0-604bca1e70ae with version 4, task: proc
15446 Sep 22 23:14:27.695 INFO upstairs UpstairsConnection { upstairs_id: ce9a8163-4db1-4374-b6d0-604bca1e70ae, session_id: 7f38a194-f08c-4be4-9249-679cb6202428, gen: 1 } connected, version 4, task: proc
15447 Sep 22 23:14:27.695 INFO listening on 127.0.0.1:0, task: main
15448 Sep 22 23:14:27.695 INFO listening on 127.0.0.1:0, task: main
15449 Sep 22 23:14:27.695 INFO listening on 127.0.0.1:0, task: main
15450 Sep 22 23:14:27.695 INFO current number of open files limit 65536 is already the maximum
15451 Sep 22 23:14:27.695 INFO [0] connecting to 127.0.0.1:63965, looper: 0
15452 The guest has requested activation
15453 Sep 22 23:14:27.695 INFO Created new region file "/tmp/downstairs-bF0aQCxJ/region.json"
15454 Sep 22 23:14:27.695 INFO [0] ce9a8163-4db1-4374-b6d0-604bca1e70ae (7f38a194-f08c-4be4-9249-679cb6202428) New New New ds_transition to WaitActive
15455 Sep 22 23:14:27.695 INFO [0] Transition from New to WaitActive
15456 Sep 22 23:14:27.695 INFO [1] connecting to 127.0.0.1:34981, looper: 1
15457 Sep 22 23:14:27.695 INFO [1] ce9a8163-4db1-4374-b6d0-604bca1e70ae (7f38a194-f08c-4be4-9249-679cb6202428) WaitActive New New ds_transition to WaitActive
15458 Sep 22 23:14:27.695 INFO [1] Transition from New to WaitActive
15459 Sep 22 23:14:27.695 INFO [2] connecting to 127.0.0.1:39833, looper: 2
15460 Sep 22 23:14:27.695 INFO [2] ce9a8163-4db1-4374-b6d0-604bca1e70ae (7f38a194-f08c-4be4-9249-679cb6202428) WaitActive WaitActive New ds_transition to WaitActive
15461 Sep 22 23:14:27.695 INFO UUID: 61ba2500-0b28-4437-bb9f-9b380dfcca32
15462 Sep 22 23:14:27.695 INFO Blocks per extent:5 Total Extents: 2
15463 Sep 22 23:14:27.695 INFO [2] Transition from New to WaitActive
15464 Sep 22 23:14:27.695 INFO up_listen starts, task: up_listen
15465 Sep 22 23:14:27.695 INFO ce9a8163-4db1-4374-b6d0-604bca1e70ae active request set
15466 Sep 22 23:14:27.695 INFO Wait for all three downstairs to come online
15467 Sep 22 23:14:27.695 INFO Flush timeout: 0.5
15468 Sep 22 23:14:27.695 INFO [0] received activate with gen 1
15469 Sep 22 23:14:27.695 INFO [0] client got ds_active_rx, promote! session 7f38a194-f08c-4be4-9249-679cb6202428
15470 Sep 22 23:14:27.695 INFO Crucible Version: Crucible Version: 0.0.1
15471 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15472 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15473 rustc: 1.70.0 stable x86_64-unknown-illumos
15474 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15475 Sep 22 23:14:27.695 INFO [1] received activate with gen 1
15476 Sep 22 23:14:27.695 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15477 Sep 22 23:14:27.695 INFO [1] client got ds_active_rx, promote! session 7f38a194-f08c-4be4-9249-679cb6202428
15478 Sep 22 23:14:27.695 INFO Using address: 127.0.0.1:45743, task: main
15479 Sep 22 23:14:27.695 INFO [2] received activate with gen 1
15480 Sep 22 23:14:27.695 INFO [0] acc4dcd4-ace9-4660-952a-342aafe19661 looper connected, looper: 0
15481 Sep 22 23:14:27.695 INFO [2] client got ds_active_rx, promote! session 7f38a194-f08c-4be4-9249-679cb6202428
15482 Sep 22 23:14:27.695 INFO [0] Proc runs for 127.0.0.1:63965 in state New
15483 Sep 22 23:14:27.696 INFO UpstairsConnection { upstairs_id: ce9a8163-4db1-4374-b6d0-604bca1e70ae, session_id: 7f38a194-f08c-4be4-9249-679cb6202428, gen: 1 } is now active (read-write)
15484 Sep 22 23:14:27.696 INFO accepted connection from 127.0.0.1:56306, task: main
15485 Sep 22 23:14:27.696 INFO UpstairsConnection { upstairs_id: ce9a8163-4db1-4374-b6d0-604bca1e70ae, session_id: 7f38a194-f08c-4be4-9249-679cb6202428, gen: 1 } is now active (read-write)
15486 Sep 22 23:14:27.696 INFO [1] acc4dcd4-ace9-4660-952a-342aafe19661 looper connected, looper: 1
15487 Sep 22 23:14:27.696 INFO [1] Proc runs for 127.0.0.1:34981 in state New
15488 Sep 22 23:14:27.696 INFO Repair listens on 127.0.0.1:0, task: repair
15489 Sep 22 23:14:27.696 INFO UpstairsConnection { upstairs_id: ce9a8163-4db1-4374-b6d0-604bca1e70ae, session_id: 7f38a194-f08c-4be4-9249-679cb6202428, gen: 1 } is now active (read-write)
15490 Sep 22 23:14:27.696 INFO accepted connection from 127.0.0.1:52527, task: main
15491 Sep 22 23:14:27.696 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:42007, task: repair
15492 Sep 22 23:14:27.696 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:42007, task: repair
15493 Sep 22 23:14:27.696 INFO accepted connection from 127.0.0.1:62168, task: main
15494 Sep 22 23:14:27.696 INFO listening, local_addr: 127.0.0.1:42007, task: repair
15495 Sep 22 23:14:27.696 INFO [2] acc4dcd4-ace9-4660-952a-342aafe19661 looper connected, looper: 2
15496 Sep 22 23:14:27.696 INFO [2] Proc runs for 127.0.0.1:39833 in state New
15497 Sep 22 23:14:27.696 INFO [0] downstairs client at 127.0.0.1:48094 has UUID a57d7c84-72de-4915-b41c-7b731748ffdd
15498 Sep 22 23:14:27.696 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:42007, task: repair
15499 Sep 22 23:14:27.696 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a57d7c84-72de-4915-b41c-7b731748ffdd, encrypted: true, database_read_version: 1, database_write_version: 1 }
15500 Sep 22 23:14:27.696 INFO Using repair address: 127.0.0.1:42007, task: main
15501 Sep 22 23:14:27.696 INFO ce9a8163-4db1-4374-b6d0-604bca1e70ae WaitActive WaitActive WaitActive
15502 Sep 22 23:14:27.696 INFO No SSL acceptor configured, task: main
15503 Sep 22 23:14:27.696 INFO [1] downstairs client at 127.0.0.1:41031 has UUID 287a3e0c-04cc-4f00-8245-e0cea8ea9ef0
15504 Sep 22 23:14:27.696 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 287a3e0c-04cc-4f00-8245-e0cea8ea9ef0, encrypted: true, database_read_version: 1, database_write_version: 1 }
15505 Sep 22 23:14:27.696 INFO ce9a8163-4db1-4374-b6d0-604bca1e70ae WaitActive WaitActive WaitActive
15506 Sep 22 23:14:27.696 INFO [2] downstairs client at 127.0.0.1:40988 has UUID 3e5c0f56-3bd3-4ecd-befe-102504a91370
15507 Sep 22 23:14:27.696 INFO Connection request from acc4dcd4-ace9-4660-952a-342aafe19661 with version 4, task: proc
15508 Sep 22 23:14:27.696 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 3e5c0f56-3bd3-4ecd-befe-102504a91370, encrypted: true, database_read_version: 1, database_write_version: 1 }
15509 Sep 22 23:14:27.696 INFO upstairs UpstairsConnection { upstairs_id: acc4dcd4-ace9-4660-952a-342aafe19661, session_id: f9138689-ed0d-4ebb-bf00-e5ccee3ed000, gen: 1 } connected, version 4, task: proc
15510 Sep 22 23:14:27.696 INFO ce9a8163-4db1-4374-b6d0-604bca1e70ae WaitActive WaitActive WaitActive
15511 Sep 22 23:14:27.696 INFO current number of open files limit 65536 is already the maximum
15512 Sep 22 23:14:27.696 INFO Connection request from acc4dcd4-ace9-4660-952a-342aafe19661 with version 4, task: proc
15513 Sep 22 23:14:27.696 INFO upstairs UpstairsConnection { upstairs_id: acc4dcd4-ace9-4660-952a-342aafe19661, session_id: f9138689-ed0d-4ebb-bf00-e5ccee3ed000, gen: 1 } connected, version 4, task: proc
15514 Sep 22 23:14:27.696 INFO Created new region file "/tmp/downstairs-sXZCeOZr/region.json"
15515 Sep 22 23:14:27.697 INFO Connection request from acc4dcd4-ace9-4660-952a-342aafe19661 with version 4, task: proc
15516 Sep 22 23:14:27.697 INFO upstairs UpstairsConnection { upstairs_id: acc4dcd4-ace9-4660-952a-342aafe19661, session_id: f9138689-ed0d-4ebb-bf00-e5ccee3ed000, gen: 1 } connected, version 4, task: proc
15517 Sep 22 23:14:27.697 INFO UUID: 1e03bcc8-571c-4b44-a9e5-acc5366adf71
15518 Sep 22 23:14:27.697 INFO Blocks per extent:5 Total Extents: 2
15519 Sep 22 23:14:27.697 INFO Current flush_numbers [0..12]: [0, 0]
15520 Sep 22 23:14:27.697 INFO [0] acc4dcd4-ace9-4660-952a-342aafe19661 (f9138689-ed0d-4ebb-bf00-e5ccee3ed000) New New New ds_transition to WaitActive
15521 Sep 22 23:14:27.697 INFO [0] Transition from New to WaitActive
15522 Sep 22 23:14:27.697 INFO Crucible Version: Crucible Version: 0.0.1
15523 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15524 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15525 rustc: 1.70.0 stable x86_64-unknown-illumos
15526 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15527 Sep 22 23:14:27.697 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15528 Sep 22 23:14:27.697 INFO Using address: 127.0.0.1:41704, task: main
15529 Sep 22 23:14:27.697 INFO [1] acc4dcd4-ace9-4660-952a-342aafe19661 (f9138689-ed0d-4ebb-bf00-e5ccee3ed000) WaitActive New New ds_transition to WaitActive
15530 Sep 22 23:14:27.697 INFO [1] Transition from New to WaitActive
15531 Sep 22 23:14:27.697 INFO Downstairs has completed Negotiation, task: proc
15532 Sep 22 23:14:27.697 INFO [2] acc4dcd4-ace9-4660-952a-342aafe19661 (f9138689-ed0d-4ebb-bf00-e5ccee3ed000) WaitActive WaitActive New ds_transition to WaitActive
15533 Sep 22 23:14:27.697 INFO [2] Transition from New to WaitActive
15534 Sep 22 23:14:27.697 INFO Current flush_numbers [0..12]: [0, 0]
15535 The guest has requested activation
15536 Sep 22 23:14:27.697 INFO acc4dcd4-ace9-4660-952a-342aafe19661 active request set
15537 Sep 22 23:14:27.697 INFO Repair listens on 127.0.0.1:0, task: repair
15538 Sep 22 23:14:27.697 INFO [0] received activate with gen 1
15539 Sep 22 23:14:27.697 INFO [0] client got ds_active_rx, promote! session f9138689-ed0d-4ebb-bf00-e5ccee3ed000
15540 Sep 22 23:14:27.697 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57699, task: repair
15541 Sep 22 23:14:27.697 INFO Downstairs has completed Negotiation, task: proc
15542 Sep 22 23:14:27.697 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57699, task: repair
15543 Sep 22 23:14:27.697 INFO [1] received activate with gen 1
15544 Sep 22 23:14:27.697 INFO [1] client got ds_active_rx, promote! session f9138689-ed0d-4ebb-bf00-e5ccee3ed000
15545 Sep 22 23:14:27.697 INFO listening, local_addr: 127.0.0.1:57699, task: repair
15546 Sep 22 23:14:27.698 INFO [2] received activate with gen 1
15547 Sep 22 23:14:27.698 INFO Current flush_numbers [0..12]: [0, 0]
15548 Sep 22 23:14:27.698 INFO [2] client got ds_active_rx, promote! session f9138689-ed0d-4ebb-bf00-e5ccee3ed000
15549 Sep 22 23:14:27.698 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57699, task: repair
15550 Sep 22 23:14:27.698 INFO Downstairs has completed Negotiation, task: proc
15551 Sep 22 23:14:27.698 INFO Using repair address: 127.0.0.1:57699, task: main
15552 Sep 22 23:14:27.698 INFO No SSL acceptor configured, task: main
15553 Sep 22 23:14:27.698 INFO UpstairsConnection { upstairs_id: acc4dcd4-ace9-4660-952a-342aafe19661, session_id: f9138689-ed0d-4ebb-bf00-e5ccee3ed000, gen: 1 } is now active (read-write)
15554 Sep 22 23:14:27.698 INFO [0] ce9a8163-4db1-4374-b6d0-604bca1e70ae (7f38a194-f08c-4be4-9249-679cb6202428) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
15555 Sep 22 23:14:27.698 INFO [0] Transition from WaitActive to WaitQuorum
15556 Sep 22 23:14:27.698 WARN [0] new RM replaced this: None
15557 Sep 22 23:14:27.698 INFO UpstairsConnection { upstairs_id: acc4dcd4-ace9-4660-952a-342aafe19661, session_id: f9138689-ed0d-4ebb-bf00-e5ccee3ed000, gen: 1 } is now active (read-write)
15558 Sep 22 23:14:27.698 INFO [0] Starts reconcile loop
15559 Sep 22 23:14:27.698 INFO [1] ce9a8163-4db1-4374-b6d0-604bca1e70ae (7f38a194-f08c-4be4-9249-679cb6202428) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
15560 Sep 22 23:14:27.698 INFO [1] Transition from WaitActive to WaitQuorum
15561 Sep 22 23:14:27.698 WARN [1] new RM replaced this: None
15562 Sep 22 23:14:27.698 INFO UpstairsConnection { upstairs_id: acc4dcd4-ace9-4660-952a-342aafe19661, session_id: f9138689-ed0d-4ebb-bf00-e5ccee3ed000, gen: 1 } is now active (read-write)
15563 Sep 22 23:14:27.698 INFO [1] Starts reconcile loop
15564 Sep 22 23:14:27.698 INFO [2] ce9a8163-4db1-4374-b6d0-604bca1e70ae (7f38a194-f08c-4be4-9249-679cb6202428) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
15565 Sep 22 23:14:27.698 INFO [2] Transition from WaitActive to WaitQuorum
15566 Sep 22 23:14:27.698 WARN [2] new RM replaced this: None
15567 Sep 22 23:14:27.698 INFO [2] Starts reconcile loop
15568 Sep 22 23:14:27.698 INFO [0] 127.0.0.1:48094 task reports connection:true
15569 Sep 22 23:14:27.698 INFO current number of open files limit 65536 is already the maximum
15570 Sep 22 23:14:27.698 INFO ce9a8163-4db1-4374-b6d0-604bca1e70ae WaitQuorum WaitQuorum WaitQuorum
15571 Sep 22 23:14:27.698 INFO [0]R flush_numbers: [0, 0]
15572 Sep 22 23:14:27.698 INFO [0]R generation: [0, 0]
15573 Sep 22 23:14:27.698 INFO [0]R dirty: [false, false]
15574 Sep 22 23:14:27.698 INFO [1]R flush_numbers: [0, 0]
15575 Sep 22 23:14:27.698 INFO [1]R generation: [0, 0]
15576 Sep 22 23:14:27.698 INFO [1]R dirty: [false, false]
15577 Sep 22 23:14:27.698 INFO [2]R flush_numbers: [0, 0]
15578 Sep 22 23:14:27.698 INFO [2]R generation: [0, 0]
15579 Sep 22 23:14:27.698 INFO [2]R dirty: [false, false]
15580 Sep 22 23:14:27.698 INFO Created new region file "/tmp/downstairs-cvtdcEe4/region.json"
15581 Sep 22 23:14:27.698 INFO Max found gen is 1
15582 Sep 22 23:14:27.698 INFO Generation requested: 1 >= found:1
15583 Sep 22 23:14:27.698 INFO Next flush: 1
15584 Sep 22 23:14:27.698 INFO All extents match
15585 Sep 22 23:14:27.698 INFO No downstairs repair required
15586 Sep 22 23:14:27.698 INFO No initial repair work was required
15587 Sep 22 23:14:27.698 INFO current number of open files limit 65536 is already the maximum
15588 Sep 22 23:14:27.698 INFO Set Downstairs and Upstairs active
15589 Sep 22 23:14:27.698 INFO Opened existing region file "/tmp/downstairs-bF0aQCxJ/region.json"
15590 Sep 22 23:14:27.698 INFO Database read version 1
15591 Sep 22 23:14:27.698 INFO ce9a8163-4db1-4374-b6d0-604bca1e70ae is now active with session: 7f38a194-f08c-4be4-9249-679cb6202428
15592 Sep 22 23:14:27.698 INFO Database write version 1
15593 Sep 22 23:14:27.698 INFO ce9a8163-4db1-4374-b6d0-604bca1e70ae Set Active after no repair
15594 Sep 22 23:14:27.698 INFO Notify all downstairs, region set compare is done.
15595 Sep 22 23:14:27.698 INFO Set check for repair
15596 Sep 22 23:14:27.698 INFO [1] 127.0.0.1:41031 task reports connection:true
15597 Sep 22 23:14:27.698 INFO ce9a8163-4db1-4374-b6d0-604bca1e70ae Active Active Active
15598 Sep 22 23:14:27.698 INFO [0] downstairs client at 127.0.0.1:63965 has UUID c160bb67-b4be-4c85-a112-7924757e1ad4
15599 Sep 22 23:14:27.699 INFO Set check for repair
15600 Sep 22 23:14:27.699 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c160bb67-b4be-4c85-a112-7924757e1ad4, encrypted: true, database_read_version: 1, database_write_version: 1 }
15601 Sep 22 23:14:27.699 INFO [2] 127.0.0.1:40988 task reports connection:true
15602 Sep 22 23:14:27.699 INFO ce9a8163-4db1-4374-b6d0-604bca1e70ae Active Active Active
15603 Sep 22 23:14:27.699 INFO acc4dcd4-ace9-4660-952a-342aafe19661 WaitActive WaitActive WaitActive
15604 Sep 22 23:14:27.699 INFO Set check for repair
15605 Sep 22 23:14:27.699 INFO [0] received reconcile message
15606 Sep 22 23:14:27.699 INFO [1] downstairs client at 127.0.0.1:34981 has UUID af190b5e-a849-4b2d-90bf-b82d85ee2543
15607 Sep 22 23:14:27.699 INFO [0] All repairs completed, exit
15608 Sep 22 23:14:27.699 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: af190b5e-a849-4b2d-90bf-b82d85ee2543, encrypted: true, database_read_version: 1, database_write_version: 1 }
15609 Sep 22 23:14:27.699 INFO [0] Starts cmd_loop
15610 Sep 22 23:14:27.699 INFO acc4dcd4-ace9-4660-952a-342aafe19661 WaitActive WaitActive WaitActive
15611 Sep 22 23:14:27.699 INFO [1] received reconcile message
15612 Sep 22 23:14:27.699 INFO [1] All repairs completed, exit
15613 Sep 22 23:14:27.699 INFO [1] Starts cmd_loop
15614 Sep 22 23:14:27.699 INFO [2] downstairs client at 127.0.0.1:39833 has UUID 12d88dad-09b4-4e8c-bc38-a95145cf057c
15615 Sep 22 23:14:27.699 INFO current number of open files limit 65536 is already the maximum
15616 Sep 22 23:14:27.699 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 12d88dad-09b4-4e8c-bc38-a95145cf057c, encrypted: true, database_read_version: 1, database_write_version: 1 }
15617 Sep 22 23:14:27.699 INFO Opened existing region file "/tmp/downstairs-6CnpwJQF/region.json"
15618 Sep 22 23:14:27.699 INFO Database read version 1
15619 Sep 22 23:14:27.699 INFO Database write version 1
15620 Sep 22 23:14:27.699 INFO [2] received reconcile message
15621 Sep 22 23:14:27.699 INFO acc4dcd4-ace9-4660-952a-342aafe19661 WaitActive WaitActive WaitActive
15622 Sep 22 23:14:27.699 INFO [2] All repairs completed, exit
15623 Sep 22 23:14:27.699 INFO [2] Starts cmd_loop
15624 The guest has finished waiting for activation
15625 Sep 22 23:14:27.699 INFO Current flush_numbers [0..12]: [0, 0]
15626 Sep 22 23:14:27.699 INFO Downstairs has completed Negotiation, task: proc
15627 Sep 22 23:14:27.699 DEBG IO Read 1000 has deps []
15628 Sep 22 23:14:27.699 INFO Current flush_numbers [0..12]: [0, 0]
15629 Sep 22 23:14:27.700 INFO Downstairs has completed Negotiation, task: proc
15630 Sep 22 23:14:27.700 INFO Current flush_numbers [0..12]: [0, 0]
15631 Sep 22 23:14:27.700 INFO Downstairs has completed Negotiation, task: proc
15632 Sep 22 23:14:27.700 DEBG Read :1000 deps:[] res:true
15633 Sep 22 23:14:27.700 INFO [0] acc4dcd4-ace9-4660-952a-342aafe19661 (f9138689-ed0d-4ebb-bf00-e5ccee3ed000) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
15634 Sep 22 23:14:27.700 INFO [0] Transition from WaitActive to WaitQuorum
15635 Sep 22 23:14:27.700 WARN [0] new RM replaced this: None
15636 Sep 22 23:14:27.700 INFO [0] Starts reconcile loop
15637 Sep 22 23:14:27.700 INFO [1] acc4dcd4-ace9-4660-952a-342aafe19661 (f9138689-ed0d-4ebb-bf00-e5ccee3ed000) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
15638 Sep 22 23:14:27.701 INFO [1] Transition from WaitActive to WaitQuorum
15639 Sep 22 23:14:27.701 WARN [1] new RM replaced this: None
15640 Sep 22 23:14:27.701 INFO [1] Starts reconcile loop
15641 Sep 22 23:14:27.701 INFO [2] acc4dcd4-ace9-4660-952a-342aafe19661 (f9138689-ed0d-4ebb-bf00-e5ccee3ed000) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
15642 Sep 22 23:14:27.701 INFO [2] Transition from WaitActive to WaitQuorum
15643 Sep 22 23:14:27.701 DEBG Read :1000 deps:[] res:true
15644 Sep 22 23:14:27.701 WARN [2] new RM replaced this: None
15645 Sep 22 23:14:27.701 INFO [2] Starts reconcile loop
15646 Sep 22 23:14:27.701 INFO [0] 127.0.0.1:63965 task reports connection:true
15647 Sep 22 23:14:27.701 INFO acc4dcd4-ace9-4660-952a-342aafe19661 WaitQuorum WaitQuorum WaitQuorum
15648 Sep 22 23:14:27.701 INFO [0]R flush_numbers: [0, 0]
15649 Sep 22 23:14:27.701 INFO [0]R generation: [0, 0]
15650 Sep 22 23:14:27.701 INFO [0]R dirty: [false, false]
15651 Sep 22 23:14:27.701 INFO [1]R flush_numbers: [0, 0]
15652 Sep 22 23:14:27.701 INFO [1]R generation: [0, 0]
15653 Sep 22 23:14:27.701 INFO [1]R dirty: [false, false]
15654 Sep 22 23:14:27.701 INFO [2]R flush_numbers: [0, 0]
15655 Sep 22 23:14:27.701 INFO [2]R generation: [0, 0]
15656 Sep 22 23:14:27.701 INFO [2]R dirty: [false, false]
15657 Sep 22 23:14:27.701 INFO Max found gen is 1
15658 Sep 22 23:14:27.701 INFO Generation requested: 1 >= found:1
15659 Sep 22 23:14:27.701 INFO Next flush: 1
15660 Sep 22 23:14:27.701 INFO All extents match
15661 Sep 22 23:14:27.701 INFO No downstairs repair required
15662 Sep 22 23:14:27.701 INFO No initial repair work was required
15663 Sep 22 23:14:27.701 INFO Set Downstairs and Upstairs active
15664 Sep 22 23:14:27.701 INFO acc4dcd4-ace9-4660-952a-342aafe19661 is now active with session: f9138689-ed0d-4ebb-bf00-e5ccee3ed000
15665 Sep 22 23:14:27.701 INFO acc4dcd4-ace9-4660-952a-342aafe19661 Set Active after no repair
15666 Sep 22 23:14:27.701 INFO Notify all downstairs, region set compare is done.
15667 Sep 22 23:14:27.701 INFO Set check for repair
15668 Sep 22 23:14:27.701 INFO [1] 127.0.0.1:34981 task reports connection:true
15669 Sep 22 23:14:27.701 INFO acc4dcd4-ace9-4660-952a-342aafe19661 Active Active Active
15670 Sep 22 23:14:27.701 INFO Set check for repair
15671 Sep 22 23:14:27.701 DEBG Read :1000 deps:[] res:true
15672 Sep 22 23:14:27.701 INFO [2] 127.0.0.1:39833 task reports connection:true
15673 Sep 22 23:14:27.701 INFO acc4dcd4-ace9-4660-952a-342aafe19661 Active Active Active
15674 Sep 22 23:14:27.701 INFO Set check for repair
15675 Sep 22 23:14:27.701 INFO [0] received reconcile message
15676 Sep 22 23:14:27.701 INFO [0] All repairs completed, exit
15677 Sep 22 23:14:27.701 INFO [0] Starts cmd_loop
15678 Sep 22 23:14:27.701 INFO UUID: 6fdc0eed-5ffb-4659-a839-2533f07ab2b0
15679 Sep 22 23:14:27.701 INFO Blocks per extent:5 Total Extents: 2
15680 Sep 22 23:14:27.701 INFO [1] received reconcile message
15681 Sep 22 23:14:27.702 INFO [1] All repairs completed, exit
15682 Sep 22 23:14:27.702 INFO [1] Starts cmd_loop
15683 Sep 22 23:14:27.702 INFO Crucible Version: Crucible Version: 0.0.1
15684 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15685 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15686 rustc: 1.70.0 stable x86_64-unknown-illumos
15687 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15688 Sep 22 23:14:27.702 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15689 Sep 22 23:14:27.702 INFO Using address: 127.0.0.1:37634, task: main
15690 Sep 22 23:14:27.702 INFO current number of open files limit 65536 is already the maximum
15691 Sep 22 23:14:27.702 INFO [2] received reconcile message
15692 Sep 22 23:14:27.702 INFO Opened existing region file "/tmp/downstairs-sXZCeOZr/region.json"
15693 Sep 22 23:14:27.702 INFO Database read version 1
15694 Sep 22 23:14:27.702 INFO [2] All repairs completed, exit
15695 Sep 22 23:14:27.702 INFO Database write version 1
15696 Sep 22 23:14:27.702 INFO [2] Starts cmd_loop
15697 The guest has finished waiting for activation
15698 Sep 22 23:14:27.702 INFO UUID: 7879f4e7-8bf5-479b-b3e8-105e87c0dd1b
15699 Sep 22 23:14:27.702 INFO Blocks per extent:5 Total Extents: 2
15700 Sep 22 23:14:27.702 INFO Repair listens on 127.0.0.1:0, task: repair
15701 Sep 22 23:14:27.702 INFO Crucible Version: Crucible Version: 0.0.1
15702 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15703 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15704 rustc: 1.70.0 stable x86_64-unknown-illumos
15705 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15706 Sep 22 23:14:27.702 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15707 Sep 22 23:14:27.702 INFO Using address: 127.0.0.1:43456, task: main
15708 Sep 22 23:14:27.702 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:41491, task: repair
15709 Sep 22 23:14:27.702 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:41491, task: repair
15710 Sep 22 23:14:27.702 INFO listening, local_addr: 127.0.0.1:41491, task: repair
15711 Sep 22 23:14:27.702 DEBG IO Read 1000 has deps []
15712 Sep 22 23:14:27.702 INFO Repair listens on 127.0.0.1:0, task: repair
15713 Sep 22 23:14:27.702 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:41491, task: repair
15714 Sep 22 23:14:27.702 INFO Using repair address: 127.0.0.1:41491, task: main
15715 Sep 22 23:14:27.702 INFO No SSL acceptor configured, task: main
15716 Sep 22 23:14:27.702 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:56401, task: repair
15717 Sep 22 23:14:27.702 DEBG [0] Read AckReady 1000, : downstairs
15718 Sep 22 23:14:27.702 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:56401, task: repair
15719 Sep 22 23:14:27.703 INFO listening, local_addr: 127.0.0.1:56401, task: repair
15720 Sep 22 23:14:27.703 INFO current number of open files limit 65536 is already the maximum
15721 Sep 22 23:14:27.703 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:56401, task: repair
15722 Sep 22 23:14:27.703 DEBG [1] Read already AckReady 1000, : downstairs
15723 Sep 22 23:14:27.703 INFO Using repair address: 127.0.0.1:56401, task: main
15724 Sep 22 23:14:27.703 INFO Created new region file "/tmp/downstairs-AYTaLROs/region.json"
15725 Sep 22 23:14:27.703 INFO No SSL acceptor configured, task: main
15726 Sep 22 23:14:27.703 DEBG [2] Read already AckReady 1000, : downstairs
15727 Sep 22 23:14:27.703 DEBG up_ds_listen was notified
15728 Sep 22 23:14:27.703 DEBG up_ds_listen process 1000
15729 Sep 22 23:14:27.703 DEBG [A] ack job 1000:1, : downstairs
15730 Sep 22 23:14:27.703 DEBG Read :1000 deps:[] res:true
15731 Sep 22 23:14:27.704 DEBG up_ds_listen checked 1 jobs, back to waiting
15732 Sep 22 23:14:27.704 INFO Upstairs starts
15733 Sep 22 23:14:27.704 INFO Crucible Version: BuildInfo {
15734 version: "0.0.1",
15735 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
15736 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
15737 git_branch: "main",
15738 rustc_semver: "1.70.0",
15739 rustc_channel: "stable",
15740 rustc_host_triple: "x86_64-unknown-illumos",
15741 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
15742 cargo_triple: "x86_64-unknown-illumos",
15743 debug: true,
15744 opt_level: 0,
15745 }
15746 Sep 22 23:14:27.704 INFO Upstairs <-> Downstairs Message Version: 4
15747 Sep 22 23:14:27.704 INFO Crucible stats registered with UUID: 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c
15748 Sep 22 23:14:27.704 INFO Crucible 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c has session id: ea05610f-f165-4ac5-bd8c-225d17886309
15749 Sep 22 23:14:27.704 DEBG Read :1000 deps:[] res:true
15750 Sep 22 23:14:27.704 INFO current number of open files limit 65536 is already the maximum
15751 Sep 22 23:14:27.704 INFO Opened existing region file "/tmp/downstairs-cvtdcEe4/region.json"
15752 Sep 22 23:14:27.704 INFO Database read version 1
15753 Sep 22 23:14:27.704 INFO Database write version 1
15754 Sep 22 23:14:27.704 INFO listening on 127.0.0.1:0, task: main
15755 Sep 22 23:14:27.704 INFO UUID: d2794c71-5659-445d-8650-5f7a1461a2ed
15756 Sep 22 23:14:27.704 INFO Blocks per extent:5 Total Extents: 2
15757 Sep 22 23:14:27.704 INFO listening on 127.0.0.1:0, task: main
15758 Sep 22 23:14:27.704 DEBG Read :1000 deps:[] res:true
15759 Sep 22 23:14:27.704 INFO listening on 127.0.0.1:0, task: main
15760 Sep 22 23:14:27.704 INFO [0] connecting to 127.0.0.1:41733, looper: 0
15761 Sep 22 23:14:27.704 INFO Crucible Version: Crucible Version: 0.0.1
15762 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15763 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15764 rustc: 1.70.0 stable x86_64-unknown-illumos
15765 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15766 Sep 22 23:14:27.704 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15767 Sep 22 23:14:27.704 INFO Using address: 127.0.0.1:33459, task: main
15768 Sep 22 23:14:27.705 INFO [1] connecting to 127.0.0.1:49579, looper: 1
15769 Sep 22 23:14:27.705 INFO [2] connecting to 127.0.0.1:43456, looper: 2
15770 Sep 22 23:14:27.705 INFO Repair listens on 127.0.0.1:0, task: repair
15771 Sep 22 23:14:27.705 INFO up_listen starts, task: up_listen
15772 Sep 22 23:14:27.705 INFO Wait for all three downstairs to come online
15773 Sep 22 23:14:27.705 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35294, task: repair
15774 Sep 22 23:14:27.705 INFO Flush timeout: 0.5
15775 Sep 22 23:14:27.705 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35294, task: repair
15776 Sep 22 23:14:27.705 INFO Scrub check for 1965c3d3-3628-4d17-8184-b7c7fb3d1c3b
15777 Sep 22 23:14:27.705 INFO listening, local_addr: 127.0.0.1:35294, task: repair
15778 Sep 22 23:14:27.705 INFO Scrub for 1965c3d3-3628-4d17-8184-b7c7fb3d1c3b begins
15779 Sep 22 23:14:27.705 INFO Scrub with total_size:5120 block_size:512
15780 Sep 22 23:14:27.705 INFO Scrubs from block 0 to 10 in (256) 131072 size IOs pm:0
15781 Sep 22 23:14:27.705 INFO Adjust block_count to 10 at offset 0
15782 Sep 22 23:14:27.705 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35294, task: repair
15783 Sep 22 23:14:27.705 INFO Using repair address: 127.0.0.1:35294, task: main
15784 Sep 22 23:14:27.705 INFO No SSL acceptor configured, task: main
15785 Sep 22 23:14:27.705 INFO accepted connection from 127.0.0.1:56638, task: main
15786 Sep 22 23:14:27.705 INFO accepted connection from 127.0.0.1:51008, task: main
15787 Sep 22 23:14:27.705 INFO accepted connection from 127.0.0.1:44546, task: main
15788 Sep 22 23:14:27.705 DEBG [0] Read AckReady 1000, : downstairs
15789 Sep 22 23:14:27.705 INFO [0] 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c looper connected, looper: 0
15790 Sep 22 23:14:27.706 INFO [0] Proc runs for 127.0.0.1:41733 in state New
15791 Sep 22 23:14:27.706 DEBG [1] Read already AckReady 1000, : downstairs
15792 Sep 22 23:14:27.706 INFO [2] 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c looper connected, looper: 2
15793 Sep 22 23:14:27.706 INFO [2] Proc runs for 127.0.0.1:43456 in state New
15794 Sep 22 23:14:27.706 INFO current number of open files limit 65536 is already the maximum
15795 Sep 22 23:14:27.706 INFO [1] 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c looper connected, looper: 1
15796 Sep 22 23:14:27.706 INFO [1] Proc runs for 127.0.0.1:49579 in state New
15797 Sep 22 23:14:27.706 DEBG [2] Read already AckReady 1000, : downstairs
15798 Sep 22 23:14:27.706 INFO Created new region file "/tmp/downstairs-dlM6zFZd/region.json"
15799 Sep 22 23:14:27.706 DEBG up_ds_listen was notified
15800 Sep 22 23:14:27.706 DEBG up_ds_listen process 1000
15801 Sep 22 23:14:27.706 DEBG [A] ack job 1000:1, : downstairs
15802 Sep 22 23:14:27.706 INFO Connection request from 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c with version 4, task: proc
15803 Sep 22 23:14:27.706 INFO upstairs UpstairsConnection { upstairs_id: 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c, session_id: d464775d-265a-4fbf-a35a-512bcc66cbcc, gen: 1 } connected, version 4, task: proc
15804 Sep 22 23:14:27.706 INFO Connection request from 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c with version 4, task: proc
15805 Sep 22 23:14:27.706 INFO upstairs UpstairsConnection { upstairs_id: 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c, session_id: d464775d-265a-4fbf-a35a-512bcc66cbcc, gen: 1 } connected, version 4, task: proc
15806 Sep 22 23:14:27.706 INFO Connection request from 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c with version 4, task: proc
15807 Sep 22 23:14:27.706 DEBG up_ds_listen checked 1 jobs, back to waiting
15808 Sep 22 23:14:27.706 INFO upstairs UpstairsConnection { upstairs_id: 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c, session_id: d464775d-265a-4fbf-a35a-512bcc66cbcc, gen: 1 } connected, version 4, task: proc
15809 The guest has requested activation
15810 Sep 22 23:14:27.707 INFO [0] 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c (d464775d-265a-4fbf-a35a-512bcc66cbcc) New New New ds_transition to WaitActive
15811 Sep 22 23:14:27.707 INFO [0] Transition from New to WaitActive
15812 Sep 22 23:14:27.707 INFO UUID: f4dd1f9e-c133-4738-8e4e-8aad0cee7c95
15813 Sep 22 23:14:27.707 INFO Blocks per extent:5 Total Extents: 2
15814 Sep 22 23:14:27.707 INFO current number of open files limit 65536 is already the maximum
15815 Sep 22 23:14:27.707 INFO Opened existing region file "/tmp/downstairs-AYTaLROs/region.json"
15816 Sep 22 23:14:27.707 INFO [2] 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c (d464775d-265a-4fbf-a35a-512bcc66cbcc) WaitActive New New ds_transition to WaitActive
15817 Sep 22 23:14:27.707 INFO Database read version 1
15818 Sep 22 23:14:27.707 INFO Database write version 1
15819 Sep 22 23:14:27.707 INFO [2] Transition from New to WaitActive
15820 Sep 22 23:14:27.707 INFO Crucible Version: Crucible Version: 0.0.1
15821 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15822 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15823 rustc: 1.70.0 stable x86_64-unknown-illumos
15824 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15825 Sep 22 23:14:27.707 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15826 Sep 22 23:14:27.707 INFO Using address: 127.0.0.1:58860, task: main
15827 Sep 22 23:14:27.707 INFO [1] 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c (d464775d-265a-4fbf-a35a-512bcc66cbcc) WaitActive New WaitActive ds_transition to WaitActive
15828 Sep 22 23:14:27.707 INFO [1] Transition from New to WaitActive
15829 Sep 22 23:14:27.707 INFO 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c active request set
15830 Sep 22 23:14:27.707 INFO [0] received activate with gen 1
15831 Sep 22 23:14:27.707 INFO [0] client got ds_active_rx, promote! session d464775d-265a-4fbf-a35a-512bcc66cbcc
15832 Sep 22 23:14:27.707 INFO [1] received activate with gen 1
15833 Sep 22 23:14:27.707 INFO [1] client got ds_active_rx, promote! session d464775d-265a-4fbf-a35a-512bcc66cbcc
15834 Sep 22 23:14:27.707 INFO Repair listens on 127.0.0.1:0, task: repair
15835 Sep 22 23:14:27.707 INFO [2] received activate with gen 1
15836 Sep 22 23:14:27.707 DEBG IO Write 1001 has deps [JobId(1000)]
15837 Sep 22 23:14:27.707 INFO [2] client got ds_active_rx, promote! session d464775d-265a-4fbf-a35a-512bcc66cbcc
15838 Sep 22 23:14:27.707 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:41513, task: repair
15839 Sep 22 23:14:27.707 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:41513, task: repair
15840 Sep 22 23:14:27.707 INFO listening, local_addr: 127.0.0.1:41513, task: repair
15841 Sep 22 23:14:27.707 INFO UpstairsConnection { upstairs_id: 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c, session_id: d464775d-265a-4fbf-a35a-512bcc66cbcc, gen: 1 } is now active (read-write)
15842 Sep 22 23:14:27.708 INFO UpstairsConnection { upstairs_id: 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c, session_id: d464775d-265a-4fbf-a35a-512bcc66cbcc, gen: 1 } is now active (read-write)
15843 Sep 22 23:14:27.708 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:41513, task: repair
15844 Sep 22 23:14:27.708 INFO Using repair address: 127.0.0.1:41513, task: main
15845 Sep 22 23:14:27.708 INFO No SSL acceptor configured, task: main
15846 Sep 22 23:14:27.708 INFO UpstairsConnection { upstairs_id: 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c, session_id: d464775d-265a-4fbf-a35a-512bcc66cbcc, gen: 1 } is now active (read-write)
15847 Sep 22 23:14:27.708 INFO [0] downstairs client at 127.0.0.1:41733 has UUID e624c1bd-6ca3-47e5-9bce-8c13d262fb0f
15848 Sep 22 23:14:27.708 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e624c1bd-6ca3-47e5-9bce-8c13d262fb0f, encrypted: true, database_read_version: 1, database_write_version: 1 }
15849 Sep 22 23:14:27.708 INFO 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c WaitActive WaitActive WaitActive
15850 Sep 22 23:14:27.708 INFO [2] downstairs client at 127.0.0.1:43456 has UUID 7879f4e7-8bf5-479b-b3e8-105e87c0dd1b
15851 Sep 22 23:14:27.708 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 7879f4e7-8bf5-479b-b3e8-105e87c0dd1b, encrypted: true, database_read_version: 1, database_write_version: 1 }
15852 Sep 22 23:14:27.708 INFO 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c WaitActive WaitActive WaitActive
15853 Sep 22 23:14:27.708 INFO [1] downstairs client at 127.0.0.1:49579 has UUID c415f140-2b86-4861-a661-215ad49ee0b4
15854 Sep 22 23:14:27.708 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c415f140-2b86-4861-a661-215ad49ee0b4, encrypted: true, database_read_version: 1, database_write_version: 1 }
15855 Sep 22 23:14:27.709 INFO 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c WaitActive WaitActive WaitActive
15856 Sep 22 23:14:27.709 INFO current number of open files limit 65536 is already the maximum
15857 Sep 22 23:14:27.709 INFO Created new region file "/tmp/downstairs-wC55dsN5/region.json"
15858 Sep 22 23:14:27.709 INFO UUID: b556d7b2-2d5c-488b-aa20-a0b62e7ef0f0
15859 Sep 22 23:14:27.709 INFO Current flush_numbers [0..12]: [0, 0]
15860 Sep 22 23:14:27.709 INFO Blocks per extent:5 Total Extents: 2
15861 Sep 22 23:14:27.709 INFO Crucible Version: Crucible Version: 0.0.1
15862 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
15863 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
15864 rustc: 1.70.0 stable x86_64-unknown-illumos
15865 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
15866 Sep 22 23:14:27.709 INFO Upstairs <-> Downstairs Message Version: 4, task: main
15867 Sep 22 23:14:27.709 INFO Using address: 127.0.0.1:62359, task: main
15868 Sep 22 23:14:27.709 INFO Downstairs has completed Negotiation, task: proc
15869 Sep 22 23:14:27.709 DEBG IO Write 1001 has deps [JobId(1000)]
15870 Sep 22 23:14:27.709 INFO Current flush_numbers [0..12]: [0, 0]
15871 Sep 22 23:14:27.709 INFO Repair listens on 127.0.0.1:0, task: repair
15872 Sep 22 23:14:27.709 DEBG up_ds_listen was notified
15873 Sep 22 23:14:27.709 DEBG up_ds_listen process 1001
15874 Sep 22 23:14:27.709 DEBG [A] ack job 1001:2, : downstairs
15875 Sep 22 23:14:27.709 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:65391, task: repair
15876 Sep 22 23:14:27.709 DEBG up_ds_listen checked 1 jobs, back to waiting
15877 Sep 22 23:14:27.709 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:65391, task: repair
15878 Sep 22 23:14:27.709 INFO listening, local_addr: 127.0.0.1:65391, task: repair
15879 Sep 22 23:14:27.709 INFO Downstairs has completed Negotiation, task: proc
15880 Sep 22 23:14:27.710 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:65391, task: repair
15881 Sep 22 23:14:27.710 INFO Using repair address: 127.0.0.1:65391, task: main
15882 Sep 22 23:14:27.710 INFO No SSL acceptor configured, task: main
15883 Sep 22 23:14:27.710 INFO Current flush_numbers [0..12]: [0, 0]
15884 Sep 22 23:14:27.710 INFO Downstairs has completed Negotiation, task: proc
15885 Sep 22 23:14:27.710 INFO [0] 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c (d464775d-265a-4fbf-a35a-512bcc66cbcc) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
15886 Sep 22 23:14:27.710 INFO [0] Transition from WaitActive to WaitQuorum
15887 Sep 22 23:14:27.710 WARN [0] new RM replaced this: None
15888 Sep 22 23:14:27.710 INFO [0] Starts reconcile loop
15889 Sep 22 23:14:27.710 INFO [2] 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c (d464775d-265a-4fbf-a35a-512bcc66cbcc) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
15890 Sep 22 23:14:27.710 INFO [2] Transition from WaitActive to WaitQuorum
15891 Sep 22 23:14:27.710 WARN [2] new RM replaced this: None
15892 Sep 22 23:14:27.710 INFO [2] Starts reconcile loop
15893 Sep 22 23:14:27.710 INFO Upstairs starts
15894 Sep 22 23:14:27.710 INFO [1] 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c (d464775d-265a-4fbf-a35a-512bcc66cbcc) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum
15895 Sep 22 23:14:27.710 INFO Crucible Version: BuildInfo {
15896 version: "0.0.1",
15897 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
15898 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
15899 git_branch: "main",
15900 rustc_semver: "1.70.0",
15901 rustc_channel: "stable",
15902 rustc_host_triple: "x86_64-unknown-illumos",
15903 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
15904 cargo_triple: "x86_64-unknown-illumos",
15905 debug: true,
15906 opt_level: 0,
15907 }
15908 Sep 22 23:14:27.710 INFO [1] Transition from WaitActive to WaitQuorum
15909 Sep 22 23:14:27.710 INFO Upstairs <-> Downstairs Message Version: 4
15910 Sep 22 23:14:27.710 WARN [1] new RM replaced this: None
15911 Sep 22 23:14:27.711 INFO [1] Starts reconcile loop
15912 Sep 22 23:14:27.711 INFO Crucible stats registered with UUID: adfa7dbe-ca19-48c5-8ab5-25a64ef3a728
15913 Sep 22 23:14:27.711 INFO current number of open files limit 65536 is already the maximum
15914 Sep 22 23:14:27.711 INFO Crucible adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 has session id: 28156900-b93f-495a-b8e4-f44cac9dd065
15915 Sep 22 23:14:27.711 INFO Opened existing region file "/tmp/downstairs-dlM6zFZd/region.json"
15916 Sep 22 23:14:27.711 INFO [0] 127.0.0.1:41733 task reports connection:true
15917 Sep 22 23:14:27.711 INFO Database read version 1
15918 Sep 22 23:14:27.711 INFO 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c WaitQuorum WaitQuorum WaitQuorum
15919 Sep 22 23:14:27.711 INFO Database write version 1
15920 Sep 22 23:14:27.711 INFO [0]R flush_numbers: [0, 0]
15921 Sep 22 23:14:27.711 INFO [0]R generation: [0, 0]
15922 Sep 22 23:14:27.711 INFO [0]R dirty: [false, false]
15923 Sep 22 23:14:27.711 INFO [1]R flush_numbers: [0, 0]
15924 Sep 22 23:14:27.711 INFO [1]R generation: [0, 0]
15925 Sep 22 23:14:27.711 INFO [1]R dirty: [false, false]
15926 Sep 22 23:14:27.711 INFO [2]R flush_numbers: [0, 0]
15927 Sep 22 23:14:27.711 INFO [2]R generation: [0, 0]
15928 Sep 22 23:14:27.711 INFO [2]R dirty: [false, false]
15929 Sep 22 23:14:27.711 INFO Max found gen is 1
15930 Sep 22 23:14:27.711 INFO Generation requested: 1 >= found:1
15931 Sep 22 23:14:27.711 INFO Next flush: 1
15932 Sep 22 23:14:27.711 INFO All extents match
15933 Sep 22 23:14:27.711 INFO No downstairs repair required
15934 Sep 22 23:14:27.711 INFO No initial repair work was required
15935 Sep 22 23:14:27.711 INFO Set Downstairs and Upstairs active
15936 Sep 22 23:14:27.711 INFO listening on 127.0.0.1:0, task: main
15937 Sep 22 23:14:27.711 INFO 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c is now active with session: d464775d-265a-4fbf-a35a-512bcc66cbcc
15938 Sep 22 23:14:27.711 INFO 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c Set Active after no repair
15939 Sep 22 23:14:27.711 INFO Notify all downstairs, region set compare is done.
15940 Sep 22 23:14:27.711 INFO listening on 127.0.0.1:0, task: main
15941 Sep 22 23:14:27.711 INFO Set check for repair
15942 Sep 22 23:14:27.711 INFO listening on 127.0.0.1:0, task: main
15943 Sep 22 23:14:27.711 INFO [2] 127.0.0.1:43456 task reports connection:true
15944 Sep 22 23:14:27.711 INFO 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c Active Active Active
15945 Sep 22 23:14:27.711 INFO [0] connecting to 127.0.0.1:59250, looper: 0
15946 Sep 22 23:14:27.711 INFO Set check for repair
15947 Sep 22 23:14:27.711 INFO [1] 127.0.0.1:49579 task reports connection:true
15948 Sep 22 23:14:27.711 INFO 07dcd1dc-bd26-48c4-9d8c-b1cefc78367c Active Active Active
15949 Sep 22 23:14:27.711 INFO Set check for repair
15950 Sep 22 23:14:27.711 INFO [1] connecting to 127.0.0.1:37634, looper: 1
15951 Sep 22 23:14:27.711 INFO [0] received reconcile message
15952 Sep 22 23:14:27.711 INFO [0] All repairs completed, exit
15953 Sep 22 23:14:27.711 INFO [0] Starts cmd_loop
15954 Sep 22 23:14:27.711 INFO [2] connecting to 127.0.0.1:62359, looper: 2
15955 Sep 22 23:14:27.711 INFO [1] received reconcile message
15956 Sep 22 23:14:27.711 INFO [1] All repairs completed, exit
15957 Sep 22 23:14:27.712 INFO up_listen starts, task: up_listen
15958 Sep 22 23:14:27.712 INFO Wait for all three downstairs to come online
15959 Sep 22 23:14:27.712 INFO Flush timeout: 0.5
15960 Sep 22 23:14:27.712 INFO [1] Starts cmd_loop
15961 Sep 22 23:14:27.712 INFO [2] received reconcile message
15962 Sep 22 23:14:27.712 INFO [2] All repairs completed, exit
15963 Sep 22 23:14:27.712 INFO [2] Starts cmd_loop
15964 Sep 22 23:14:27.712 DEBG up_ds_listen was notified
15965 The guest has finished waiting for activation
15966 Sep 22 23:14:27.712 DEBG up_ds_listen process 1001
15967 Sep 22 23:14:27.712 INFO accepted connection from 127.0.0.1:47782, task: main
15968 Sep 22 23:14:27.712 DEBG [A] ack job 1001:2, : downstairs
15969 Sep 22 23:14:27.712 DEBG up_ds_listen checked 1 jobs, back to waiting
15970 Sep 22 23:14:27.712 INFO accepted connection from 127.0.0.1:51530, task: main
15971 Sep 22 23:14:27.712 INFO accepted connection from 127.0.0.1:64754, task: main
15972 Sep 22 23:14:27.712 INFO Scrub at offset 10/10 sp:10
15973 Sep 22 23:14:27.712 INFO [0] adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 looper connected, looper: 0
15974 Sep 22 23:14:27.712 INFO [0] Proc runs for 127.0.0.1:59250 in state New
15975 Sep 22 23:14:27.712 INFO current number of open files limit 65536 is already the maximum
15976 Sep 22 23:14:27.712 INFO Opened existing region file "/tmp/downstairs-wC55dsN5/region.json"
15977 Sep 22 23:14:27.712 INFO Database read version 1
15978 Sep 22 23:14:27.712 INFO Database write version 1
15979 Sep 22 23:14:27.712 INFO [1] adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 looper connected, looper: 1
15980 Sep 22 23:14:27.712 DEBG IO Read 1000 has deps []
15981 Sep 22 23:14:27.712 INFO [1] Proc runs for 127.0.0.1:37634 in state New
15982 Sep 22 23:14:27.712 INFO [2] adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 looper connected, looper: 2
15983 Sep 22 23:14:27.712 INFO [2] Proc runs for 127.0.0.1:62359 in state New
15984 Sep 22 23:14:27.713 INFO Connection request from adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 with version 4, task: proc
15985 Sep 22 23:14:27.713 INFO upstairs UpstairsConnection { upstairs_id: adfa7dbe-ca19-48c5-8ab5-25a64ef3a728, session_id: 2dd2b5b0-82b1-4771-854f-f648a24d146f, gen: 1 } connected, version 4, task: proc
15986 Sep 22 23:14:27.713 INFO Connection request from adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 with version 4, task: proc
15987 Sep 22 23:14:27.713 INFO upstairs UpstairsConnection { upstairs_id: adfa7dbe-ca19-48c5-8ab5-25a64ef3a728, session_id: 2dd2b5b0-82b1-4771-854f-f648a24d146f, gen: 1 } connected, version 4, task: proc
15988 Sep 22 23:14:27.713 DEBG Write :1001 deps:[JobId(1000)] res:true
15989 Sep 22 23:14:27.713 INFO Connection request from adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 with version 4, task: proc
15990 Sep 22 23:14:27.713 INFO upstairs UpstairsConnection { upstairs_id: adfa7dbe-ca19-48c5-8ab5-25a64ef3a728, session_id: 2dd2b5b0-82b1-4771-854f-f648a24d146f, gen: 1 } connected, version 4, task: proc
15991 The guest has requested activation
15992 Sep 22 23:14:27.713 DEBG Read :1000 deps:[] res:true
15993 Sep 22 23:14:27.713 INFO Scrub 1965c3d3-3628-4d17-8184-b7c7fb3d1c3b done in 0 seconds. Retries:0 scrub_size:5120 size:10 pause_milli:0
15994 Sep 22 23:14:27.713 INFO UUID: a9ddcb17-2090-44c0-b9dd-e7df3a8e01e7
15995 Sep 22 23:14:27.713 INFO Blocks per extent:5 Total Extents: 2
15996 Sep 22 23:14:27.713 INFO [0] adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 (2dd2b5b0-82b1-4771-854f-f648a24d146f) New New New ds_transition to WaitActive
15997 Sep 22 23:14:27.713 INFO [0] Transition from New to WaitActive
15998 Sep 22 23:14:27.713 DEBG IO Flush 1002 has deps [JobId(1001), JobId(1000)]
15999 Sep 22 23:14:27.713 INFO Crucible Version: Crucible Version: 0.0.1
16000 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16001 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16002 rustc: 1.70.0 stable x86_64-unknown-illumos
16003 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16004 Sep 22 23:14:27.713 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16005 Sep 22 23:14:27.713 INFO Using address: 127.0.0.1:60290, task: main
16006 Sep 22 23:14:27.713 INFO [1] adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 (2dd2b5b0-82b1-4771-854f-f648a24d146f) WaitActive New New ds_transition to WaitActive
16007 Sep 22 23:14:27.713 INFO [1] Transition from New to WaitActive
16008 Sep 22 23:14:27.713 INFO [2] adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 (2dd2b5b0-82b1-4771-854f-f648a24d146f) WaitActive WaitActive New ds_transition to WaitActive
16009 Sep 22 23:14:27.713 INFO [2] Transition from New to WaitActive
16010 Sep 22 23:14:27.714 INFO adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 active request set
16011 Sep 22 23:14:27.714 INFO [0] received activate with gen 1
16012 Sep 22 23:14:27.714 INFO [0] client got ds_active_rx, promote! session 2dd2b5b0-82b1-4771-854f-f648a24d146f
16013 Sep 22 23:14:27.714 INFO Repair listens on 127.0.0.1:0, task: repair
16014 Sep 22 23:14:27.714 DEBG Read :1000 deps:[] res:true
16015 Sep 22 23:14:27.714 INFO [1] received activate with gen 1
16016 Sep 22 23:14:27.714 INFO [1] client got ds_active_rx, promote! session 2dd2b5b0-82b1-4771-854f-f648a24d146f
16017 Sep 22 23:14:27.714 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:63842, task: repair
16018 Sep 22 23:14:27.714 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:63842, task: repair
16019 Sep 22 23:14:27.714 INFO [2] received activate with gen 1
16020 Sep 22 23:14:27.714 DEBG Write :1001 deps:[JobId(1000)] res:true
16021 Sep 22 23:14:27.714 INFO [2] client got ds_active_rx, promote! session 2dd2b5b0-82b1-4771-854f-f648a24d146f
16022 Sep 22 23:14:27.714 INFO listening, local_addr: 127.0.0.1:63842, task: repair
16023 Sep 22 23:14:27.714 INFO UpstairsConnection { upstairs_id: adfa7dbe-ca19-48c5-8ab5-25a64ef3a728, session_id: 2dd2b5b0-82b1-4771-854f-f648a24d146f, gen: 1 } is now active (read-write)
16024 Sep 22 23:14:27.714 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:63842, task: repair
16025 Sep 22 23:14:27.714 INFO Using repair address: 127.0.0.1:63842, task: main
16026 Sep 22 23:14:27.714 DEBG Read :1000 deps:[] res:true
16027 Sep 22 23:14:27.714 INFO UpstairsConnection { upstairs_id: adfa7dbe-ca19-48c5-8ab5-25a64ef3a728, session_id: 2dd2b5b0-82b1-4771-854f-f648a24d146f, gen: 1 } is now active (read-write)
16028 Sep 22 23:14:27.714 INFO No SSL acceptor configured, task: main
16029 Sep 22 23:14:27.714 INFO UUID: e9a979b4-d43f-404b-9e3f-1f402040beec
16030 Sep 22 23:14:27.714 INFO Blocks per extent:5 Total Extents: 2
16031 Sep 22 23:14:27.714 INFO UpstairsConnection { upstairs_id: adfa7dbe-ca19-48c5-8ab5-25a64ef3a728, session_id: 2dd2b5b0-82b1-4771-854f-f648a24d146f, gen: 1 } is now active (read-write)
16032 Sep 22 23:14:27.714 INFO Crucible Version: Crucible Version: 0.0.1
16033 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16034 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16035 rustc: 1.70.0 stable x86_64-unknown-illumos
16036 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16037 Sep 22 23:14:27.714 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16038 Sep 22 23:14:27.714 INFO Using address: 127.0.0.1:37936, task: main
16039 Sep 22 23:14:27.715 INFO Repair listens on 127.0.0.1:0, task: repair
16040 Sep 22 23:14:27.715 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60163, task: repair
16041 Sep 22 23:14:27.715 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60163, task: repair
16042 Sep 22 23:14:27.715 INFO listening, local_addr: 127.0.0.1:60163, task: repair
16043 Sep 22 23:14:27.715 INFO [0] downstairs client at 127.0.0.1:59250 has UUID d7e92243-263d-4c5e-b8c0-b3bf2546a302
16044 Sep 22 23:14:27.715 DEBG Write :1001 deps:[JobId(1000)] res:true
16045 Sep 22 23:14:27.715 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d7e92243-263d-4c5e-b8c0-b3bf2546a302, encrypted: true, database_read_version: 1, database_write_version: 1 }
16046 Sep 22 23:14:27.715 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60163, task: repair
16047 Sep 22 23:14:27.715 INFO adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 WaitActive WaitActive WaitActive
16048 Sep 22 23:14:27.715 INFO Using repair address: 127.0.0.1:60163, task: main
16049 Sep 22 23:14:27.715 INFO No SSL acceptor configured, task: main
16050 Sep 22 23:14:27.715 INFO [1] downstairs client at 127.0.0.1:37634 has UUID 6fdc0eed-5ffb-4659-a839-2533f07ab2b0
16051 Sep 22 23:14:27.715 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6fdc0eed-5ffb-4659-a839-2533f07ab2b0, encrypted: true, database_read_version: 1, database_write_version: 1 }
16052 Sep 22 23:14:27.715 INFO adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 WaitActive WaitActive WaitActive
16053 Sep 22 23:14:27.715 INFO Upstairs starts
16054 Sep 22 23:14:27.715 INFO Crucible Version: BuildInfo {
16055 version: "0.0.1",
16056 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
16057 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
16058 git_branch: "main",
16059 rustc_semver: "1.70.0",
16060 rustc_channel: "stable",
16061 rustc_host_triple: "x86_64-unknown-illumos",
16062 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
16063 cargo_triple: "x86_64-unknown-illumos",
16064 debug: true,
16065 opt_level: 0,
16066 }
16067 Sep 22 23:14:27.715 INFO [2] downstairs client at 127.0.0.1:62359 has UUID b556d7b2-2d5c-488b-aa20-a0b62e7ef0f0
16068 Sep 22 23:14:27.715 INFO Upstairs <-> Downstairs Message Version: 4
16069 Sep 22 23:14:27.715 DEBG IO Read 1002 has deps [JobId(1001)]
16070 Sep 22 23:14:27.715 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b556d7b2-2d5c-488b-aa20-a0b62e7ef0f0, encrypted: true, database_read_version: 1, database_write_version: 1 }
16071 Sep 22 23:14:27.715 INFO Crucible stats registered with UUID: aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7
16072 Sep 22 23:14:27.715 INFO Crucible aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 has session id: c6d467f8-efa1-4cc2-8786-807418938ca1
16073 Sep 22 23:14:27.715 INFO adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 WaitActive WaitActive WaitActive
16074 Sep 22 23:14:27.715 DEBG [0] Read AckReady 1000, : downstairs
16075 Sep 22 23:14:27.715 INFO Current flush_numbers [0..12]: [0, 0]
16076 Sep 22 23:14:27.715 INFO listening on 127.0.0.1:0, task: main
16077 Sep 22 23:14:27.715 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
16078 Sep 22 23:14:27.715 INFO listening on 127.0.0.1:0, task: main
16079 Sep 22 23:14:27.716 DEBG [2] Read already AckReady 1000, : downstairs
16080 Sep 22 23:14:27.716 INFO listening on 127.0.0.1:0, task: main
16081 Sep 22 23:14:27.716 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
16082 Sep 22 23:14:27.716 INFO [0] connecting to 127.0.0.1:45743, looper: 0
16083 Sep 22 23:14:27.716 INFO Downstairs has completed Negotiation, task: proc
16084 Sep 22 23:14:27.716 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
16085 Sep 22 23:14:27.716 DEBG [1] Read already AckReady 1000, : downstairs
16086 Sep 22 23:14:27.716 DEBG up_ds_listen was notified
16087 Sep 22 23:14:27.716 INFO [1] connecting to 127.0.0.1:33459, looper: 1
16088 Sep 22 23:14:27.716 DEBG up_ds_listen process 1000
16089 Sep 22 23:14:27.716 DEBG [A] ack job 1000:1, : downstairs
16090 Sep 22 23:14:27.716 INFO Current flush_numbers [0..12]: [0, 0]
16091 Sep 22 23:14:27.716 INFO [2] connecting to 127.0.0.1:60290, looper: 2
16092 Sep 22 23:14:27.716 INFO Upstairs starts
16093 Sep 22 23:14:27.716 DEBG Read :1002 deps:[JobId(1001)] res:true
16094 Sep 22 23:14:27.716 INFO Crucible Version: BuildInfo {
16095 version: "0.0.1",
16096 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
16097 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
16098 git_branch: "main",
16099 rustc_semver: "1.70.0",
16100 rustc_channel: "stable",
16101 rustc_host_triple: "x86_64-unknown-illumos",
16102 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
16103 cargo_triple: "x86_64-unknown-illumos",
16104 debug: true,
16105 opt_level: 0,
16106 }
16107 Sep 22 23:14:27.716 INFO up_listen starts, task: up_listen
16108 Sep 22 23:14:27.716 INFO Upstairs <-> Downstairs Message Version: 4
16109 Sep 22 23:14:27.716 INFO Wait for all three downstairs to come online
16110 Sep 22 23:14:27.716 INFO Downstairs has completed Negotiation, task: proc
16111 Sep 22 23:14:27.716 INFO Flush timeout: 0.5
16112 Sep 22 23:14:27.716 DEBG up_ds_listen was notified
16113 Sep 22 23:14:27.716 INFO Crucible stats registered with UUID: f134b04d-a090-4ba5-ab19-3a0aa121e3dd
16114 Sep 22 23:14:27.716 DEBG up_ds_listen process 1002
16115 Sep 22 23:14:27.716 INFO Crucible f134b04d-a090-4ba5-ab19-3a0aa121e3dd has session id: a7c9e24d-671e-41c9-86fd-611b200865be
16116 Sep 22 23:14:27.716 DEBG [A] ack job 1002:3, : downstairs
16117 Sep 22 23:14:27.716 DEBG up_ds_listen checked 1 jobs, back to waiting
16118 Sep 22 23:14:27.716 DEBG [rc] retire 1002 clears [JobId(1000), JobId(1001), JobId(1002)], : downstairs
16119 Sep 22 23:14:27.716 DEBG up_ds_listen checked 1 jobs, back to waiting
16120 Sep 22 23:14:27.716 INFO accepted connection from 127.0.0.1:60164, task: main
16121 Sep 22 23:14:27.716 INFO Current flush_numbers [0..12]: [0, 0]
16122 Sep 22 23:14:27.716 DEBG Read :1002 deps:[JobId(1001)] res:true
16123 Sep 22 23:14:27.716 INFO accepted connection from 127.0.0.1:54301, task: main
16124 Sep 22 23:14:27.717 INFO listening on 127.0.0.1:0, task: main
16125 Sep 22 23:14:27.717 INFO listening on 127.0.0.1:0, task: main
16126 Sep 22 23:14:27.717 INFO [0] aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 looper connected, looper: 0
16127 Sep 22 23:14:27.717 INFO [0] Proc runs for 127.0.0.1:45743 in state New
16128 Sep 22 23:14:27.717 INFO listening on 127.0.0.1:0, task: main
16129 Sep 22 23:14:27.717 INFO Downstairs has completed Negotiation, task: proc
16130 Sep 22 23:14:27.717 INFO [0] connecting to 127.0.0.1:41704, looper: 0
16131 Sep 22 23:14:27.717 INFO accepted connection from 127.0.0.1:59457, task: main
16132 Sep 22 23:14:27.717 INFO [1] connecting to 127.0.0.1:58860, looper: 1
16133 Sep 22 23:14:27.717 INFO [0] adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 (2dd2b5b0-82b1-4771-854f-f648a24d146f) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
16134 Sep 22 23:14:27.717 INFO [1] aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 looper connected, looper: 1
16135 Sep 22 23:14:27.717 INFO [0] Transition from WaitActive to WaitQuorum
16136 Sep 22 23:14:27.717 WARN [0] new RM replaced this: None
16137 Sep 22 23:14:27.717 DEBG Read :1002 deps:[JobId(1001)] res:true
16138 Sep 22 23:14:27.717 INFO [1] Proc runs for 127.0.0.1:33459 in state New
16139 Sep 22 23:14:27.717 INFO [0] Starts reconcile loop
16140 Sep 22 23:14:27.717 INFO [2] aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 looper connected, looper: 2
16141 Sep 22 23:14:27.717 INFO [2] connecting to 127.0.0.1:37936, looper: 2
16142 Sep 22 23:14:27.717 INFO [1] adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 (2dd2b5b0-82b1-4771-854f-f648a24d146f) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
16143 Sep 22 23:14:27.717 INFO [2] Proc runs for 127.0.0.1:60290 in state New
16144 Sep 22 23:14:27.717 INFO [1] Transition from WaitActive to WaitQuorum
16145 Sep 22 23:14:27.717 INFO up_listen starts, task: up_listen
16146 Sep 22 23:14:27.717 WARN [1] new RM replaced this: None
16147 Sep 22 23:14:27.717 INFO Wait for all three downstairs to come online
16148 Sep 22 23:14:27.717 INFO Flush timeout: 0.5
16149 Sep 22 23:14:27.717 INFO [1] Starts reconcile loop
16150 Sep 22 23:14:27.717 INFO [2] adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 (2dd2b5b0-82b1-4771-854f-f648a24d146f) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
16151 Sep 22 23:14:27.717 DEBG IO Write 1001 has deps [JobId(1000)]
16152 Sep 22 23:14:27.717 INFO [2] Transition from WaitActive to WaitQuorum
16153 Sep 22 23:14:27.717 WARN [2] new RM replaced this: None
16154 Sep 22 23:14:27.717 INFO [2] Starts reconcile loop
16155 Sep 22 23:14:27.717 DEBG up_ds_listen was notified
16156 Sep 22 23:14:27.717 INFO [0] 127.0.0.1:59250 task reports connection:true
16157 Sep 22 23:14:27.717 DEBG up_ds_listen process 1001
16158 Sep 22 23:14:27.717 INFO adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 WaitQuorum WaitQuorum WaitQuorum
16159 Sep 22 23:14:27.718 DEBG [A] ack job 1001:2, : downstairs
16160 Sep 22 23:14:27.718 INFO [0]R flush_numbers: [0, 0]
16161 Sep 22 23:14:27.718 INFO accepted connection from 127.0.0.1:64265, task: main
16162 Sep 22 23:14:27.718 INFO [0]R generation: [0, 0]
16163 Sep 22 23:14:27.718 DEBG up_ds_listen checked 1 jobs, back to waiting
16164 Sep 22 23:14:27.718 INFO [0]R dirty: [false, false]
16165 Sep 22 23:14:27.718 INFO Connection request from aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 with version 4, task: proc
16166 Sep 22 23:14:27.718 INFO [1]R flush_numbers: [0, 0]
16167 Sep 22 23:14:27.718 INFO [1]R generation: [0, 0]
16168 Sep 22 23:14:27.718 INFO upstairs UpstairsConnection { upstairs_id: aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7, session_id: 37685993-17b9-4e43-bd95-ffd5fc39e9bd, gen: 1 } connected, version 4, task: proc
16169 Sep 22 23:14:27.718 INFO [1]R dirty: [false, false]
16170 Sep 22 23:14:27.718 INFO accepted connection from 127.0.0.1:39421, task: main
16171 Sep 22 23:14:27.718 INFO [2]R flush_numbers: [0, 0]
16172 Sep 22 23:14:27.718 INFO [2]R generation: [0, 0]
16173 Sep 22 23:14:27.718 INFO [2]R dirty: [false, false]
16174 Sep 22 23:14:27.718 INFO Max found gen is 1
16175 Sep 22 23:14:27.718 INFO accepted connection from 127.0.0.1:65012, task: main
16176 Sep 22 23:14:27.718 INFO Connection request from aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 with version 4, task: proc
16177 Sep 22 23:14:27.718 INFO Generation requested: 1 >= found:1
16178 Sep 22 23:14:27.718 INFO Next flush: 1
16179 Sep 22 23:14:27.718 INFO upstairs UpstairsConnection { upstairs_id: aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7, session_id: 37685993-17b9-4e43-bd95-ffd5fc39e9bd, gen: 1 } connected, version 4, task: proc
16180 Sep 22 23:14:27.718 INFO All extents match
16181 Sep 22 23:14:27.718 INFO No downstairs repair required
16182 Sep 22 23:14:27.718 INFO [0] f134b04d-a090-4ba5-ab19-3a0aa121e3dd looper connected, looper: 0
16183 Sep 22 23:14:27.718 INFO No initial repair work was required
16184 Sep 22 23:14:27.718 INFO Set Downstairs and Upstairs active
16185 Sep 22 23:14:27.718 INFO Connection request from aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 with version 4, task: proc
16186 Sep 22 23:14:27.718 INFO [0] Proc runs for 127.0.0.1:41704 in state New
16187 Sep 22 23:14:27.718 INFO adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 is now active with session: 2dd2b5b0-82b1-4771-854f-f648a24d146f
16188 Sep 22 23:14:27.718 INFO upstairs UpstairsConnection { upstairs_id: aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7, session_id: 37685993-17b9-4e43-bd95-ffd5fc39e9bd, gen: 1 } connected, version 4, task: proc
16189 Sep 22 23:14:27.718 INFO adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 Set Active after no repair
16190 Sep 22 23:14:27.718 INFO Notify all downstairs, region set compare is done.
16191 Sep 22 23:14:27.718 INFO Set check for repair
16192 Sep 22 23:14:27.718 INFO [1] f134b04d-a090-4ba5-ab19-3a0aa121e3dd looper connected, looper: 1
16193 Sep 22 23:14:27.718 INFO [1] Proc runs for 127.0.0.1:58860 in state New
16194 Sep 22 23:14:27.718 INFO [1] 127.0.0.1:37634 task reports connection:true
16195 The guest has requested activation
16196 Sep 22 23:14:27.718 INFO adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 Active Active Active
16197 Sep 22 23:14:27.718 INFO Set check for repair
16198 Sep 22 23:14:27.718 INFO [2] f134b04d-a090-4ba5-ab19-3a0aa121e3dd looper connected, looper: 2
16199 Sep 22 23:14:27.718 INFO [2] 127.0.0.1:62359 task reports connection:true
16200 Sep 22 23:14:27.718 INFO [0] aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 (37685993-17b9-4e43-bd95-ffd5fc39e9bd) New New New ds_transition to WaitActive
16201 Sep 22 23:14:27.718 INFO [2] Proc runs for 127.0.0.1:37936 in state New
16202 Sep 22 23:14:27.718 INFO [0] Transition from New to WaitActive
16203 Sep 22 23:14:27.718 INFO adfa7dbe-ca19-48c5-8ab5-25a64ef3a728 Active Active Active
16204 Sep 22 23:14:27.718 INFO Set check for repair
16205 Sep 22 23:14:27.718 INFO Scrub check for a80ca6f9-770a-4008-b246-89463d10a8c7
16206 Sep 22 23:14:27.718 INFO [0] received reconcile message
16207 Sep 22 23:14:27.718 INFO [1] aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 (37685993-17b9-4e43-bd95-ffd5fc39e9bd) WaitActive New New ds_transition to WaitActive
16208 Sep 22 23:14:27.718 INFO Scrub for a80ca6f9-770a-4008-b246-89463d10a8c7 not required
16209 Sep 22 23:14:27.718 INFO [1] Transition from New to WaitActive
16210 Sep 22 23:14:27.718 INFO [0] All repairs completed, exit
16211 Sep 22 23:14:27.718 INFO [0] Starts cmd_loop
16212 Sep 22 23:14:27.718 INFO [2] aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 (37685993-17b9-4e43-bd95-ffd5fc39e9bd) WaitActive WaitActive New ds_transition to WaitActive
16213 Sep 22 23:14:27.718 INFO [2] Transition from New to WaitActive
16214 Sep 22 23:14:27.718 INFO aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 active request set
16215 Sep 22 23:14:27.718 INFO [1] received reconcile message
16216 Sep 22 23:14:27.718 INFO [1] All repairs completed, exit
16217 Sep 22 23:14:27.718 INFO [0] received activate with gen 1
16218 Sep 22 23:14:27.718 INFO [0] client got ds_active_rx, promote! session 37685993-17b9-4e43-bd95-ffd5fc39e9bd
16219 Sep 22 23:14:27.718 INFO [1] Starts cmd_loop
16220 Sep 22 23:14:27.719 INFO [1] received activate with gen 1
16221 Sep 22 23:14:27.719 INFO [2] received reconcile message
16222 Sep 22 23:14:27.719 INFO Connection request from f134b04d-a090-4ba5-ab19-3a0aa121e3dd with version 4, task: proc
16223 Sep 22 23:14:27.719 INFO [1] client got ds_active_rx, promote! session 37685993-17b9-4e43-bd95-ffd5fc39e9bd
16224 Sep 22 23:14:27.719 INFO [2] All repairs completed, exit
16225 Sep 22 23:14:27.719 INFO upstairs UpstairsConnection { upstairs_id: f134b04d-a090-4ba5-ab19-3a0aa121e3dd, session_id: 9d45ed58-1efb-4b07-81f4-53421dc0e429, gen: 1 } connected, version 4, task: proc
16226 Sep 22 23:14:27.719 INFO [2] Starts cmd_loop
16227 Sep 22 23:14:27.719 INFO [2] received activate with gen 1
16228 Sep 22 23:14:27.719 INFO [2] client got ds_active_rx, promote! session 37685993-17b9-4e43-bd95-ffd5fc39e9bd
16229 Sep 22 23:14:27.719 INFO Connection request from f134b04d-a090-4ba5-ab19-3a0aa121e3dd with version 4, task: proc
16230 The guest has finished waiting for activation
16231 Sep 22 23:14:27.719 INFO upstairs UpstairsConnection { upstairs_id: f134b04d-a090-4ba5-ab19-3a0aa121e3dd, session_id: 9d45ed58-1efb-4b07-81f4-53421dc0e429, gen: 1 } connected, version 4, task: proc
16232 Sep 22 23:14:27.719 INFO UpstairsConnection { upstairs_id: aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7, session_id: 37685993-17b9-4e43-bd95-ffd5fc39e9bd, gen: 1 } is now active (read-write)
16233 Sep 22 23:14:27.719 INFO Connection request from f134b04d-a090-4ba5-ab19-3a0aa121e3dd with version 4, task: proc
16234 Sep 22 23:14:27.719 INFO upstairs UpstairsConnection { upstairs_id: f134b04d-a090-4ba5-ab19-3a0aa121e3dd, session_id: 9d45ed58-1efb-4b07-81f4-53421dc0e429, gen: 1 } connected, version 4, task: proc
16235 Sep 22 23:14:27.719 INFO UpstairsConnection { upstairs_id: aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7, session_id: 37685993-17b9-4e43-bd95-ffd5fc39e9bd, gen: 1 } is now active (read-write)
16236 The guest has requested activation
16237 Sep 22 23:14:27.719 INFO UpstairsConnection { upstairs_id: aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7, session_id: 37685993-17b9-4e43-bd95-ffd5fc39e9bd, gen: 1 } is now active (read-write)
16238 Sep 22 23:14:27.719 INFO [0] f134b04d-a090-4ba5-ab19-3a0aa121e3dd (9d45ed58-1efb-4b07-81f4-53421dc0e429) New New New ds_transition to WaitActive
16239 Sep 22 23:14:27.719 INFO [0] Transition from New to WaitActive
16240 Sep 22 23:14:27.719 DEBG IO Read 1000 has deps []
16241 Sep 22 23:14:27.719 DEBG IO Write 1003 has deps []
16242 Sep 22 23:14:27.719 INFO [1] f134b04d-a090-4ba5-ab19-3a0aa121e3dd (9d45ed58-1efb-4b07-81f4-53421dc0e429) WaitActive New New ds_transition to WaitActive
16243 Sep 22 23:14:27.719 INFO [1] Transition from New to WaitActive
16244 Sep 22 23:14:27.719 INFO [2] f134b04d-a090-4ba5-ab19-3a0aa121e3dd (9d45ed58-1efb-4b07-81f4-53421dc0e429) WaitActive WaitActive New ds_transition to WaitActive
16245 Sep 22 23:14:27.719 INFO [2] Transition from New to WaitActive
16246 Sep 22 23:14:27.719 INFO f134b04d-a090-4ba5-ab19-3a0aa121e3dd active request set
16247 Sep 22 23:14:27.720 INFO [0] received activate with gen 1
16248 Sep 22 23:14:27.720 INFO [0] client got ds_active_rx, promote! session 9d45ed58-1efb-4b07-81f4-53421dc0e429
16249 Sep 22 23:14:27.720 INFO [0] downstairs client at 127.0.0.1:45743 has UUID 61ba2500-0b28-4437-bb9f-9b380dfcca32
16250 Sep 22 23:14:27.720 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 61ba2500-0b28-4437-bb9f-9b380dfcca32, encrypted: true, database_read_version: 1, database_write_version: 1 }
16251 Sep 22 23:14:27.720 INFO [1] received activate with gen 1
16252 Sep 22 23:14:27.720 INFO [1] client got ds_active_rx, promote! session 9d45ed58-1efb-4b07-81f4-53421dc0e429
16253 Sep 22 23:14:27.720 INFO aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 WaitActive WaitActive WaitActive
16254 Sep 22 23:14:27.720 DEBG Write :1001 deps:[JobId(1000)] res:true
16255 Sep 22 23:14:27.720 INFO [2] received activate with gen 1
16256 Sep 22 23:14:27.720 INFO [1] downstairs client at 127.0.0.1:33459 has UUID d2794c71-5659-445d-8650-5f7a1461a2ed
16257 Sep 22 23:14:27.720 INFO [2] client got ds_active_rx, promote! session 9d45ed58-1efb-4b07-81f4-53421dc0e429
16258 Sep 22 23:14:27.720 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d2794c71-5659-445d-8650-5f7a1461a2ed, encrypted: true, database_read_version: 1, database_write_version: 1 }
16259 Sep 22 23:14:27.720 DEBG [0] Read AckReady 1002, : downstairs
16260 Sep 22 23:14:27.720 INFO aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 WaitActive WaitActive WaitActive
16261 Sep 22 23:14:27.720 INFO UpstairsConnection { upstairs_id: f134b04d-a090-4ba5-ab19-3a0aa121e3dd, session_id: 9d45ed58-1efb-4b07-81f4-53421dc0e429, gen: 1 } is now active (read-write)
16262 Sep 22 23:14:27.720 INFO [2] downstairs client at 127.0.0.1:60290 has UUID a9ddcb17-2090-44c0-b9dd-e7df3a8e01e7
16263 Sep 22 23:14:27.720 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a9ddcb17-2090-44c0-b9dd-e7df3a8e01e7, encrypted: true, database_read_version: 1, database_write_version: 1 }
16264 Sep 22 23:14:27.720 INFO aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 WaitActive WaitActive WaitActive
16265 Sep 22 23:14:27.720 INFO UpstairsConnection { upstairs_id: f134b04d-a090-4ba5-ab19-3a0aa121e3dd, session_id: 9d45ed58-1efb-4b07-81f4-53421dc0e429, gen: 1 } is now active (read-write)
16266 Sep 22 23:14:27.720 DEBG Read :1000 deps:[] res:true
16267 Sep 22 23:14:27.720 INFO UpstairsConnection { upstairs_id: f134b04d-a090-4ba5-ab19-3a0aa121e3dd, session_id: 9d45ed58-1efb-4b07-81f4-53421dc0e429, gen: 1 } is now active (read-write)
16268 Sep 22 23:14:27.720 INFO Current flush_numbers [0..12]: [0, 0]
16269 Sep 22 23:14:27.720 DEBG Read :1000 deps:[] res:true
16270 Sep 22 23:14:27.720 DEBG Write :1001 deps:[JobId(1000)] res:true
16271 Sep 22 23:14:27.720 INFO Downstairs has completed Negotiation, task: proc
16272 Sep 22 23:14:27.720 DEBG Read :1000 deps:[] res:true
16273 Sep 22 23:14:27.721 INFO [0] downstairs client at 127.0.0.1:41704 has UUID 1e03bcc8-571c-4b44-a9e5-acc5366adf71
16274 Sep 22 23:14:27.721 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1e03bcc8-571c-4b44-a9e5-acc5366adf71, encrypted: true, database_read_version: 1, database_write_version: 1 }
16275 Sep 22 23:14:27.721 INFO Current flush_numbers [0..12]: [0, 0]
16276 Sep 22 23:14:27.721 INFO f134b04d-a090-4ba5-ab19-3a0aa121e3dd WaitActive WaitActive WaitActive
16277 Sep 22 23:14:27.721 INFO [1] downstairs client at 127.0.0.1:58860 has UUID f4dd1f9e-c133-4738-8e4e-8aad0cee7c95
16278 Sep 22 23:14:27.721 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f4dd1f9e-c133-4738-8e4e-8aad0cee7c95, encrypted: true, database_read_version: 1, database_write_version: 1 }
16279 Sep 22 23:14:27.721 INFO f134b04d-a090-4ba5-ab19-3a0aa121e3dd WaitActive WaitActive WaitActive
16280 Sep 22 23:14:27.721 INFO [2] downstairs client at 127.0.0.1:37936 has UUID e9a979b4-d43f-404b-9e3f-1f402040beec
16281 Sep 22 23:14:27.721 DEBG Write :1001 deps:[JobId(1000)] res:true
16282 Sep 22 23:14:27.721 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e9a979b4-d43f-404b-9e3f-1f402040beec, encrypted: true, database_read_version: 1, database_write_version: 1 }
16283 Sep 22 23:14:27.721 INFO Downstairs has completed Negotiation, task: proc
16284 Sep 22 23:14:27.721 INFO f134b04d-a090-4ba5-ab19-3a0aa121e3dd WaitActive WaitActive WaitActive
16285 Sep 22 23:14:27.721 INFO Current flush_numbers [0..12]: [0, 0]
16286 Sep 22 23:14:27.721 INFO Current flush_numbers [0..12]: [0, 0]
16287 Sep 22 23:14:27.721 DEBG [0] Read AckReady 1000, : downstairs
16288 Sep 22 23:14:27.721 DEBG IO Read 1002 has deps [JobId(1001)]
16289 Sep 22 23:14:27.721 INFO Downstairs has completed Negotiation, task: proc
16290 Sep 22 23:14:27.721 INFO Downstairs has completed Negotiation, task: proc
16291 Sep 22 23:14:27.721 DEBG [1] Read already AckReady 1000, : downstairs
16292 Sep 22 23:14:27.721 DEBG [1] Read already AckReady 1002, : downstairs
16293 Sep 22 23:14:27.722 DEBG [2] Read already AckReady 1000, : downstairs
16294 Sep 22 23:14:27.722 DEBG up_ds_listen was notified
16295 Sep 22 23:14:27.722 INFO [0] aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 (37685993-17b9-4e43-bd95-ffd5fc39e9bd) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
16296 Sep 22 23:14:27.722 INFO Current flush_numbers [0..12]: [0, 0]
16297 Sep 22 23:14:27.722 DEBG up_ds_listen process 1000
16298 Sep 22 23:14:27.722 INFO [0] Transition from WaitActive to WaitQuorum
16299 Sep 22 23:14:27.722 WARN [0] new RM replaced this: None
16300 Sep 22 23:14:27.722 DEBG [A] ack job 1000:1, : downstairs
16301 Sep 22 23:14:27.722 INFO [0] Starts reconcile loop
16302 Sep 22 23:14:27.722 INFO [1] aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 (37685993-17b9-4e43-bd95-ffd5fc39e9bd) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
16303 Sep 22 23:14:27.722 INFO [1] Transition from WaitActive to WaitQuorum
16304 Sep 22 23:14:27.722 WARN [1] new RM replaced this: None
16305 Sep 22 23:14:27.722 INFO Downstairs has completed Negotiation, task: proc
16306 Sep 22 23:14:27.722 INFO [1] Starts reconcile loop
16307 Sep 22 23:14:27.722 INFO [2] aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 (37685993-17b9-4e43-bd95-ffd5fc39e9bd) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
16308 Sep 22 23:14:27.722 INFO [2] Transition from WaitActive to WaitQuorum
16309 Sep 22 23:14:27.722 DEBG up_ds_listen checked 1 jobs, back to waiting
16310 Sep 22 23:14:27.722 WARN [2] new RM replaced this: None
16311 Sep 22 23:14:27.722 INFO [2] Starts reconcile loop
16312 Sep 22 23:14:27.722 INFO Current flush_numbers [0..12]: [0, 0]
16313 Sep 22 23:14:27.722 INFO [0] 127.0.0.1:45743 task reports connection:true
16314 Sep 22 23:14:27.722 INFO aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 WaitQuorum WaitQuorum WaitQuorum
16315 Sep 22 23:14:27.722 INFO [0]R flush_numbers: [0, 0]
16316 Sep 22 23:14:27.722 INFO [0]R generation: [0, 0]
16317 Sep 22 23:14:27.722 INFO [0]R dirty: [false, false]
16318 Sep 22 23:14:27.722 INFO [1]R flush_numbers: [0, 0]
16319 Sep 22 23:14:27.722 DEBG Read :1002 deps:[JobId(1001)] res:true
16320 Sep 22 23:14:27.722 INFO [1]R generation: [0, 0]
16321 Sep 22 23:14:27.722 INFO [1]R dirty: [false, false]
16322 Sep 22 23:14:27.722 INFO [2]R flush_numbers: [0, 0]
16323 Sep 22 23:14:27.722 INFO [2]R generation: [0, 0]
16324 Sep 22 23:14:27.722 INFO [2]R dirty: [false, false]
16325 Sep 22 23:14:27.722 INFO Downstairs has completed Negotiation, task: proc
16326 Sep 22 23:14:27.722 INFO Max found gen is 1
16327 Sep 22 23:14:27.722 INFO Generation requested: 1 >= found:1
16328 Sep 22 23:14:27.722 INFO Next flush: 1
16329 Sep 22 23:14:27.722 INFO All extents match
16330 Sep 22 23:14:27.722 INFO No downstairs repair required
16331 Sep 22 23:14:27.722 INFO No initial repair work was required
16332 Sep 22 23:14:27.722 INFO Set Downstairs and Upstairs active
16333 Sep 22 23:14:27.722 INFO aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 is now active with session: 37685993-17b9-4e43-bd95-ffd5fc39e9bd
16334 Sep 22 23:14:27.722 INFO aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 Set Active after no repair
16335 Sep 22 23:14:27.722 INFO Notify all downstairs, region set compare is done.
16336 Sep 22 23:14:27.722 INFO [0] f134b04d-a090-4ba5-ab19-3a0aa121e3dd (9d45ed58-1efb-4b07-81f4-53421dc0e429) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
16337 Sep 22 23:14:27.722 INFO Set check for repair
16338 Sep 22 23:14:27.722 INFO [0] Transition from WaitActive to WaitQuorum
16339 Sep 22 23:14:27.723 WARN [0] new RM replaced this: None
16340 Sep 22 23:14:27.723 INFO [1] 127.0.0.1:33459 task reports connection:true
16341 Sep 22 23:14:27.723 INFO aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 Active Active Active
16342 Sep 22 23:14:27.723 DEBG Read :1002 deps:[JobId(1001)] res:true
16343 Sep 22 23:14:27.723 INFO Set check for repair
16344 Sep 22 23:14:27.723 INFO [0] Starts reconcile loop
16345 Sep 22 23:14:27.723 INFO [2] 127.0.0.1:60290 task reports connection:true
16346 Sep 22 23:14:27.723 INFO aafec2ff-66c0-4ac7-aa5b-9f14a05b68e7 Active Active Active
16347 Sep 22 23:14:27.723 INFO Set check for repair
16348 Sep 22 23:14:27.723 DEBG up_ds_listen was notified
16349 Sep 22 23:14:27.723 INFO [1] f134b04d-a090-4ba5-ab19-3a0aa121e3dd (9d45ed58-1efb-4b07-81f4-53421dc0e429) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
16350 Sep 22 23:14:27.723 INFO [0] received reconcile message
16351 Sep 22 23:14:27.723 INFO [1] Transition from WaitActive to WaitQuorum
16352 Sep 22 23:14:27.723 DEBG up_ds_listen process 1003
16353 Sep 22 23:14:27.723 WARN [1] new RM replaced this: None
16354 Sep 22 23:14:27.723 INFO [0] All repairs completed, exit
16355 Sep 22 23:14:27.723 DEBG [A] ack job 1003:4, : downstairs
16356 Sep 22 23:14:27.723 INFO [1] Starts reconcile loop
16357 Sep 22 23:14:27.723 DEBG up_ds_listen checked 1 jobs, back to waiting
16358 Sep 22 23:14:27.723 INFO [0] Starts cmd_loop
16359 Sep 22 23:14:27.723 INFO [2] f134b04d-a090-4ba5-ab19-3a0aa121e3dd (9d45ed58-1efb-4b07-81f4-53421dc0e429) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
16360 Sep 22 23:14:27.723 INFO [2] Transition from WaitActive to WaitQuorum
16361 Sep 22 23:14:27.723 WARN [2] new RM replaced this: None
16362 Sep 22 23:14:27.723 DEBG Read :1002 deps:[JobId(1001)] res:true
16363 Sep 22 23:14:27.723 DEBG IO Read 1001 has deps []
16364 Sep 22 23:14:27.723 INFO [2] Starts reconcile loop
16365 Sep 22 23:14:27.723 INFO [1] received reconcile message
16366 Sep 22 23:14:27.723 INFO [1] All repairs completed, exit
16367 Sep 22 23:14:27.723 INFO [0] 127.0.0.1:41704 task reports connection:true
16368 Sep 22 23:14:27.723 INFO [1] Starts cmd_loop
16369 Sep 22 23:14:27.723 INFO f134b04d-a090-4ba5-ab19-3a0aa121e3dd WaitQuorum WaitQuorum WaitQuorum
16370 Sep 22 23:14:27.723 INFO [0]R flush_numbers: [0, 0]
16371 Sep 22 23:14:27.723 INFO [0]R generation: [0, 0]
16372 Sep 22 23:14:27.723 INFO [2] received reconcile message
16373 Sep 22 23:14:27.723 INFO [0]R dirty: [false, false]
16374 Sep 22 23:14:27.723 DEBG IO Read 1004 has deps [JobId(1003)]
16375 Sep 22 23:14:27.723 INFO [1]R flush_numbers: [0, 0]
16376 Sep 22 23:14:27.723 INFO [2] All repairs completed, exit
16377 Sep 22 23:14:27.723 INFO [1]R generation: [0, 0]
16378 Sep 22 23:14:27.723 INFO [1]R dirty: [false, false]
16379 Sep 22 23:14:27.723 INFO [2] Starts cmd_loop
16380 Sep 22 23:14:27.723 INFO [2]R flush_numbers: [0, 0]
16381 Sep 22 23:14:27.723 INFO [2]R generation: [0, 0]
16382 Sep 22 23:14:27.723 DEBG [2] Read already AckReady 1002, : downstairs
16383 Sep 22 23:14:27.723 INFO [2]R dirty: [false, false]
16384 Sep 22 23:14:27.723 INFO Max found gen is 1
16385 The guest has finished waiting for activation
16386 Sep 22 23:14:27.723 INFO Generation requested: 1 >= found:1
16387 Sep 22 23:14:27.723 DEBG up_ds_listen was notified
16388 Sep 22 23:14:27.723 INFO Next flush: 1
16389 Sep 22 23:14:27.723 DEBG up_ds_listen process 1002
16390 Sep 22 23:14:27.723 INFO All extents match
16391 Sep 22 23:14:27.723 DEBG [A] ack job 1002:3, : downstairs
16392 Sep 22 23:14:27.723 INFO No downstairs repair required
16393 Sep 22 23:14:27.723 INFO No initial repair work was required
16394 Sep 22 23:14:27.723 INFO Set Downstairs and Upstairs active
16395 Sep 22 23:14:27.723 INFO f134b04d-a090-4ba5-ab19-3a0aa121e3dd is now active with session: 9d45ed58-1efb-4b07-81f4-53421dc0e429
16396 Sep 22 23:14:27.723 INFO f134b04d-a090-4ba5-ab19-3a0aa121e3dd Set Active after no repair
16397 Sep 22 23:14:27.723 INFO Notify all downstairs, region set compare is done.
16398 Sep 22 23:14:27.723 INFO Set check for repair
16399 Sep 22 23:14:27.723 INFO [1] 127.0.0.1:58860 task reports connection:true
16400 Sep 22 23:14:27.723 INFO f134b04d-a090-4ba5-ab19-3a0aa121e3dd Active Active Active
16401 Sep 22 23:14:27.724 INFO Set check for repair
16402 Sep 22 23:14:27.724 INFO [2] 127.0.0.1:37936 task reports connection:true
16403 Sep 22 23:14:27.724 INFO f134b04d-a090-4ba5-ab19-3a0aa121e3dd Active Active Active
16404 Sep 22 23:14:27.724 DEBG up_ds_listen checked 1 jobs, back to waiting
16405 Sep 22 23:14:27.724 INFO Set check for repair
16406 Sep 22 23:14:27.724 DEBG Read :1001 deps:[] res:true
16407 Sep 22 23:14:27.724 INFO [0] received reconcile message
16408 Sep 22 23:14:27.724 INFO [0] All repairs completed, exit
16409 Sep 22 23:14:27.724 INFO [0] Starts cmd_loop
16410 Sep 22 23:14:27.724 DEBG IO Write 1000 has deps []
16411 Sep 22 23:14:27.724 INFO [1] received reconcile message
16412 Sep 22 23:14:27.724 INFO [1] All repairs completed, exit
16413 Sep 22 23:14:27.724 INFO [1] Starts cmd_loop
16414 Sep 22 23:14:27.724 DEBG up_ds_listen was notified
16415 Sep 22 23:14:27.724 DEBG Read :1004 deps:[JobId(1003)] res:true
16416 Sep 22 23:14:27.724 DEBG Read :1001 deps:[] res:true
16417 Sep 22 23:14:27.724 INFO [2] received reconcile message
16418 Sep 22 23:14:27.724 DEBG up_ds_listen process 1000
16419 Sep 22 23:14:27.724 INFO [2] All repairs completed, exit
16420 Sep 22 23:14:27.724 DEBG [A] ack job 1000:1, : downstairs
16421 Sep 22 23:14:27.724 INFO [2] Starts cmd_loop
16422 Sep 22 23:14:27.724 DEBG up_ds_listen checked 1 jobs, back to waiting
16423 The guest has finished waiting for activation
16424 Sep 22 23:14:27.724 DEBG Read :1001 deps:[] res:true
16425 Sep 22 23:14:27.724 DEBG IO Read 1000 has deps []
16426 Sep 22 23:14:27.725 DEBG Read :1004 deps:[JobId(1003)] res:true
16427 Sep 22 23:14:27.725 DEBG [0] Read AckReady 1001, : downstairs
16428 Sep 22 23:14:27.725 DEBG [1] Read already AckReady 1001, : downstairs
16429 Sep 22 23:14:27.725 DEBG [2] Read already AckReady 1001, : downstairs
16430 Sep 22 23:14:27.726 DEBG up_ds_listen was notified
16431 Sep 22 23:14:27.726 DEBG up_ds_listen process 1001
16432 Sep 22 23:14:27.726 DEBG [A] ack job 1001:2, : downstairs
16433 Sep 22 23:14:27.726 DEBG [0] Read AckReady 1002, : downstairs
16434 Sep 22 23:14:27.726 DEBG Write :1000 deps:[] res:true
16435 Sep 22 23:14:27.726 DEBG Read :1004 deps:[JobId(1003)] res:true
16436 Sep 22 23:14:27.726 DEBG up_ds_listen checked 1 jobs, back to waiting
16437 Sep 22 23:14:27.726 INFO Scrub check for d2ca4639-e324-4c6c-8e8a-4f9bdf088392
16438 Sep 22 23:14:27.726 INFO Scrub for d2ca4639-e324-4c6c-8e8a-4f9bdf088392 begins
16439 Sep 22 23:14:27.726 INFO Scrub with total_size:2560 block_size:512
16440 Sep 22 23:14:27.726 INFO Scrubs from block 0 to 5 in (256) 131072 size IOs pm:0
16441 Sep 22 23:14:27.726 INFO Adjust block_count to 5 at offset 0
16442 Sep 22 23:14:27.726 DEBG Read :1000 deps:[] res:true
16443 Sep 22 23:14:27.726 DEBG Write :1000 deps:[] res:true
16444 Sep 22 23:14:27.727 DEBG Read :1000 deps:[] res:true
16445 test test::integration_test_region ... ok
16446 Sep 22 23:14:27.727 DEBG Write :1000 deps:[] res:true
16447 Sep 22 23:14:27.727 DEBG [2] Read already AckReady 1002, : downstairs
16448 Sep 22 23:14:27.727 DEBG Read :1000 deps:[] res:true
16449 Sep 22 23:14:27.727 INFO current number of open files limit 65536 is already the maximum
16450 Sep 22 23:14:27.727 INFO Created new region file "/tmp/downstairs-NzNQXxni/region.json"
16451 Sep 22 23:14:27.727 DEBG IO Write 1001 has deps []
16452 Sep 22 23:14:27.728 DEBG up_ds_listen was notified
16453 Sep 22 23:14:27.728 DEBG up_ds_listen process 1001
16454 Sep 22 23:14:27.728 DEBG [A] ack job 1001:2, : downstairs
16455 Sep 22 23:14:27.728 DEBG up_ds_listen checked 1 jobs, back to waiting
16456 Sep 22 23:14:27.728 DEBG IO Write 1002 has deps [JobId(1000)]
16457 Sep 22 23:14:27.728 INFO Scrub check for 27af30bc-466c-47bd-9576-e5adb1115007
16458 Sep 22 23:14:27.728 INFO Scrub for 27af30bc-466c-47bd-9576-e5adb1115007 begins
16459 Sep 22 23:14:27.728 INFO Scrub with total_size:2560 block_size:512
16460 Sep 22 23:14:27.728 INFO Scrubs from block 0 to 5 in (256) 131072 size IOs pm:0
16461 Sep 22 23:14:27.728 INFO Adjust block_count to 5 at offset 0
16462 Sep 22 23:14:27.728 DEBG [0] Read AckReady 1000, : downstairs
16463 Sep 22 23:14:27.728 DEBG [1] Read already AckReady 1000, : downstairs
16464 Sep 22 23:14:27.729 DEBG [1] Read already AckReady 1002, : downstairs
16465 Sep 22 23:14:27.729 DEBG up_ds_listen was notified
16466 Sep 22 23:14:27.729 DEBG up_ds_listen process 1002
16467 Sep 22 23:14:27.729 DEBG [A] ack job 1002:3, : downstairs
16468 Sep 22 23:14:27.729 DEBG [2] Read already AckReady 1000, : downstairs
16469 Sep 22 23:14:27.729 DEBG up_ds_listen was notified
16470 Sep 22 23:14:27.729 DEBG up_ds_listen process 1000
16471 Sep 22 23:14:27.729 DEBG [A] ack job 1000:1, : downstairs
16472 Sep 22 23:14:27.729 DEBG up_ds_listen checked 1 jobs, back to waiting
16473 Sep 22 23:14:27.729 INFO current number of open files limit 65536 is already the maximum
16474 Sep 22 23:14:27.729 INFO Opened existing region file "/tmp/downstairs-NzNQXxni/region.json"
16475 Sep 22 23:14:27.729 INFO Database read version 1
16476 Sep 22 23:14:27.729 INFO Database write version 1
16477 Sep 22 23:14:27.729 DEBG up_ds_listen checked 1 jobs, back to waiting
16478 Sep 22 23:14:27.729 DEBG Write :1001 deps:[] res:true
16479 Sep 22 23:14:27.730 DEBG Write :1001 deps:[] res:true
16480 Sep 22 23:14:27.730 DEBG [0] Read AckReady 1004, : downstairs
16481 Sep 22 23:14:27.730 DEBG Write :1001 deps:[] res:true
16482 Sep 22 23:14:27.731 INFO UUID: a0901c3f-7fbf-40b8-85c2-142a27a641c8
16483 Sep 22 23:14:27.731 INFO Blocks per extent:5 Total Extents: 2
16484 Sep 22 23:14:27.731 INFO Crucible Version: Crucible Version: 0.0.1
16485 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16486 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16487 rustc: 1.70.0 stable x86_64-unknown-illumos
16488 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16489 Sep 22 23:14:27.731 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16490 Sep 22 23:14:27.731 INFO Using address: 127.0.0.1:35706, task: main
16491 Sep 22 23:14:27.731 INFO Repair listens on 127.0.0.1:0, task: repair
16492 Sep 22 23:14:27.731 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53909, task: repair
16493 Sep 22 23:14:27.731 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53909, task: repair
16494 Sep 22 23:14:27.731 INFO listening, local_addr: 127.0.0.1:53909, task: repair
16495 Sep 22 23:14:27.732 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53909, task: repair
16496 Sep 22 23:14:27.732 DEBG IO Write 1002 has deps [JobId(1000)]
16497 Sep 22 23:14:27.732 INFO Using repair address: 127.0.0.1:53909, task: main
16498 Sep 22 23:14:27.732 INFO No SSL acceptor configured, task: main
16499 Sep 22 23:14:27.732 INFO current number of open files limit 65536 is already the maximum
16500 Sep 22 23:14:27.732 INFO Created new region file "/tmp/downstairs-JJG0Y7Ly/region.json"
16501 Sep 22 23:14:27.732 DEBG up_ds_listen was notified
16502 Sep 22 23:14:27.732 DEBG up_ds_listen process 1002
16503 Sep 22 23:14:27.732 DEBG [A] ack job 1002:3, : downstairs
16504 Sep 22 23:14:27.732 DEBG up_ds_listen checked 1 jobs, back to waiting
16505 Sep 22 23:14:27.732 INFO Scrub at offset 5/5 sp:5
16506 Sep 22 23:14:27.732 DEBG [1] Read already AckReady 1004, : downstairs
16507 test test::integration_test_scrub_no_rop ... ok
16508 Sep 22 23:14:27.733 DEBG IO Write 1001 has deps [JobId(1000)]
16509 Sep 22 23:14:27.734 INFO Scrub d2ca4639-e324-4c6c-8e8a-4f9bdf088392 done in 0 seconds. Retries:0 scrub_size:2560 size:5 pause_milli:0
16510 Sep 22 23:14:27.734 INFO current number of open files limit 65536 is already the maximum
16511 Sep 22 23:14:27.734 DEBG up_ds_listen was notified
16512 Sep 22 23:14:27.734 DEBG IO Flush 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
16513 Sep 22 23:14:27.734 DEBG up_ds_listen process 1001
16514 Sep 22 23:14:27.734 INFO Created new region file "/tmp/downstairs-yOcvqWhx/region.json"
16515 Sep 22 23:14:27.734 DEBG [A] ack job 1001:2, : downstairs
16516 Sep 22 23:14:27.734 DEBG up_ds_listen checked 1 jobs, back to waiting
16517 Sep 22 23:14:27.734 INFO current number of open files limit 65536 is already the maximum
16518 Sep 22 23:14:27.734 INFO Opened existing region file "/tmp/downstairs-JJG0Y7Ly/region.json"
16519 Sep 22 23:14:27.734 INFO Database read version 1
16520 Sep 22 23:14:27.734 INFO Database write version 1
16521 Sep 22 23:14:27.734 DEBG up_ds_listen was notified
16522 Sep 22 23:14:27.734 DEBG up_ds_listen process 1002
16523 Sep 22 23:14:27.734 DEBG [A] ack job 1002:3, : downstairs
16524 Sep 22 23:14:27.734 DEBG up_ds_listen checked 1 jobs, back to waiting
16525 Sep 22 23:14:27.734 INFO Scrub at offset 5/5 sp:5
16526 Sep 22 23:14:27.735 INFO Scrub check for 510c33b3-4659-4dfe-9ea9-4fd9b3d18480
16527 Sep 22 23:14:27.735 INFO Scrub for 510c33b3-4659-4dfe-9ea9-4fd9b3d18480 begins
16528 Sep 22 23:14:27.735 INFO Scrub with total_size:5120 block_size:512
16529 Sep 22 23:14:27.735 INFO Scrubs from block 0 to 10 in (256) 131072 size IOs pm:0
16530 Sep 22 23:14:27.735 INFO Adjust block_count to 10 at offset 0
16531 Sep 22 23:14:27.735 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16532 Sep 22 23:14:27.735 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16533 Sep 22 23:14:27.735 DEBG [2] Read already AckReady 1004, : downstairs
16534 Sep 22 23:14:27.735 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16535 Sep 22 23:14:27.735 INFO UUID: e45251b5-0344-4afb-bb50-e9e4af673e5f
16536 Sep 22 23:14:27.735 DEBG up_ds_listen was notified
16537 Sep 22 23:14:27.735 INFO Blocks per extent:5 Total Extents: 2
16538 Sep 22 23:14:27.735 DEBG up_ds_listen process 1004
16539 Sep 22 23:14:27.735 DEBG [A] ack job 1004:5, : downstairs
16540 Sep 22 23:14:27.735 INFO Crucible Version: Crucible Version: 0.0.1
16541 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16542 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16543 rustc: 1.70.0 stable x86_64-unknown-illumos
16544 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16545 Sep 22 23:14:27.735 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16546 Sep 22 23:14:27.735 INFO Using address: 127.0.0.1:51761, task: main
16547 Sep 22 23:14:27.735 DEBG up_ds_listen was notified
16548 Sep 22 23:14:27.735 DEBG up_ds_listen process 1003
16549 Sep 22 23:14:27.735 DEBG [A] ack job 1003:4, : downstairs
16550 Sep 22 23:14:27.735 DEBG [rc] retire 1003 clears [JobId(1000), JobId(1001), JobId(1002), JobId(1003)], : downstairs
16551 Sep 22 23:14:27.735 INFO Repair listens on 127.0.0.1:0, task: repair
16552 Sep 22 23:14:27.735 DEBG up_ds_listen checked 1 jobs, back to waiting
16553 Sep 22 23:14:27.735 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50200, task: repair
16554 Sep 22 23:14:27.735 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50200, task: repair
16555 Sep 22 23:14:27.735 INFO listening, local_addr: 127.0.0.1:50200, task: repair
16556 Sep 22 23:14:27.736 DEBG up_ds_listen checked 1 jobs, back to waiting
16557 Sep 22 23:14:27.736 INFO Scrub 27af30bc-466c-47bd-9576-e5adb1115007 done in 0 seconds. Retries:0 scrub_size:2560 size:5 pause_milli:0
16558 Sep 22 23:14:27.736 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50200, task: repair
16559 Sep 22 23:14:27.736 INFO Using repair address: 127.0.0.1:50200, task: main
16560 Sep 22 23:14:27.736 INFO No SSL acceptor configured, task: main
16561 Sep 22 23:14:27.736 DEBG IO Flush 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
16562 Sep 22 23:14:27.736 INFO current number of open files limit 65536 is already the maximum
16563 Sep 22 23:14:27.736 INFO Created new region file "/tmp/downstairs-RAGRvo5y/region.json"
16564 Sep 22 23:14:27.737 INFO current number of open files limit 65536 is already the maximum
16565 Sep 22 23:14:27.737 INFO Opened existing region file "/tmp/downstairs-yOcvqWhx/region.json"
16566 Sep 22 23:14:27.737 INFO Database read version 1
16567 Sep 22 23:14:27.737 INFO Database write version 1
16568 Sep 22 23:14:27.738 DEBG IO Write 1004 has deps []
16569 Sep 22 23:14:27.738 DEBG Write :1001 deps:[JobId(1000)] res:true
16570 Sep 22 23:14:27.739 INFO UUID: dcbee2a7-5a05-4339-b188-1d1bfb232386
16571 Sep 22 23:14:27.739 INFO Blocks per extent:5 Total Extents: 2
16572 Sep 22 23:14:27.739 INFO Crucible Version: Crucible Version: 0.0.1
16573 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16574 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16575 rustc: 1.70.0 stable x86_64-unknown-illumos
16576 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16577 Sep 22 23:14:27.739 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16578 Sep 22 23:14:27.739 INFO Using address: 127.0.0.1:39658, task: main
16579 Sep 22 23:14:27.739 INFO Repair listens on 127.0.0.1:0, task: repair
16580 Sep 22 23:14:27.739 DEBG Write :1001 deps:[JobId(1000)] res:true
16581 Sep 22 23:14:27.739 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:54627, task: repair
16582 Sep 22 23:14:27.739 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:54627, task: repair
16583 Sep 22 23:14:27.740 INFO listening, local_addr: 127.0.0.1:54627, task: repair
16584 Sep 22 23:14:27.740 INFO current number of open files limit 65536 is already the maximum
16585 Sep 22 23:14:27.740 INFO Opened existing region file "/tmp/downstairs-RAGRvo5y/region.json"
16586 Sep 22 23:14:27.740 INFO Database read version 1
16587 Sep 22 23:14:27.740 INFO Database write version 1
16588 Sep 22 23:14:27.740 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:54627, task: repair
16589 Sep 22 23:14:27.740 INFO Using repair address: 127.0.0.1:54627, task: main
16590 Sep 22 23:14:27.740 INFO No SSL acceptor configured, task: main
16591 Sep 22 23:14:27.740 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16592 Sep 22 23:14:27.740 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16593 Sep 22 23:14:27.740 INFO current number of open files limit 65536 is already the maximum
16594 Sep 22 23:14:27.740 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16595 Sep 22 23:14:27.740 INFO Created new region file "/tmp/downstairs-HE4BLZZn/region.json"
16596 test test::integration_test_scrub ... Sep 22 23:14:27.740 DEBG Write :1001 deps:[JobId(1000)] res:true
16597 ok
16598 Sep 22 23:14:27.740 DEBG up_ds_listen was notified
16599 Sep 22 23:14:27.741 DEBG up_ds_listen process 1003
16600 Sep 22 23:14:27.741 DEBG [A] ack job 1003:4, : downstairs
16601 Sep 22 23:14:27.741 DEBG [rc] retire 1003 clears [JobId(1000), JobId(1001), JobId(1002), JobId(1003)], : downstairs
16602 Sep 22 23:14:27.741 DEBG up_ds_listen checked 1 jobs, back to waiting
16603 Sep 22 23:14:27.741 INFO current number of open files limit 65536 is already the maximum
16604 Sep 22 23:14:27.741 INFO Created new region file "/tmp/downstairs-Fx2jwgfS/region.json"
16605 Sep 22 23:14:27.741 DEBG IO Read 1004 has deps []
16606 Sep 22 23:14:27.741 INFO UUID: c2d08264-7ded-412a-9acb-ca0bc576793e
16607 Sep 22 23:14:27.741 INFO Blocks per extent:5 Total Extents: 2
16608 Sep 22 23:14:27.741 INFO Crucible Version: Crucible Version: 0.0.1
16609 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16610 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16611 rustc: 1.70.0 stable x86_64-unknown-illumos
16612 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16613 Sep 22 23:14:27.741 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16614 Sep 22 23:14:27.741 INFO Using address: 127.0.0.1:34180, task: main
16615 Sep 22 23:14:27.742 INFO Repair listens on 127.0.0.1:0, task: repair
16616 Sep 22 23:14:27.742 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58985, task: repair
16617 Sep 22 23:14:27.742 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58985, task: repair
16618 Sep 22 23:14:27.742 INFO listening, local_addr: 127.0.0.1:58985, task: repair
16619 Sep 22 23:14:27.742 DEBG Read :1004 deps:[] res:true
16620 Sep 22 23:14:27.742 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58985, task: repair
16621 Sep 22 23:14:27.742 INFO Using repair address: 127.0.0.1:58985, task: main
16622 Sep 22 23:14:27.742 INFO No SSL acceptor configured, task: main
16623 Sep 22 23:14:27.742 DEBG up_ds_listen was notified
16624 Sep 22 23:14:27.742 DEBG up_ds_listen process 1004
16625 Sep 22 23:14:27.742 DEBG [A] ack job 1004:5, : downstairs
16626 Sep 22 23:14:27.742 DEBG up_ds_listen checked 1 jobs, back to waiting
16627 Sep 22 23:14:27.742 INFO Upstairs starts
16628 Sep 22 23:14:27.742 INFO Crucible Version: BuildInfo {
16629 version: "0.0.1",
16630 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
16631 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
16632 git_branch: "main",
16633 rustc_semver: "1.70.0",
16634 rustc_channel: "stable",
16635 rustc_host_triple: "x86_64-unknown-illumos",
16636 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
16637 cargo_triple: "x86_64-unknown-illumos",
16638 debug: true,
16639 opt_level: 0,
16640 }
16641 Sep 22 23:14:27.742 INFO Upstairs <-> Downstairs Message Version: 4
16642 Sep 22 23:14:27.743 INFO Crucible stats registered with UUID: d173eb3a-6ff3-4b10-89cb-971e46f2c897
16643 Sep 22 23:14:27.743 INFO Crucible d173eb3a-6ff3-4b10-89cb-971e46f2c897 has session id: f6aaf660-c0e6-4906-96a3-4646c5c3c38c
16644 Sep 22 23:14:27.743 DEBG Read :1004 deps:[] res:true
16645 Sep 22 23:14:27.743 DEBG IO Read 1005 has deps [JobId(1004)]
16646 Sep 22 23:14:27.743 INFO listening on 127.0.0.1:0, task: main
16647 Sep 22 23:14:27.743 INFO listening on 127.0.0.1:0, task: main
16648 Sep 22 23:14:27.743 INFO listening on 127.0.0.1:0, task: main
16649 Sep 22 23:14:27.743 INFO [0] connecting to 127.0.0.1:35706, looper: 0
16650 Sep 22 23:14:27.743 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
16651 Sep 22 23:14:27.743 INFO [1] connecting to 127.0.0.1:51761, looper: 1
16652 Sep 22 23:14:27.743 INFO [2] connecting to 127.0.0.1:34180, looper: 2
16653 Sep 22 23:14:27.743 INFO up_listen starts, task: up_listen
16654 Sep 22 23:14:27.743 INFO Wait for all three downstairs to come online
16655 Sep 22 23:14:27.743 INFO Flush timeout: 0.5
16656 Sep 22 23:14:27.743 INFO accepted connection from 127.0.0.1:34884, task: main
16657 Sep 22 23:14:27.743 INFO accepted connection from 127.0.0.1:47731, task: main
16658 Sep 22 23:14:27.743 DEBG Read :1004 deps:[] res:true
16659 Sep 22 23:14:27.743 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 looper connected, looper: 0
16660 Sep 22 23:14:27.743 INFO [0] Proc runs for 127.0.0.1:35706 in state New
16661 Sep 22 23:14:27.743 INFO accepted connection from 127.0.0.1:62343, task: main
16662 Sep 22 23:14:27.744 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 looper connected, looper: 2
16663 Sep 22 23:14:27.744 INFO [2] Proc runs for 127.0.0.1:34180 in state New
16664 Sep 22 23:14:27.744 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 looper connected, looper: 1
16665 Sep 22 23:14:27.744 DEBG Read :1005 deps:[JobId(1004)] res:true
16666 Sep 22 23:14:27.744 INFO [1] Proc runs for 127.0.0.1:51761 in state New
16667 Sep 22 23:14:27.744 INFO current number of open files limit 65536 is already the maximum
16668 Sep 22 23:14:27.744 INFO Opened existing region file "/tmp/downstairs-HE4BLZZn/region.json"
16669 Sep 22 23:14:27.744 INFO Database read version 1
16670 Sep 22 23:14:27.744 INFO Database write version 1
16671 Sep 22 23:14:27.744 INFO Connection request from d173eb3a-6ff3-4b10-89cb-971e46f2c897 with version 4, task: proc
16672 Sep 22 23:14:27.744 INFO upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 7d35a049-e6b2-42d0-8692-1238474c8ef7, gen: 1 } connected, version 4, task: proc
16673 Sep 22 23:14:27.744 INFO Connection request from d173eb3a-6ff3-4b10-89cb-971e46f2c897 with version 4, task: proc
16674 Sep 22 23:14:27.744 INFO upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 7d35a049-e6b2-42d0-8692-1238474c8ef7, gen: 1 } connected, version 4, task: proc
16675 Sep 22 23:14:27.744 INFO Connection request from d173eb3a-6ff3-4b10-89cb-971e46f2c897 with version 4, task: proc
16676 Sep 22 23:14:27.744 INFO upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 7d35a049-e6b2-42d0-8692-1238474c8ef7, gen: 1 } connected, version 4, task: proc
16677 Sep 22 23:14:27.744 DEBG Read :1005 deps:[JobId(1004)] res:true
16678 The guest has requested activation
16679 Sep 22 23:14:27.744 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (7d35a049-e6b2-42d0-8692-1238474c8ef7) New New New ds_transition to WaitActive
16680 Sep 22 23:14:27.744 INFO [0] Transition from New to WaitActive
16681 Sep 22 23:14:27.744 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (7d35a049-e6b2-42d0-8692-1238474c8ef7) WaitActive New New ds_transition to WaitActive
16682 Sep 22 23:14:27.744 INFO [2] Transition from New to WaitActive
16683 Sep 22 23:14:27.744 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (7d35a049-e6b2-42d0-8692-1238474c8ef7) WaitActive New WaitActive ds_transition to WaitActive
16684 Sep 22 23:14:27.744 INFO [1] Transition from New to WaitActive
16685 Sep 22 23:14:27.744 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 active request set
16686 Sep 22 23:14:27.745 INFO [0] received activate with gen 1
16687 Sep 22 23:14:27.745 INFO [0] client got ds_active_rx, promote! session 7d35a049-e6b2-42d0-8692-1238474c8ef7
16688 Sep 22 23:14:27.745 DEBG Read :1005 deps:[JobId(1004)] res:true
16689 Sep 22 23:14:27.745 INFO [1] received activate with gen 1
16690 Sep 22 23:14:27.745 INFO [1] client got ds_active_rx, promote! session 7d35a049-e6b2-42d0-8692-1238474c8ef7
16691 Sep 22 23:14:27.745 INFO [2] received activate with gen 1
16692 Sep 22 23:14:27.745 INFO [2] client got ds_active_rx, promote! session 7d35a049-e6b2-42d0-8692-1238474c8ef7
16693 Sep 22 23:14:27.745 INFO current number of open files limit 65536 is already the maximum
16694 Sep 22 23:14:27.745 INFO UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 7d35a049-e6b2-42d0-8692-1238474c8ef7, gen: 1 } is now active (read-write)
16695 Sep 22 23:14:27.745 INFO Opened existing region file "/tmp/downstairs-Fx2jwgfS/region.json"
16696 Sep 22 23:14:27.745 INFO Database read version 1
16697 Sep 22 23:14:27.745 INFO Database write version 1
16698 Sep 22 23:14:27.745 INFO UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 7d35a049-e6b2-42d0-8692-1238474c8ef7, gen: 1 } is now active (read-write)
16699 Sep 22 23:14:27.745 INFO UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 7d35a049-e6b2-42d0-8692-1238474c8ef7, gen: 1 } is now active (read-write)
16700 Sep 22 23:14:27.745 INFO [0] downstairs client at 127.0.0.1:35706 has UUID a0901c3f-7fbf-40b8-85c2-142a27a641c8
16701 Sep 22 23:14:27.745 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a0901c3f-7fbf-40b8-85c2-142a27a641c8, encrypted: true, database_read_version: 1, database_write_version: 1 }
16702 Sep 22 23:14:27.745 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 WaitActive WaitActive WaitActive
16703 Sep 22 23:14:27.745 INFO [2] downstairs client at 127.0.0.1:34180 has UUID c2d08264-7ded-412a-9acb-ca0bc576793e
16704 Sep 22 23:14:27.745 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c2d08264-7ded-412a-9acb-ca0bc576793e, encrypted: true, database_read_version: 1, database_write_version: 1 }
16705 Sep 22 23:14:27.745 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 WaitActive WaitActive WaitActive
16706 Sep 22 23:14:27.745 INFO [1] downstairs client at 127.0.0.1:51761 has UUID e45251b5-0344-4afb-bb50-e9e4af673e5f
16707 Sep 22 23:14:27.746 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e45251b5-0344-4afb-bb50-e9e4af673e5f, encrypted: true, database_read_version: 1, database_write_version: 1 }
16708 Sep 22 23:14:27.746 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 WaitActive WaitActive WaitActive
16709 Sep 22 23:14:27.746 INFO Current flush_numbers [0..12]: [0, 0]
16710 Sep 22 23:14:27.746 INFO Downstairs has completed Negotiation, task: proc
16711 Sep 22 23:14:27.746 INFO UUID: cc461d70-432a-44c4-b9ed-07b65b90fa30
16712 Sep 22 23:14:27.746 INFO Current flush_numbers [0..12]: [0, 0]
16713 Sep 22 23:14:27.746 INFO Blocks per extent:5 Total Extents: 2
16714 Sep 22 23:14:27.746 INFO Crucible Version: Crucible Version: 0.0.1
16715 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16716 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16717 rustc: 1.70.0 stable x86_64-unknown-illumos
16718 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16719 Sep 22 23:14:27.746 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16720 Sep 22 23:14:27.746 INFO Downstairs has completed Negotiation, task: proc
16721 Sep 22 23:14:27.746 INFO Using address: 127.0.0.1:62268, task: main
16722 Sep 22 23:14:27.746 INFO Current flush_numbers [0..12]: [0, 0]
16723 Sep 22 23:14:27.746 DEBG up_ds_listen was notified
16724 Sep 22 23:14:27.746 DEBG up_ds_listen process 1002
16725 Sep 22 23:14:27.746 DEBG [A] ack job 1002:3, : downstairs
16726 Sep 22 23:14:27.746 INFO Downstairs has completed Negotiation, task: proc
16727 Sep 22 23:14:27.746 DEBG up_ds_listen checked 1 jobs, back to waiting
16728 Sep 22 23:14:27.746 INFO Scrub at offset 10/10 sp:10
16729 Sep 22 23:14:27.746 INFO Repair listens on 127.0.0.1:0, task: repair
16730 Sep 22 23:14:27.746 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (7d35a049-e6b2-42d0-8692-1238474c8ef7) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
16731 Sep 22 23:14:27.747 INFO [0] Transition from WaitActive to WaitQuorum
16732 Sep 22 23:14:27.747 WARN [0] new RM replaced this: None
16733 Sep 22 23:14:27.747 INFO [0] Starts reconcile loop
16734 Sep 22 23:14:27.747 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:65334, task: repair
16735 Sep 22 23:14:27.747 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:65334, task: repair
16736 Sep 22 23:14:27.747 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (7d35a049-e6b2-42d0-8692-1238474c8ef7) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
16737 Sep 22 23:14:27.747 INFO [2] Transition from WaitActive to WaitQuorum
16738 Sep 22 23:14:27.747 WARN [2] new RM replaced this: None
16739 Sep 22 23:14:27.747 INFO listening, local_addr: 127.0.0.1:65334, task: repair
16740 Sep 22 23:14:27.747 INFO [2] Starts reconcile loop
16741 Sep 22 23:14:27.747 DEBG [0] Read AckReady 1004, : downstairs
16742 Sep 22 23:14:27.747 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (7d35a049-e6b2-42d0-8692-1238474c8ef7) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum
16743 Sep 22 23:14:27.747 INFO [1] Transition from WaitActive to WaitQuorum
16744 Sep 22 23:14:27.747 WARN [1] new RM replaced this: None
16745 Sep 22 23:14:27.747 INFO [1] Starts reconcile loop
16746 Sep 22 23:14:27.747 INFO UUID: d97d6679-3993-407e-b222-4165f2682efb
16747 Sep 22 23:14:27.747 INFO [0] 127.0.0.1:35706 task reports connection:true
16748 Sep 22 23:14:27.747 INFO Blocks per extent:5 Total Extents: 2
16749 Sep 22 23:14:27.747 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 WaitQuorum WaitQuorum WaitQuorum
16750 Sep 22 23:14:27.747 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:65334, task: repair
16751 Sep 22 23:14:27.747 INFO [0]R flush_numbers: [0, 0]
16752 Sep 22 23:14:27.747 INFO [0]R generation: [0, 0]
16753 Sep 22 23:14:27.747 INFO [0]R dirty: [false, false]
16754 Sep 22 23:14:27.747 INFO Using repair address: 127.0.0.1:65334, task: main
16755 Sep 22 23:14:27.747 INFO [1]R flush_numbers: [0, 0]
16756 Sep 22 23:14:27.747 INFO [1]R generation: [0, 0]
16757 Sep 22 23:14:27.747 INFO No SSL acceptor configured, task: main
16758 Sep 22 23:14:27.747 INFO [1]R dirty: [false, false]
16759 Sep 22 23:14:27.747 INFO Crucible Version: Crucible Version: 0.0.1
16760 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16761 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16762 rustc: 1.70.0 stable x86_64-unknown-illumos
16763 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16764 Sep 22 23:14:27.747 INFO [2]R flush_numbers: [0, 0]
16765 Sep 22 23:14:27.747 INFO [2]R generation: [0, 0]
16766 Sep 22 23:14:27.747 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16767 Sep 22 23:14:27.747 INFO [2]R dirty: [false, false]
16768 Sep 22 23:14:27.747 INFO Max found gen is 1
16769 Sep 22 23:14:27.747 INFO Using address: 127.0.0.1:40377, task: main
16770 Sep 22 23:14:27.747 INFO Generation requested: 1 >= found:1
16771 Sep 22 23:14:27.747 INFO Next flush: 1
16772 Sep 22 23:14:27.747 INFO All extents match
16773 Sep 22 23:14:27.747 INFO No downstairs repair required
16774 Sep 22 23:14:27.747 INFO No initial repair work was required
16775 Sep 22 23:14:27.747 INFO Set Downstairs and Upstairs active
16776 Sep 22 23:14:27.747 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 is now active with session: 7d35a049-e6b2-42d0-8692-1238474c8ef7
16777 Sep 22 23:14:27.747 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 Set Active after no repair
16778 Sep 22 23:14:27.747 INFO Notify all downstairs, region set compare is done.
16779 Sep 22 23:14:27.747 INFO Set check for repair
16780 Sep 22 23:14:27.747 INFO current number of open files limit 65536 is already the maximum
16781 Sep 22 23:14:27.747 INFO [2] 127.0.0.1:34180 task reports connection:true
16782 Sep 22 23:14:27.747 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 Active Active Active
16783 Sep 22 23:14:27.747 INFO Set check for repair
16784 Sep 22 23:14:27.747 INFO [1] 127.0.0.1:51761 task reports connection:true
16785 Sep 22 23:14:27.747 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 Active Active Active
16786 Sep 22 23:14:27.747 INFO Set check for repair
16787 Sep 22 23:14:27.747 INFO Created new region file "/tmp/downstairs-AadW8v1X/region.json"
16788 Sep 22 23:14:27.747 INFO [0] received reconcile message
16789 Sep 22 23:14:27.747 INFO [0] All repairs completed, exit
16790 Sep 22 23:14:27.747 INFO [0] Starts cmd_loop
16791 Sep 22 23:14:27.747 INFO Repair listens on 127.0.0.1:0, task: repair
16792 Sep 22 23:14:27.747 INFO [1] received reconcile message
16793 Sep 22 23:14:27.747 INFO [1] All repairs completed, exit
16794 Sep 22 23:14:27.747 INFO [1] Starts cmd_loop
16795 Sep 22 23:14:27.747 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33016, task: repair
16796 Sep 22 23:14:27.747 INFO [2] received reconcile message
16797 Sep 22 23:14:27.747 INFO [2] All repairs completed, exit
16798 Sep 22 23:14:27.747 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33016, task: repair
16799 Sep 22 23:14:27.747 INFO [2] Starts cmd_loop
16800 The guest has finished waiting for activation
16801 Sep 22 23:14:27.747 INFO listening, local_addr: 127.0.0.1:33016, task: repair
16802 Sep 22 23:14:27.748 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33016, task: repair
16803 Sep 22 23:14:27.748 INFO Using repair address: 127.0.0.1:33016, task: main
16804 Sep 22 23:14:27.748 INFO Scrub 510c33b3-4659-4dfe-9ea9-4fd9b3d18480 done in 0 seconds. Retries:0 scrub_size:5120 size:10 pause_milli:0
16805 Sep 22 23:14:27.748 INFO No SSL acceptor configured, task: main
16806 Sep 22 23:14:27.748 DEBG IO Flush 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
16807 Sep 22 23:14:27.748 INFO current number of open files limit 65536 is already the maximum
16808 Sep 22 23:14:27.748 INFO Created new region file "/tmp/downstairs-tpdOXLU0/region.json"
16809 Sep 22 23:14:27.749 DEBG [1] Read already AckReady 1004, : downstairs
16810 Sep 22 23:14:27.749 DEBG [0] Read AckReady 1005, : downstairs
16811 Sep 22 23:14:27.750 DEBG IO Write 1000 has deps []
16812 Sep 22 23:14:27.750 DEBG up_ds_listen was notified
16813 Sep 22 23:14:27.750 DEBG up_ds_listen process 1000
16814 Sep 22 23:14:27.750 DEBG [A] ack job 1000:1, : downstairs
16815 Sep 22 23:14:27.750 DEBG up_ds_listen checked 1 jobs, back to waiting
16816 Sep 22 23:14:27.750 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16817 Sep 22 23:14:27.750 DEBG [2] Read already AckReady 1004, : downstairs
16818 Sep 22 23:14:27.750 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16819 Sep 22 23:14:27.750 DEBG up_ds_listen was notified
16820 Sep 22 23:14:27.751 DEBG up_ds_listen process 1004
16821 Sep 22 23:14:27.751 DEBG [A] ack job 1004:5, : downstairs
16822 Sep 22 23:14:27.751 DEBG Flush :1003 extent_limit None deps:[JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
16823 Sep 22 23:14:27.751 INFO current number of open files limit 65536 is already the maximum
16824 Sep 22 23:14:27.751 INFO Opened existing region file "/tmp/downstairs-AadW8v1X/region.json"
16825 Sep 22 23:14:27.751 INFO Database read version 1
16826 Sep 22 23:14:27.751 INFO Database write version 1
16827 Sep 22 23:14:27.751 DEBG up_ds_listen was notified
16828 Sep 22 23:14:27.751 DEBG up_ds_listen process 1003
16829 Sep 22 23:14:27.751 DEBG [A] ack job 1003:4, : downstairs
16830 Sep 22 23:14:27.751 DEBG [rc] retire 1003 clears [JobId(1000), JobId(1001), JobId(1002), JobId(1003)], : downstairs
16831 Sep 22 23:14:27.751 DEBG up_ds_listen checked 1 jobs, back to waiting
16832 Sep 22 23:14:27.751 DEBG up_ds_listen checked 1 jobs, back to waiting
16833 Sep 22 23:14:27.751 DEBG IO Read 1004 has deps []
16834 Sep 22 23:14:27.751 INFO Request to deactivate this guest
16835 Sep 22 23:14:27.752 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 set deactivating.
16836 Sep 22 23:14:27.752 DEBG IO Flush 1001 has deps [JobId(1000)]
16837 Sep 22 23:14:27.752 INFO current number of open files limit 65536 is already the maximum
16838 Sep 22 23:14:27.752 INFO Opened existing region file "/tmp/downstairs-tpdOXLU0/region.json"
16839 Sep 22 23:14:27.752 INFO Database read version 1
16840 Sep 22 23:14:27.752 INFO Database write version 1
16841 Sep 22 23:14:27.752 DEBG [1] Read already AckReady 1005, : downstairs
16842 Sep 22 23:14:27.752 DEBG Read :1004 deps:[] res:true
16843 Sep 22 23:14:27.753 DEBG Write :1000 deps:[] res:true
16844 Sep 22 23:14:27.753 DEBG Read :1004 deps:[] res:true
16845 Sep 22 23:14:27.753 DEBG Read :1004 deps:[] res:true
16846 Sep 22 23:14:27.754 INFO UUID: ac1cf4dc-f285-414c-b94a-cbeedf2f019d
16847 Sep 22 23:14:27.754 INFO Blocks per extent:5 Total Extents: 2
16848 Sep 22 23:14:27.754 DEBG Write :1000 deps:[] res:true
16849 Sep 22 23:14:27.754 INFO Crucible Version: Crucible Version: 0.0.1
16850 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16851 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16852 rustc: 1.70.0 stable x86_64-unknown-illumos
16853 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16854 Sep 22 23:14:27.754 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16855 Sep 22 23:14:27.754 INFO Using address: 127.0.0.1:60728, task: main
16856 Sep 22 23:14:27.754 INFO Repair listens on 127.0.0.1:0, task: repair
16857 Sep 22 23:14:27.754 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36934, task: repair
16858 Sep 22 23:14:27.754 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36934, task: repair
16859 Sep 22 23:14:27.754 INFO listening, local_addr: 127.0.0.1:36934, task: repair
16860 Sep 22 23:14:27.755 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36934, task: repair
16861 Sep 22 23:14:27.755 INFO Using repair address: 127.0.0.1:36934, task: main
16862 Sep 22 23:14:27.755 INFO No SSL acceptor configured, task: main
16863 Sep 22 23:14:27.755 DEBG [2] Read already AckReady 1005, : downstairs
16864 Sep 22 23:14:27.755 DEBG up_ds_listen was notified
16865 Sep 22 23:14:27.755 DEBG up_ds_listen process 1005
16866 Sep 22 23:14:27.755 INFO UUID: 13286ed5-820a-470e-a2ba-89ce1d0ebb25
16867 Sep 22 23:14:27.755 DEBG [A] ack job 1005:6, : downstairs
16868 Sep 22 23:14:27.755 INFO Blocks per extent:5 Total Extents: 2
16869 Sep 22 23:14:27.755 INFO Crucible Version: Crucible Version: 0.0.1
16870 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
16871 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
16872 rustc: 1.70.0 stable x86_64-unknown-illumos
16873 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
16874 Sep 22 23:14:27.755 INFO Upstairs <-> Downstairs Message Version: 4, task: main
16875 Sep 22 23:14:27.755 INFO Using address: 127.0.0.1:50371, task: main
16876 Sep 22 23:14:27.755 DEBG up_ds_listen checked 1 jobs, back to waiting
16877 Sep 22 23:14:27.755 DEBG Write :1000 deps:[] res:true
16878 Sep 22 23:14:27.756 INFO Repair listens on 127.0.0.1:0, task: repair
16879 Sep 22 23:14:27.756 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53863, task: repair
16880 Sep 22 23:14:27.756 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53863, task: repair
16881 Sep 22 23:14:27.756 INFO listening, local_addr: 127.0.0.1:53863, task: repair
16882 Sep 22 23:14:27.756 INFO [0] deactivate job 1001 not InProgress flush, NO
16883 Sep 22 23:14:27.756 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53863, task: repair
16884 Sep 22 23:14:27.756 INFO Using repair address: 127.0.0.1:53863, task: main
16885 Sep 22 23:14:27.756 INFO [2] deactivate job 1001 not InProgress flush, NO
16886 Sep 22 23:14:27.756 INFO No SSL acceptor configured, task: main
16887 Sep 22 23:14:27.756 INFO [1] deactivate job 1001 not InProgress flush, NO
16888 Sep 22 23:14:27.756 INFO current number of open files limit 65536 is already the maximum
16889 Sep 22 23:14:27.756 INFO Upstairs starts
16890 Sep 22 23:14:27.756 INFO Created new region file "/tmp/downstairs-TELOR5uT/region.json"
16891 Sep 22 23:14:27.756 INFO Crucible Version: BuildInfo {
16892 version: "0.0.1",
16893 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
16894 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
16895 git_branch: "main",
16896 rustc_semver: "1.70.0",
16897 rustc_channel: "stable",
16898 rustc_host_triple: "x86_64-unknown-illumos",
16899 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
16900 cargo_triple: "x86_64-unknown-illumos",
16901 debug: true,
16902 opt_level: 0,
16903 }
16904 Sep 22 23:14:27.756 INFO Upstairs <-> Downstairs Message Version: 4
16905 Sep 22 23:14:27.756 INFO Crucible stats registered with UUID: de3b8039-2721-4588-b15f-2b25684f8d95
16906 Sep 22 23:14:27.756 INFO Crucible de3b8039-2721-4588-b15f-2b25684f8d95 has session id: 1f4003f9-85a2-4adf-822f-455c625523f3
16907 test test::integration_test_scrub_short_sparse ... ok
16908 Sep 22 23:14:27.757 INFO listening on 127.0.0.1:0, task: main
16909 Sep 22 23:14:27.757 INFO listening on 127.0.0.1:0, task: main
16910 Sep 22 23:14:27.757 INFO listening on 127.0.0.1:0, task: main
16911 Sep 22 23:14:27.757 INFO [0] connecting to 127.0.0.1:39658, looper: 0
16912 Sep 22 23:14:27.757 INFO [1] connecting to 127.0.0.1:62268, looper: 1
16913 Sep 22 23:14:27.757 DEBG [0] Read AckReady 1004, : downstairs
16914 Sep 22 23:14:27.757 INFO current number of open files limit 65536 is already the maximum
16915 Sep 22 23:14:27.757 INFO [2] connecting to 127.0.0.1:60728, looper: 2
16916 Sep 22 23:14:27.757 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
16917 Sep 22 23:14:27.757 INFO up_listen starts, task: up_listen
16918 Sep 22 23:14:27.757 INFO Wait for all three downstairs to come online
16919 Sep 22 23:14:27.757 INFO Created new region file "/tmp/downstairs-uCbezb5Z/region.json"
16920 Sep 22 23:14:27.757 INFO Flush timeout: 0.5
16921 Sep 22 23:14:27.757 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
16922 Sep 22 23:14:27.757 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
16923 Sep 22 23:14:27.757 INFO [0] check deactivate YES
16924 Sep 22 23:14:27.757 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (7d35a049-e6b2-42d0-8692-1238474c8ef7) Active Active Active ds_transition to Deactivated
16925 Sep 22 23:14:27.757 INFO [0] Transition from Active to Deactivated
16926 Sep 22 23:14:27.757 INFO accepted connection from 127.0.0.1:62055, task: main
16927 Sep 22 23:14:27.758 INFO [2] check deactivate YES
16928 Sep 22 23:14:27.758 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (7d35a049-e6b2-42d0-8692-1238474c8ef7) Deactivated Active Active ds_transition to Deactivated
16929 Sep 22 23:14:27.758 INFO [2] Transition from Active to Deactivated
16930 Sep 22 23:14:27.758 INFO [1] de3b8039-2721-4588-b15f-2b25684f8d95 looper connected, looper: 1
16931 Sep 22 23:14:27.758 DEBG [1] deactivate flush 1001 done, : downstairs
16932 Sep 22 23:14:27.758 INFO [1] Proc runs for 127.0.0.1:62268 in state New
16933 Sep 22 23:14:27.758 INFO [1] check deactivate YES
16934 Sep 22 23:14:27.758 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (7d35a049-e6b2-42d0-8692-1238474c8ef7) Deactivated Active Deactivated ds_transition to Deactivated
16935 Sep 22 23:14:27.758 INFO [1] Transition from Active to Deactivated
16936 Sep 22 23:14:27.758 INFO [0] de3b8039-2721-4588-b15f-2b25684f8d95 looper connected, looper: 0
16937 Sep 22 23:14:27.758 INFO [0] Proc runs for 127.0.0.1:39658 in state New
16938 Sep 22 23:14:27.758 ERRO 127.0.0.1:35706: proc: [0] client work task ended, Ok(Err([0] exits after deactivation)), so we end too, looper: 0
16939 Sep 22 23:14:27.758 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 Gone missing, transition from Deactivated to New
16940 Sep 22 23:14:27.758 INFO deactivate transition checking...
16941 Sep 22 23:14:27.758 INFO deactivate_transition New Maybe
16942 Sep 22 23:14:27.758 INFO deactivate_transition Deactivated NO
16943 Sep 22 23:14:27.758 INFO deactivate_transition Deactivated NO
16944 Sep 22 23:14:27.758 INFO accepted connection from 127.0.0.1:63374, task: main
16945 Sep 22 23:14:27.758 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 connection to 127.0.0.1:35706 closed, looper: 0
16946 Sep 22 23:14:27.758 INFO [2] de3b8039-2721-4588-b15f-2b25684f8d95 looper connected, looper: 2
16947 Sep 22 23:14:27.758 ERRO 127.0.0.1:34180: proc: [2] client work task ended, Ok(Err([2] exits after deactivation)), so we end too, looper: 2
16948 Sep 22 23:14:27.758 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 Gone missing, transition from Deactivated to New
16949 Sep 22 23:14:27.758 INFO [2] Proc runs for 127.0.0.1:60728 in state New
16950 Sep 22 23:14:27.758 INFO deactivate transition checking...
16951 Sep 22 23:14:27.758 INFO deactivate_transition New Maybe
16952 Sep 22 23:14:27.758 INFO deactivate_transition Deactivated NO
16953 Sep 22 23:14:27.758 INFO deactivate_transition New Maybe
16954 Sep 22 23:14:27.758 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 connection to 127.0.0.1:34180 closed, looper: 2
16955 Sep 22 23:14:27.758 DEBG up_ds_listen was notified
16956 Sep 22 23:14:27.758 DEBG up_ds_listen process 1001
16957 Sep 22 23:14:27.758 DEBG [A] ack job 1001:2, : downstairs
16958 Sep 22 23:14:27.758 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
16959 Sep 22 23:14:27.758 DEBG up_ds_listen checked 1 jobs, back to waiting
16960 Sep 22 23:14:27.758 INFO accepted connection from 127.0.0.1:64760, task: main
16961 Sep 22 23:14:27.758 ERRO 127.0.0.1:51761: proc: [1] client work task ended, Ok(Err([1] exits after deactivation)), so we end too, looper: 1
16962 Sep 22 23:14:27.758 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 Gone missing, transition from Deactivated to New
16963 Sep 22 23:14:27.758 INFO deactivate transition checking...
16964 Sep 22 23:14:27.758 INFO deactivate_transition New Maybe
16965 Sep 22 23:14:27.758 INFO deactivate_transition New Maybe
16966 Sep 22 23:14:27.758 INFO deactivate_transition New Maybe
16967 Sep 22 23:14:27.758 INFO All DS in the proper state! -> INIT
16968 Sep 22 23:14:27.758 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 connection to 127.0.0.1:51761 closed, looper: 1
16969 Sep 22 23:14:27.758 INFO [0] 127.0.0.1:35706 task reports connection:false
16970 Sep 22 23:14:27.758 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 New New New
16971 Sep 22 23:14:27.758 INFO [0] 127.0.0.1:35706 task reports offline
16972 Sep 22 23:14:27.758 INFO [2] 127.0.0.1:34180 task reports connection:false
16973 Sep 22 23:14:27.758 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 New New New
16974 Sep 22 23:14:27.758 INFO [2] 127.0.0.1:34180 task reports offline
16975 Sep 22 23:14:27.758 INFO [1] 127.0.0.1:51761 task reports connection:false
16976 Sep 22 23:14:27.758 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 New New New
16977 Sep 22 23:14:27.758 INFO [1] 127.0.0.1:51761 task reports offline
16978 Sep 22 23:14:27.759 INFO Connection request from de3b8039-2721-4588-b15f-2b25684f8d95 with version 4, task: proc
16979 Sep 22 23:14:27.759 INFO current number of open files limit 65536 is already the maximum
16980 Sep 22 23:14:27.759 INFO upstairs UpstairsConnection { upstairs_id: de3b8039-2721-4588-b15f-2b25684f8d95, session_id: d559352a-579e-4e6f-9baa-d23c3f55b78d, gen: 1 } connected, version 4, task: proc
16981 Sep 22 23:14:27.759 INFO Opened existing region file "/tmp/downstairs-NzNQXxni/region.json"
16982 Sep 22 23:14:27.759 INFO Database read version 1
16983 Sep 22 23:14:27.759 INFO Database write version 1
16984 Sep 22 23:14:27.759 INFO Connection request from de3b8039-2721-4588-b15f-2b25684f8d95 with version 4, task: proc
16985 Sep 22 23:14:27.759 INFO upstairs UpstairsConnection { upstairs_id: de3b8039-2721-4588-b15f-2b25684f8d95, session_id: d559352a-579e-4e6f-9baa-d23c3f55b78d, gen: 1 } connected, version 4, task: proc
16986 Sep 22 23:14:27.759 INFO Connection request from de3b8039-2721-4588-b15f-2b25684f8d95 with version 4, task: proc
16987 Sep 22 23:14:27.759 INFO upstairs UpstairsConnection { upstairs_id: de3b8039-2721-4588-b15f-2b25684f8d95, session_id: d559352a-579e-4e6f-9baa-d23c3f55b78d, gen: 1 } connected, version 4, task: proc
16988 Sep 22 23:14:27.759 INFO [1] de3b8039-2721-4588-b15f-2b25684f8d95 (d559352a-579e-4e6f-9baa-d23c3f55b78d) New New New ds_transition to WaitActive
16989 Sep 22 23:14:27.759 INFO [1] Transition from New to WaitActive
16990 Sep 22 23:14:27.759 INFO [0] de3b8039-2721-4588-b15f-2b25684f8d95 (d559352a-579e-4e6f-9baa-d23c3f55b78d) New WaitActive New ds_transition to WaitActive
16991 Sep 22 23:14:27.759 INFO [0] Transition from New to WaitActive
16992 Sep 22 23:14:27.759 INFO [2] de3b8039-2721-4588-b15f-2b25684f8d95 (d559352a-579e-4e6f-9baa-d23c3f55b78d) WaitActive WaitActive New ds_transition to WaitActive
16993 Sep 22 23:14:27.759 INFO [2] Transition from New to WaitActive
16994 The guest has requested activation
16995 Sep 22 23:14:27.759 INFO de3b8039-2721-4588-b15f-2b25684f8d95 active request set
16996 Sep 22 23:14:27.759 DEBG [1] Read already AckReady 1004, : downstairs
16997 Sep 22 23:14:27.759 INFO [0] received activate with gen 1
16998 Sep 22 23:14:27.760 INFO [0] client got ds_active_rx, promote! session d559352a-579e-4e6f-9baa-d23c3f55b78d
16999 Sep 22 23:14:27.760 INFO [1] received activate with gen 1
17000 Sep 22 23:14:27.760 INFO [1] client got ds_active_rx, promote! session d559352a-579e-4e6f-9baa-d23c3f55b78d
17001 Sep 22 23:14:27.760 INFO [2] received activate with gen 1
17002 Sep 22 23:14:27.760 INFO [2] client got ds_active_rx, promote! session d559352a-579e-4e6f-9baa-d23c3f55b78d
17003 Sep 22 23:14:27.760 INFO UpstairsConnection { upstairs_id: de3b8039-2721-4588-b15f-2b25684f8d95, session_id: d559352a-579e-4e6f-9baa-d23c3f55b78d, gen: 1 } is now active (read-write)
17004 test test::integration_test_scrub_short ... okSep 22 23:14:27.760 INFO UpstairsConnection { upstairs_id: de3b8039-2721-4588-b15f-2b25684f8d95, session_id: d559352a-579e-4e6f-9baa-d23c3f55b78d, gen: 1 } is now active (read-write)
17005 
17006 Sep 22 23:14:27.760 INFO UpstairsConnection { upstairs_id: de3b8039-2721-4588-b15f-2b25684f8d95, session_id: d559352a-579e-4e6f-9baa-d23c3f55b78d, gen: 1 } is now active (read-write)
17007 Sep 22 23:14:27.760 INFO UUID: a0901c3f-7fbf-40b8-85c2-142a27a641c8
17008 Sep 22 23:14:27.760 INFO current number of open files limit 65536 is already the maximum
17009 Sep 22 23:14:27.760 INFO Blocks per extent:5 Total Extents: 2
17010 Sep 22 23:14:27.760 INFO Crucible Version: Crucible Version: 0.0.1
17011 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17012 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17013 rustc: 1.70.0 stable x86_64-unknown-illumos
17014 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17015 Sep 22 23:14:27.760 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17016 Sep 22 23:14:27.760 INFO Using address: 127.0.0.1:34728, task: main
17017 Sep 22 23:14:27.760 INFO Created new region file "/tmp/downstairs-14il0Ky1/region.json"
17018 Sep 22 23:14:27.760 INFO Repair listens on 127.0.0.1:0, task: repair
17019 Sep 22 23:14:27.761 INFO [1] downstairs client at 127.0.0.1:62268 has UUID cc461d70-432a-44c4-b9ed-07b65b90fa30
17020 Sep 22 23:14:27.761 INFO current number of open files limit 65536 is already the maximum
17021 Sep 22 23:14:27.761 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: cc461d70-432a-44c4-b9ed-07b65b90fa30, encrypted: true, database_read_version: 1, database_write_version: 1 }
17022 Sep 22 23:14:27.761 INFO Opened existing region file "/tmp/downstairs-TELOR5uT/region.json"
17023 Sep 22 23:14:27.761 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:61304, task: repair
17024 Sep 22 23:14:27.761 INFO Database read version 1
17025 Sep 22 23:14:27.761 INFO Database write version 1
17026 Sep 22 23:14:27.761 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:61304, task: repair
17027 Sep 22 23:14:27.761 INFO de3b8039-2721-4588-b15f-2b25684f8d95 WaitActive WaitActive WaitActive
17028 Sep 22 23:14:27.761 INFO listening, local_addr: 127.0.0.1:61304, task: repair
17029 Sep 22 23:14:27.761 INFO [0] downstairs client at 127.0.0.1:39658 has UUID dcbee2a7-5a05-4339-b188-1d1bfb232386
17030 Sep 22 23:14:27.761 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: dcbee2a7-5a05-4339-b188-1d1bfb232386, encrypted: true, database_read_version: 1, database_write_version: 1 }
17031 Sep 22 23:14:27.761 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:61304, task: repair
17032 Sep 22 23:14:27.761 INFO de3b8039-2721-4588-b15f-2b25684f8d95 WaitActive WaitActive WaitActive
17033 Sep 22 23:14:27.761 INFO Using repair address: 127.0.0.1:61304, task: main
17034 Sep 22 23:14:27.761 INFO No SSL acceptor configured, task: main
17035 Sep 22 23:14:27.761 INFO current number of open files limit 65536 is already the maximum
17036 Sep 22 23:14:27.761 INFO [2] downstairs client at 127.0.0.1:60728 has UUID ac1cf4dc-f285-414c-b94a-cbeedf2f019d
17037 Sep 22 23:14:27.761 INFO Opened existing region file "/tmp/downstairs-JJG0Y7Ly/region.json"
17038 Sep 22 23:14:27.761 INFO Database read version 1
17039 Sep 22 23:14:27.761 INFO Database write version 1
17040 Sep 22 23:14:27.761 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ac1cf4dc-f285-414c-b94a-cbeedf2f019d, encrypted: true, database_read_version: 1, database_write_version: 1 }
17041 Sep 22 23:14:27.761 INFO de3b8039-2721-4588-b15f-2b25684f8d95 WaitActive WaitActive WaitActive
17042 Sep 22 23:14:27.761 INFO Current flush_numbers [0..12]: [0, 0]
17043 Sep 22 23:14:27.761 INFO current number of open files limit 65536 is already the maximum
17044 Sep 22 23:14:27.761 INFO Opened existing region file "/tmp/downstairs-uCbezb5Z/region.json"
17045 Sep 22 23:14:27.761 INFO Database read version 1
17046 Sep 22 23:14:27.761 INFO Database write version 1
17047 Sep 22 23:14:27.761 INFO Downstairs has completed Negotiation, task: proc
17048 Sep 22 23:14:27.762 INFO Current flush_numbers [0..12]: [0, 0]
17049 Sep 22 23:14:27.762 INFO Downstairs has completed Negotiation, task: proc
17050 Sep 22 23:14:27.762 INFO UUID: 5bccaeee-f6e7-4a30-aac1-e7711f75e3d5
17051 Sep 22 23:14:27.762 INFO Current flush_numbers [0..12]: [0, 0]
17052 Sep 22 23:14:27.762 INFO Blocks per extent:5 Total Extents: 2
17053 Sep 22 23:14:27.762 DEBG [2] Read already AckReady 1004, : downstairs
17054 Sep 22 23:14:27.762 INFO Crucible Version: Crucible Version: 0.0.1
17055 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17056 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17057 rustc: 1.70.0 stable x86_64-unknown-illumos
17058 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17059 Sep 22 23:14:27.762 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17060 Sep 22 23:14:27.762 INFO Using address: 127.0.0.1:53080, task: main
17061 Sep 22 23:14:27.762 DEBG up_ds_listen was notified
17062 Sep 22 23:14:27.762 DEBG up_ds_listen process 1004
17063 Sep 22 23:14:27.762 INFO Downstairs has completed Negotiation, task: proc
17064 Sep 22 23:14:27.762 DEBG [A] ack job 1004:5, : downstairs
17065 Sep 22 23:14:27.762 INFO UUID: e45251b5-0344-4afb-bb50-e9e4af673e5f
17066 Sep 22 23:14:27.762 INFO Blocks per extent:5 Total Extents: 2
17067 Sep 22 23:14:27.762 INFO Crucible Version: Crucible Version: 0.0.1
17068 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17069 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17070 rustc: 1.70.0 stable x86_64-unknown-illumos
17071 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17072 Sep 22 23:14:27.762 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17073 Sep 22 23:14:27.762 INFO Using address: 127.0.0.1:61240, task: main
17074 Sep 22 23:14:27.762 INFO [1] de3b8039-2721-4588-b15f-2b25684f8d95 (d559352a-579e-4e6f-9baa-d23c3f55b78d) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
17075 Sep 22 23:14:27.762 INFO [1] Transition from WaitActive to WaitQuorum
17076 Sep 22 23:14:27.762 WARN [1] new RM replaced this: None
17077 Sep 22 23:14:27.762 INFO [1] Starts reconcile loop
17078 Sep 22 23:14:27.762 INFO Repair listens on 127.0.0.1:0, task: repair
17079 Sep 22 23:14:27.763 INFO [0] de3b8039-2721-4588-b15f-2b25684f8d95 (d559352a-579e-4e6f-9baa-d23c3f55b78d) WaitActive WaitQuorum WaitActive ds_transition to WaitQuorum
17080 Sep 22 23:14:27.763 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:39280, task: repair
17081 Sep 22 23:14:27.763 INFO [0] Transition from WaitActive to WaitQuorum
17082 Sep 22 23:14:27.763 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:39280, task: repair
17083 Sep 22 23:14:27.763 WARN [0] new RM replaced this: None
17084 Sep 22 23:14:27.763 INFO [0] Starts reconcile loop
17085 Sep 22 23:14:27.763 INFO listening, local_addr: 127.0.0.1:39280, task: repair
17086 Sep 22 23:14:27.763 INFO Repair listens on 127.0.0.1:0, task: repair
17087 Sep 22 23:14:27.763 DEBG up_ds_listen checked 1 jobs, back to waiting
17088 Sep 22 23:14:27.763 INFO [2] de3b8039-2721-4588-b15f-2b25684f8d95 (d559352a-579e-4e6f-9baa-d23c3f55b78d) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
17089 Sep 22 23:14:27.763 INFO [2] Transition from WaitActive to WaitQuorum
17090 Sep 22 23:14:27.763 WARN [2] new RM replaced this: None
17091 Sep 22 23:14:27.763 INFO [2] Starts reconcile loop
17092 Sep 22 23:14:27.763 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50554, task: repair
17093 Sep 22 23:14:27.763 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50554, task: repair
17094 Sep 22 23:14:27.763 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:39280, task: repair
17095 Sep 22 23:14:27.763 INFO listening, local_addr: 127.0.0.1:50554, task: repair
17096 Sep 22 23:14:27.763 INFO [1] 127.0.0.1:62268 task reports connection:true
17097 Sep 22 23:14:27.763 INFO Using repair address: 127.0.0.1:39280, task: main
17098 Sep 22 23:14:27.763 INFO No SSL acceptor configured, task: main
17099 Sep 22 23:14:27.763 INFO de3b8039-2721-4588-b15f-2b25684f8d95 WaitQuorum WaitQuorum WaitQuorum
17100 Sep 22 23:14:27.763 INFO [0]R flush_numbers: [0, 0]
17101 Sep 22 23:14:27.763 INFO [0]R generation: [0, 0]
17102 Sep 22 23:14:27.763 INFO [0]R dirty: [false, false]
17103 Sep 22 23:14:27.763 INFO [1]R flush_numbers: [0, 0]
17104 Sep 22 23:14:27.763 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50554, task: repair
17105 Sep 22 23:14:27.763 INFO [1]R generation: [0, 0]
17106 Sep 22 23:14:27.763 INFO [1]R dirty: [false, false]
17107 Sep 22 23:14:27.763 INFO Using repair address: 127.0.0.1:50554, task: main
17108 Sep 22 23:14:27.763 INFO No SSL acceptor configured, task: main
17109 Sep 22 23:14:27.763 INFO [2]R flush_numbers: [0, 0]
17110 Sep 22 23:14:27.763 INFO [2]R generation: [0, 0]
17111 Sep 22 23:14:27.763 INFO [2]R dirty: [false, false]
17112 Sep 22 23:14:27.763 INFO Max found gen is 1
17113 Sep 22 23:14:27.763 INFO current number of open files limit 65536 is already the maximum
17114 Sep 22 23:14:27.763 INFO Generation requested: 1 >= found:1
17115 Sep 22 23:14:27.763 INFO Opened existing region file "/tmp/downstairs-RAGRvo5y/region.json"
17116 Sep 22 23:14:27.763 INFO Database read version 1
17117 Sep 22 23:14:27.763 INFO Next flush: 1
17118 Sep 22 23:14:27.763 INFO Database write version 1
17119 Sep 22 23:14:27.763 INFO All extents match
17120 Sep 22 23:14:27.763 INFO No downstairs repair required
17121 Sep 22 23:14:27.763 INFO No initial repair work was required
17122 Sep 22 23:14:27.763 INFO Set Downstairs and Upstairs active
17123 Sep 22 23:14:27.763 INFO de3b8039-2721-4588-b15f-2b25684f8d95 is now active with session: d559352a-579e-4e6f-9baa-d23c3f55b78d
17124 Sep 22 23:14:27.763 INFO de3b8039-2721-4588-b15f-2b25684f8d95 Set Active after no repair
17125 Sep 22 23:14:27.763 INFO Notify all downstairs, region set compare is done.
17126 Sep 22 23:14:27.763 INFO Set check for repair
17127 Sep 22 23:14:27.763 INFO [0] 127.0.0.1:39658 task reports connection:true
17128 Sep 22 23:14:27.763 INFO de3b8039-2721-4588-b15f-2b25684f8d95 Active Active Active
17129 Sep 22 23:14:27.763 INFO Set check for repair
17130 Sep 22 23:14:27.763 INFO UUID: 2cb21d94-77f3-4d85-9833-6fd57802dc13
17131 Sep 22 23:14:27.763 INFO Blocks per extent:5 Total Extents: 2
17132 Sep 22 23:14:27.763 INFO [2] 127.0.0.1:60728 task reports connection:true
17133 Sep 22 23:14:27.763 INFO de3b8039-2721-4588-b15f-2b25684f8d95 Active Active Active
17134 Sep 22 23:14:27.763 INFO Set check for repair
17135 Sep 22 23:14:27.763 INFO Crucible Version: Crucible Version: 0.0.1
17136 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17137 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17138 rustc: 1.70.0 stable x86_64-unknown-illumos
17139 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17140 Sep 22 23:14:27.764 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17141 Sep 22 23:14:27.764 INFO Using address: 127.0.0.1:62529, task: main
17142 Sep 22 23:14:27.764 INFO [0] received reconcile message
17143 Sep 22 23:14:27.764 INFO [0] All repairs completed, exit
17144 Sep 22 23:14:27.764 INFO [0] Starts cmd_loop
17145 Sep 22 23:14:27.764 INFO [1] received reconcile message
17146 Sep 22 23:14:27.764 INFO [1] All repairs completed, exit
17147 Sep 22 23:14:27.764 INFO [1] Starts cmd_loop
17148 Sep 22 23:14:27.764 INFO [2] received reconcile message
17149 Sep 22 23:14:27.764 INFO [2] All repairs completed, exit
17150 Sep 22 23:14:27.764 INFO [2] Starts cmd_loop
17151 The guest has finished waiting for activation
17152 Sep 22 23:14:27.764 INFO Repair listens on 127.0.0.1:0, task: repair
17153 Sep 22 23:14:27.764 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38091, task: repair
17154 Sep 22 23:14:27.764 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38091, task: repair
17155 Sep 22 23:14:27.764 INFO listening, local_addr: 127.0.0.1:38091, task: repair
17156 Sep 22 23:14:27.764 DEBG IO Read 1000 has deps []
17157 Sep 22 23:14:27.764 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38091, task: repair
17158 Sep 22 23:14:27.764 INFO Using repair address: 127.0.0.1:38091, task: main
17159 Sep 22 23:14:27.764 INFO No SSL acceptor configured, task: main
17160 Sep 22 23:14:27.764 INFO Upstairs starts
17161 Sep 22 23:14:27.765 INFO Crucible Version: BuildInfo {
17162 version: "0.0.1",
17163 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
17164 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
17165 git_branch: "main",
17166 rustc_semver: "1.70.0",
17167 rustc_channel: "stable",
17168 rustc_host_triple: "x86_64-unknown-illumos",
17169 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
17170 cargo_triple: "x86_64-unknown-illumos",
17171 debug: true,
17172 opt_level: 0,
17173 }
17174 Sep 22 23:14:27.765 INFO Upstairs <-> Downstairs Message Version: 4
17175 Sep 22 23:14:27.765 INFO Crucible stats registered with UUID: 8c772b52-d880-4be4-a2d6-4ced50104923
17176 Sep 22 23:14:27.765 INFO Crucible 8c772b52-d880-4be4-a2d6-4ced50104923 has session id: 74b4f51b-c95c-416c-b091-8e4cc8b12bee
17177 Sep 22 23:14:27.765 INFO listening on 127.0.0.1:0, task: main
17178 Sep 22 23:14:27.765 INFO listening on 127.0.0.1:0, task: main
17179 Sep 22 23:14:27.765 INFO listening on 127.0.0.1:0, task: main
17180 Sep 22 23:14:27.765 INFO current number of open files limit 65536 is already the maximum
17181 Sep 22 23:14:27.765 INFO [0] connecting to 127.0.0.1:40377, looper: 0
17182 Sep 22 23:14:27.765 INFO Opened existing region file "/tmp/downstairs-14il0Ky1/region.json"
17183 Sep 22 23:14:27.765 INFO Database read version 1
17184 Sep 22 23:14:27.765 INFO Database write version 1
17185 Sep 22 23:14:27.765 INFO [1] connecting to 127.0.0.1:50371, looper: 1
17186 Sep 22 23:14:27.765 INFO [2] connecting to 127.0.0.1:53080, looper: 2
17187 Sep 22 23:14:27.765 INFO current number of open files limit 65536 is already the maximum
17188 Sep 22 23:14:27.765 INFO Created new region file "/tmp/downstairs-iS4nNbSN/region.json"
17189 Sep 22 23:14:27.765 INFO up_listen starts, task: up_listen
17190 Sep 22 23:14:27.765 INFO Wait for all three downstairs to come online
17191 Sep 22 23:14:27.766 INFO Flush timeout: 0.5
17192 Sep 22 23:14:27.766 INFO [0] 8c772b52-d880-4be4-a2d6-4ced50104923 looper connected, looper: 0
17193 Sep 22 23:14:27.766 INFO [0] Proc runs for 127.0.0.1:40377 in state New
17194 Sep 22 23:14:27.766 INFO accepted connection from 127.0.0.1:42064, task: main
17195 Sep 22 23:14:27.766 INFO [2] 8c772b52-d880-4be4-a2d6-4ced50104923 looper connected, looper: 2
17196 Sep 22 23:14:27.766 DEBG Read :1000 deps:[] res:true
17197 Sep 22 23:14:27.766 INFO [2] Proc runs for 127.0.0.1:53080 in state New
17198 Sep 22 23:14:27.766 INFO [1] 8c772b52-d880-4be4-a2d6-4ced50104923 looper connected, looper: 1
17199 Sep 22 23:14:27.766 INFO [1] Proc runs for 127.0.0.1:50371 in state New
17200 Sep 22 23:14:27.766 INFO accepted connection from 127.0.0.1:53474, task: main
17201 Sep 22 23:14:27.766 INFO accepted connection from 127.0.0.1:32942, task: main
17202 Sep 22 23:14:27.766 DEBG Read :1000 deps:[] res:true
17203 Sep 22 23:14:27.767 INFO Connection request from 8c772b52-d880-4be4-a2d6-4ced50104923 with version 4, task: proc
17204 Sep 22 23:14:27.767 INFO upstairs UpstairsConnection { upstairs_id: 8c772b52-d880-4be4-a2d6-4ced50104923, session_id: f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6, gen: 1 } connected, version 4, task: proc
17205 Sep 22 23:14:27.767 INFO Connection request from 8c772b52-d880-4be4-a2d6-4ced50104923 with version 4, task: proc
17206 Sep 22 23:14:27.767 INFO upstairs UpstairsConnection { upstairs_id: 8c772b52-d880-4be4-a2d6-4ced50104923, session_id: f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6, gen: 1 } connected, version 4, task: proc
17207 Sep 22 23:14:27.767 DEBG Read :1000 deps:[] res:true
17208 Sep 22 23:14:27.767 INFO Connection request from 8c772b52-d880-4be4-a2d6-4ced50104923 with version 4, task: proc
17209 Sep 22 23:14:27.767 INFO upstairs UpstairsConnection { upstairs_id: 8c772b52-d880-4be4-a2d6-4ced50104923, session_id: f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6, gen: 1 } connected, version 4, task: proc
17210 Sep 22 23:14:27.767 INFO UUID: c2d08264-7ded-412a-9acb-ca0bc576793e
17211 Sep 22 23:14:27.767 INFO Blocks per extent:5 Total Extents: 2
17212 The guest has requested activation
17213 Sep 22 23:14:27.767 INFO Crucible Version: Crucible Version: 0.0.1
17214 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17215 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17216 rustc: 1.70.0 stable x86_64-unknown-illumos
17217 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17218 Sep 22 23:14:27.767 INFO UUID: cc26b772-bef2-4af0-a9fc-0733fa8e0d6d
17219 Sep 22 23:14:27.767 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17220 Sep 22 23:14:27.767 INFO Using address: 127.0.0.1:49345, task: main
17221 Sep 22 23:14:27.767 INFO Blocks per extent:5 Total Extents: 2
17222 Sep 22 23:14:27.767 INFO [0] 8c772b52-d880-4be4-a2d6-4ced50104923 (f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6) New New New ds_transition to WaitActive
17223 Sep 22 23:14:27.767 INFO [0] Transition from New to WaitActive
17224 Sep 22 23:14:27.767 INFO Crucible Version: Crucible Version: 0.0.1
17225 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17226 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17227 rustc: 1.70.0 stable x86_64-unknown-illumos
17228 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17229 Sep 22 23:14:27.767 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17230 Sep 22 23:14:27.767 INFO Using address: 127.0.0.1:61471, task: main
17231 test test::integration_test_scrub_useless ... ok
17232 Sep 22 23:14:27.767 INFO [2] 8c772b52-d880-4be4-a2d6-4ced50104923 (f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6) WaitActive New New ds_transition to WaitActive
17233 Sep 22 23:14:27.767 INFO [2] Transition from New to WaitActive
17234 Sep 22 23:14:27.767 INFO [1] 8c772b52-d880-4be4-a2d6-4ced50104923 (f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6) WaitActive New WaitActive ds_transition to WaitActive
17235 Sep 22 23:14:27.767 INFO [1] Transition from New to WaitActive
17236 Sep 22 23:14:27.767 INFO Repair listens on 127.0.0.1:0, task: repair
17237 Sep 22 23:14:27.767 INFO 8c772b52-d880-4be4-a2d6-4ced50104923 active request set
17238 Sep 22 23:14:27.767 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57771, task: repair
17239 Sep 22 23:14:27.767 INFO [0] received activate with gen 1
17240 Sep 22 23:14:27.767 INFO [0] client got ds_active_rx, promote! session f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6
17241 Sep 22 23:14:27.767 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57771, task: repair
17242 Sep 22 23:14:27.767 INFO listening, local_addr: 127.0.0.1:57771, task: repair
17243 Sep 22 23:14:27.767 INFO Repair listens on 127.0.0.1:0, task: repair
17244 Sep 22 23:14:27.768 INFO [1] received activate with gen 1
17245 Sep 22 23:14:27.768 INFO [1] client got ds_active_rx, promote! session f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6
17246 Sep 22 23:14:27.768 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:41268, task: repair
17247 Sep 22 23:14:27.768 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57771, task: repair
17248 Sep 22 23:14:27.768 INFO current number of open files limit 65536 is already the maximum
17249 Sep 22 23:14:27.768 INFO Using repair address: 127.0.0.1:57771, task: main
17250 Sep 22 23:14:27.768 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:41268, task: repair
17251 Sep 22 23:14:27.768 INFO No SSL acceptor configured, task: main
17252 Sep 22 23:14:27.768 INFO [2] received activate with gen 1
17253 Sep 22 23:14:27.768 INFO [2] client got ds_active_rx, promote! session f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6
17254 Sep 22 23:14:27.768 INFO listening, local_addr: 127.0.0.1:41268, task: repair
17255 Sep 22 23:14:27.768 INFO Created new region file "/tmp/downstairs-pJh0v4dT/region.json"
17256 Sep 22 23:14:27.768 INFO Upstairs starts
17257 Sep 22 23:14:27.768 INFO UpstairsConnection { upstairs_id: 8c772b52-d880-4be4-a2d6-4ced50104923, session_id: f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6, gen: 1 } is now active (read-write)
17258 Sep 22 23:14:27.768 INFO Crucible Version: BuildInfo {
17259 version: "0.0.1",
17260 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
17261 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
17262 git_branch: "main",
17263 rustc_semver: "1.70.0",
17264 rustc_channel: "stable",
17265 rustc_host_triple: "x86_64-unknown-illumos",
17266 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
17267 cargo_triple: "x86_64-unknown-illumos",
17268 debug: true,
17269 opt_level: 0,
17270 }
17271 Sep 22 23:14:27.768 INFO Upstairs <-> Downstairs Message Version: 4
17272 Sep 22 23:14:27.768 INFO Crucible stats registered with UUID: d173eb3a-6ff3-4b10-89cb-971e46f2c897
17273 Sep 22 23:14:27.768 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:41268, task: repair
17274 Sep 22 23:14:27.768 INFO Crucible d173eb3a-6ff3-4b10-89cb-971e46f2c897 has session id: 116b3e11-839c-4e16-8ff9-3034e728e74a
17275 Sep 22 23:14:27.768 INFO Using repair address: 127.0.0.1:41268, task: main
17276 Sep 22 23:14:27.768 INFO No SSL acceptor configured, task: main
17277 Sep 22 23:14:27.768 INFO UpstairsConnection { upstairs_id: 8c772b52-d880-4be4-a2d6-4ced50104923, session_id: f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6, gen: 1 } is now active (read-write)
17278 Sep 22 23:14:27.768 DEBG [1] Read AckReady 1000, : downstairs
17279 Sep 22 23:14:27.768 WARN upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 7d35a049-e6b2-42d0-8692-1238474c8ef7, gen: 1 } disconnected, 0 jobs left, task: main
17280 Sep 22 23:14:27.768 WARN upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 7d35a049-e6b2-42d0-8692-1238474c8ef7, gen: 1 } was previously active, clearing, task: main
17281 Sep 22 23:14:27.768 INFO connection (127.0.0.1:34884): all done
17282 Sep 22 23:14:27.768 INFO UpstairsConnection { upstairs_id: 8c772b52-d880-4be4-a2d6-4ced50104923, session_id: f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6, gen: 1 } is now active (read-write)
17283 Sep 22 23:14:27.768 WARN upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 7d35a049-e6b2-42d0-8692-1238474c8ef7, gen: 1 } disconnected, 0 jobs left, task: main
17284 Sep 22 23:14:27.768 WARN upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 7d35a049-e6b2-42d0-8692-1238474c8ef7, gen: 1 } was previously active, clearing, task: main
17285 Sep 22 23:14:27.768 INFO connection (127.0.0.1:47731): all done
17286 Sep 22 23:14:27.768 WARN upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 7d35a049-e6b2-42d0-8692-1238474c8ef7, gen: 1 } disconnected, 0 jobs left, task: main
17287 Sep 22 23:14:27.768 WARN upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 7d35a049-e6b2-42d0-8692-1238474c8ef7, gen: 1 } was previously active, clearing, task: main
17288 Sep 22 23:14:27.768 DEBG [0] Read already AckReady 1000, : downstairs
17289 Sep 22 23:14:27.768 INFO current number of open files limit 65536 is already the maximum
17290 Sep 22 23:14:27.768 INFO connection (127.0.0.1:62343): all done
17291 Sep 22 23:14:27.768 INFO listening on 127.0.0.1:0, task: main
17292 Sep 22 23:14:27.768 INFO listening on 127.0.0.1:0, task: main
17293 Sep 22 23:14:27.768 INFO Created new region file "/tmp/downstairs-hIre6xvC/region.json"
17294 Sep 22 23:14:27.768 INFO listening on 127.0.0.1:0, task: main
17295 Sep 22 23:14:27.768 INFO [0] connecting to 127.0.0.1:34728, looper: 0
17296 Sep 22 23:14:27.768 DEBG [2] Read already AckReady 1000, : downstairs
17297 Sep 22 23:14:27.768 INFO [1] connecting to 127.0.0.1:61240, looper: 1
17298 Sep 22 23:14:27.768 DEBG up_ds_listen was notified
17299 Sep 22 23:14:27.768 DEBG up_ds_listen process 1000
17300 Sep 22 23:14:27.769 INFO current number of open files limit 65536 is already the maximum
17301 Sep 22 23:14:27.769 DEBG [A] ack job 1000:1, : downstairs
17302 Sep 22 23:14:27.769 INFO [2] connecting to 127.0.0.1:49345, looper: 2
17303 Sep 22 23:14:27.769 INFO Opened existing region file "/tmp/downstairs-iS4nNbSN/region.json"
17304 Sep 22 23:14:27.769 INFO [0] downstairs client at 127.0.0.1:40377 has UUID d97d6679-3993-407e-b222-4165f2682efb
17305 Sep 22 23:14:27.769 INFO Database read version 1
17306 Sep 22 23:14:27.769 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d97d6679-3993-407e-b222-4165f2682efb, encrypted: true, database_read_version: 1, database_write_version: 1 }
17307 Sep 22 23:14:27.769 INFO Database write version 1
17308 Sep 22 23:14:27.769 INFO up_listen starts, task: up_listen
17309 Sep 22 23:14:27.769 INFO Wait for all three downstairs to come online
17310 Sep 22 23:14:27.769 INFO Flush timeout: 0.5
17311 Sep 22 23:14:27.769 INFO 8c772b52-d880-4be4-a2d6-4ced50104923 WaitActive WaitActive WaitActive
17312 Sep 22 23:14:27.769 INFO [2] downstairs client at 127.0.0.1:53080 has UUID 5bccaeee-f6e7-4a30-aac1-e7711f75e3d5
17313 Sep 22 23:14:27.769 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 5bccaeee-f6e7-4a30-aac1-e7711f75e3d5, encrypted: true, database_read_version: 1, database_write_version: 1 }
17314 Sep 22 23:14:27.769 INFO 8c772b52-d880-4be4-a2d6-4ced50104923 WaitActive WaitActive WaitActive
17315 Sep 22 23:14:27.769 INFO [1] downstairs client at 127.0.0.1:50371 has UUID 13286ed5-820a-470e-a2ba-89ce1d0ebb25
17316 Sep 22 23:14:27.769 DEBG up_ds_listen checked 1 jobs, back to waiting
17317 Sep 22 23:14:27.769 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 13286ed5-820a-470e-a2ba-89ce1d0ebb25, encrypted: true, database_read_version: 1, database_write_version: 1 }
17318 Sep 22 23:14:27.769 INFO 8c772b52-d880-4be4-a2d6-4ced50104923 WaitActive WaitActive WaitActive
17319 Sep 22 23:14:27.769 INFO accepted connection from 127.0.0.1:40760, task: main
17320 Sep 22 23:14:27.769 INFO accepted connection from 127.0.0.1:53958, task: main
17321 Sep 22 23:14:27.769 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 looper connected, looper: 0
17322 Sep 22 23:14:27.769 INFO [0] Proc runs for 127.0.0.1:34728 in state New
17323 Sep 22 23:14:27.769 INFO Current flush_numbers [0..12]: [0, 0]
17324 Sep 22 23:14:27.769 INFO accepted connection from 127.0.0.1:53496, task: main
17325 Sep 22 23:14:27.769 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 looper connected, looper: 1
17326 Sep 22 23:14:27.769 INFO [1] Proc runs for 127.0.0.1:61240 in state New
17327 Sep 22 23:14:27.769 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 looper connected, looper: 2
17328 Sep 22 23:14:27.769 INFO [2] Proc runs for 127.0.0.1:49345 in state New
17329 Sep 22 23:14:27.769 INFO Downstairs has completed Negotiation, task: proc
17330 Sep 22 23:14:27.770 INFO Connection request from d173eb3a-6ff3-4b10-89cb-971e46f2c897 with version 4, task: proc
17331 Sep 22 23:14:27.770 INFO upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 90c0414c-0a2e-4dcb-869c-f885bd47f77c, gen: 2 } connected, version 4, task: proc
17332 Sep 22 23:14:27.770 INFO Connection request from d173eb3a-6ff3-4b10-89cb-971e46f2c897 with version 4, task: proc
17333 Sep 22 23:14:27.770 INFO upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 90c0414c-0a2e-4dcb-869c-f885bd47f77c, gen: 2 } connected, version 4, task: proc
17334 Sep 22 23:14:27.770 INFO Connection request from d173eb3a-6ff3-4b10-89cb-971e46f2c897 with version 4, task: proc
17335 Sep 22 23:14:27.770 INFO upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 90c0414c-0a2e-4dcb-869c-f885bd47f77c, gen: 2 } connected, version 4, task: proc
17336 Sep 22 23:14:27.770 INFO Current flush_numbers [0..12]: [0, 0]
17337 The guest has requested activation
17338 Sep 22 23:14:27.770 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (90c0414c-0a2e-4dcb-869c-f885bd47f77c) New New New ds_transition to WaitActive
17339 Sep 22 23:14:27.770 INFO [0] Transition from New to WaitActive
17340 Sep 22 23:14:27.770 INFO Downstairs has completed Negotiation, task: proc
17341 Sep 22 23:14:27.770 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (90c0414c-0a2e-4dcb-869c-f885bd47f77c) WaitActive New New ds_transition to WaitActive
17342 Sep 22 23:14:27.770 INFO [1] Transition from New to WaitActive
17343 Sep 22 23:14:27.770 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (90c0414c-0a2e-4dcb-869c-f885bd47f77c) WaitActive WaitActive New ds_transition to WaitActive
17344 Sep 22 23:14:27.770 INFO [2] Transition from New to WaitActive
17345 Sep 22 23:14:27.770 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 active request set
17346 Sep 22 23:14:27.770 INFO Current flush_numbers [0..12]: [0, 0]
17347 Sep 22 23:14:27.770 INFO [0] received activate with gen 2
17348 Sep 22 23:14:27.770 INFO [0] client got ds_active_rx, promote! session 90c0414c-0a2e-4dcb-869c-f885bd47f77c
17349 Sep 22 23:14:27.770 INFO [1] received activate with gen 2
17350 Sep 22 23:14:27.770 INFO [1] client got ds_active_rx, promote! session 90c0414c-0a2e-4dcb-869c-f885bd47f77c
17351 Sep 22 23:14:27.770 INFO [2] received activate with gen 2
17352 Sep 22 23:14:27.770 INFO [2] client got ds_active_rx, promote! session 90c0414c-0a2e-4dcb-869c-f885bd47f77c
17353 Sep 22 23:14:27.771 INFO UUID: cc5d982e-2876-45d7-90a8-4f67b873e08d
17354 Sep 22 23:14:27.771 INFO Downstairs has completed Negotiation, task: proc
17355 Sep 22 23:14:27.771 INFO Blocks per extent:5 Total Extents: 2
17356 Sep 22 23:14:27.771 INFO [0] downstairs client at 127.0.0.1:34728 has UUID a0901c3f-7fbf-40b8-85c2-142a27a641c8
17357 Sep 22 23:14:27.771 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a0901c3f-7fbf-40b8-85c2-142a27a641c8, encrypted: true, database_read_version: 1, database_write_version: 1 }
17358 Sep 22 23:14:27.771 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 WaitActive WaitActive WaitActive
17359 Sep 22 23:14:27.771 INFO Crucible Version: Crucible Version: 0.0.1
17360 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17361 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17362 rustc: 1.70.0 stable x86_64-unknown-illumos
17363 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17364 Sep 22 23:14:27.771 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17365 Sep 22 23:14:27.771 INFO Using address: 127.0.0.1:50210, task: main
17366 Sep 22 23:14:27.771 INFO [1] downstairs client at 127.0.0.1:61240 has UUID e45251b5-0344-4afb-bb50-e9e4af673e5f
17367 Sep 22 23:14:27.771 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e45251b5-0344-4afb-bb50-e9e4af673e5f, encrypted: true, database_read_version: 1, database_write_version: 1 }
17368 Sep 22 23:14:27.771 INFO [0] 8c772b52-d880-4be4-a2d6-4ced50104923 (f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
17369 Sep 22 23:14:27.771 INFO [0] Transition from WaitActive to WaitQuorum
17370 Sep 22 23:14:27.771 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 WaitActive WaitActive WaitActive
17371 Sep 22 23:14:27.771 WARN [0] new RM replaced this: None
17372 Sep 22 23:14:27.771 INFO [0] Starts reconcile loop
17373 Sep 22 23:14:27.771 INFO [2] downstairs client at 127.0.0.1:49345 has UUID c2d08264-7ded-412a-9acb-ca0bc576793e
17374 Sep 22 23:14:27.771 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c2d08264-7ded-412a-9acb-ca0bc576793e, encrypted: true, database_read_version: 1, database_write_version: 1 }
17375 Sep 22 23:14:27.771 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 WaitActive WaitActive WaitActive
17376 Sep 22 23:14:27.771 INFO [2] 8c772b52-d880-4be4-a2d6-4ced50104923 (f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
17377 Sep 22 23:14:27.771 INFO [2] Transition from WaitActive to WaitQuorum
17378 Sep 22 23:14:27.771 WARN [2] new RM replaced this: None
17379 Sep 22 23:14:27.771 INFO [2] Starts reconcile loop
17380 Sep 22 23:14:27.771 INFO [1] 8c772b52-d880-4be4-a2d6-4ced50104923 (f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum
17381 Sep 22 23:14:27.771 INFO Current flush_numbers [0..12]: [1, 1]
17382 Sep 22 23:14:27.771 INFO [1] Transition from WaitActive to WaitQuorum
17383 Sep 22 23:14:27.771 WARN [1] new RM replaced this: None
17384 Sep 22 23:14:27.771 INFO [1] Starts reconcile loop
17385 Sep 22 23:14:27.771 INFO Repair listens on 127.0.0.1:0, task: repair
17386 Sep 22 23:14:27.771 INFO Downstairs has completed Negotiation, task: proc
17387 Sep 22 23:14:27.771 INFO [0] 127.0.0.1:40377 task reports connection:true
17388 Sep 22 23:14:27.772 INFO 8c772b52-d880-4be4-a2d6-4ced50104923 WaitQuorum WaitQuorum WaitQuorum
17389 Sep 22 23:14:27.772 INFO [0]R flush_numbers: [0, 0]
17390 Sep 22 23:14:27.772 INFO [0]R generation: [0, 0]
17391 Sep 22 23:14:27.772 INFO [0]R dirty: [false, false]
17392 Sep 22 23:14:27.772 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57719, task: repair
17393 Sep 22 23:14:27.772 INFO [1]R flush_numbers: [0, 0]
17394 Sep 22 23:14:27.772 INFO [1]R generation: [0, 0]
17395 Sep 22 23:14:27.772 INFO [1]R dirty: [false, false]
17396 Sep 22 23:14:27.772 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57719, task: repair
17397 Sep 22 23:14:27.772 INFO [2]R flush_numbers: [0, 0]
17398 Sep 22 23:14:27.772 INFO [2]R generation: [0, 0]
17399 Sep 22 23:14:27.772 INFO [2]R dirty: [false, false]
17400 Sep 22 23:14:27.772 INFO listening, local_addr: 127.0.0.1:57719, task: repair
17401 Sep 22 23:14:27.772 INFO Max found gen is 1
17402 Sep 22 23:14:27.772 INFO Generation requested: 1 >= found:1
17403 Sep 22 23:14:27.772 INFO Current flush_numbers [0..12]: [1, 1]
17404 Sep 22 23:14:27.772 INFO Next flush: 1
17405 Sep 22 23:14:27.772 INFO current number of open files limit 65536 is already the maximum
17406 Sep 22 23:14:27.772 INFO All extents match
17407 Sep 22 23:14:27.772 INFO No downstairs repair required
17408 Sep 22 23:14:27.772 INFO Opened existing region file "/tmp/downstairs-pJh0v4dT/region.json"
17409 Sep 22 23:14:27.772 INFO Database read version 1
17410 Sep 22 23:14:27.772 INFO No initial repair work was required
17411 Sep 22 23:14:27.772 INFO Database write version 1
17412 Sep 22 23:14:27.772 INFO Set Downstairs and Upstairs active
17413 Sep 22 23:14:27.772 INFO 8c772b52-d880-4be4-a2d6-4ced50104923 is now active with session: f18e75dd-0520-46ef-a5dd-f5dbb5b1b3d6
17414 Sep 22 23:14:27.772 INFO Downstairs has completed Negotiation, task: proc
17415 Sep 22 23:14:27.772 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57719, task: repair
17416 Sep 22 23:14:27.772 INFO 8c772b52-d880-4be4-a2d6-4ced50104923 Set Active after no repair
17417 Sep 22 23:14:27.772 INFO Notify all downstairs, region set compare is done.
17418 Sep 22 23:14:27.772 INFO Using repair address: 127.0.0.1:57719, task: main
17419 Sep 22 23:14:27.772 INFO No SSL acceptor configured, task: main
17420 Sep 22 23:14:27.772 INFO Set check for repair
17421 Sep 22 23:14:27.772 INFO [2] 127.0.0.1:53080 task reports connection:true
17422 Sep 22 23:14:27.772 INFO Current flush_numbers [0..12]: [1, 1]
17423 Sep 22 23:14:27.772 INFO 8c772b52-d880-4be4-a2d6-4ced50104923 Active Active Active
17424 Sep 22 23:14:27.772 INFO Set check for repair
17425 Sep 22 23:14:27.772 INFO [1] 127.0.0.1:50371 task reports connection:true
17426 Sep 22 23:14:27.772 INFO 8c772b52-d880-4be4-a2d6-4ced50104923 Active Active Active
17427 Sep 22 23:14:27.772 INFO Set check for repair
17428 Sep 22 23:14:27.772 INFO Downstairs has completed Negotiation, task: proc
17429 Sep 22 23:14:27.772 INFO [0] received reconcile message
17430 Sep 22 23:14:27.772 INFO [0] All repairs completed, exit
17431 Sep 22 23:14:27.772 INFO [0] Starts cmd_loop
17432 Sep 22 23:14:27.772 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (90c0414c-0a2e-4dcb-869c-f885bd47f77c) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
17433 Sep 22 23:14:27.772 INFO [1] received reconcile message
17434 Sep 22 23:14:27.772 INFO [0] Transition from WaitActive to WaitQuorum
17435 Sep 22 23:14:27.772 INFO [1] All repairs completed, exit
17436 Sep 22 23:14:27.772 WARN [0] new RM replaced this: None
17437 Sep 22 23:14:27.772 INFO [1] Starts cmd_loop
17438 Sep 22 23:14:27.772 INFO [0] Starts reconcile loop
17439 Sep 22 23:14:27.772 INFO [2] received reconcile message
17440 Sep 22 23:14:27.772 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (90c0414c-0a2e-4dcb-869c-f885bd47f77c) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
17441 Sep 22 23:14:27.772 INFO [1] Transition from WaitActive to WaitQuorum
17442 Sep 22 23:14:27.772 INFO current number of open files limit 65536 is already the maximum
17443 Sep 22 23:14:27.772 WARN [1] new RM replaced this: None
17444 Sep 22 23:14:27.772 INFO [2] All repairs completed, exit
17445 Sep 22 23:14:27.772 INFO Opened existing region file "/tmp/downstairs-hIre6xvC/region.json"
17446 Sep 22 23:14:27.772 INFO [1] Starts reconcile loop
17447 Sep 22 23:14:27.772 INFO [2] Starts cmd_loop
17448 Sep 22 23:14:27.772 INFO Database read version 1
17449 Sep 22 23:14:27.773 INFO Database write version 1
17450 Sep 22 23:14:27.773 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (90c0414c-0a2e-4dcb-869c-f885bd47f77c) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
17451 Sep 22 23:14:27.773 INFO [2] Transition from WaitActive to WaitQuorum
17452 Sep 22 23:14:27.773 WARN [2] new RM replaced this: None
17453 The guest has finished waiting for activation
17454 Sep 22 23:14:27.773 INFO [2] Starts reconcile loop
17455 Sep 22 23:14:27.773 INFO [0] 127.0.0.1:34728 task reports connection:true
17456 Sep 22 23:14:27.773 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 WaitQuorum WaitQuorum WaitQuorum
17457 Sep 22 23:14:27.773 INFO [0]R flush_numbers: [1, 1]
17458 Sep 22 23:14:27.773 INFO [0]R generation: [1, 1]
17459 Sep 22 23:14:27.773 INFO [0]R dirty: [false, false]
17460 Sep 22 23:14:27.773 INFO [1]R flush_numbers: [1, 1]
17461 Sep 22 23:14:27.773 INFO [1]R generation: [1, 1]
17462 Sep 22 23:14:27.773 INFO [1]R dirty: [false, false]
17463 Sep 22 23:14:27.773 INFO [2]R flush_numbers: [1, 1]
17464 Sep 22 23:14:27.773 INFO [2]R generation: [1, 1]
17465 Sep 22 23:14:27.773 INFO [2]R dirty: [false, false]
17466 Sep 22 23:14:27.773 INFO Max found gen is 2
17467 Sep 22 23:14:27.773 INFO Generation requested: 2 >= found:2
17468 Sep 22 23:14:27.773 INFO Next flush: 2
17469 Sep 22 23:14:27.773 INFO All extents match
17470 Sep 22 23:14:27.773 INFO current number of open files limit 65536 is already the maximum
17471 Sep 22 23:14:27.773 INFO No downstairs repair required
17472 Sep 22 23:14:27.773 INFO No initial repair work was required
17473 Sep 22 23:14:27.773 INFO Set Downstairs and Upstairs active
17474 Sep 22 23:14:27.773 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 is now active with session: 90c0414c-0a2e-4dcb-869c-f885bd47f77c
17475 Sep 22 23:14:27.773 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 Set Active after no repair
17476 Sep 22 23:14:27.773 INFO Notify all downstairs, region set compare is done.
17477 Sep 22 23:14:27.773 INFO Set check for repair
17478 Sep 22 23:14:27.773 DEBG IO Read 1000 has deps []
17479 Sep 22 23:14:27.773 INFO Created new region file "/tmp/downstairs-zWC5Bd4H/region.json"
17480 Sep 22 23:14:27.773 INFO [1] 127.0.0.1:61240 task reports connection:true
17481 Sep 22 23:14:27.773 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 Active Active Active
17482 Sep 22 23:14:27.773 INFO Set check for repair
17483 Sep 22 23:14:27.773 INFO [2] 127.0.0.1:49345 task reports connection:true
17484 Sep 22 23:14:27.773 DEBG IO Write 1001 has deps [JobId(1000)]
17485 Sep 22 23:14:27.773 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 Active Active Active
17486 Sep 22 23:14:27.773 INFO Set check for repair
17487 Sep 22 23:14:27.773 INFO [0] received reconcile message
17488 Sep 22 23:14:27.773 INFO [0] All repairs completed, exit
17489 Sep 22 23:14:27.773 INFO [0] Starts cmd_loop
17490 Sep 22 23:14:27.773 DEBG up_ds_listen was notified
17491 Sep 22 23:14:27.773 INFO [1] received reconcile message
17492 Sep 22 23:14:27.773 DEBG up_ds_listen process 1001
17493 Sep 22 23:14:27.773 INFO [1] All repairs completed, exit
17494 Sep 22 23:14:27.773 DEBG [A] ack job 1001:2, : downstairs
17495 Sep 22 23:14:27.773 INFO [1] Starts cmd_loop
17496 Sep 22 23:14:27.773 INFO [2] received reconcile message
17497 Sep 22 23:14:27.773 DEBG up_ds_listen checked 1 jobs, back to waiting
17498 Sep 22 23:14:27.773 INFO [2] All repairs completed, exit
17499 Sep 22 23:14:27.773 INFO [2] Starts cmd_loop
17500 The guest has finished waiting for activation
17501 Sep 22 23:14:27.774 DEBG IO Read 1000 has deps []
17502 Sep 22 23:14:27.774 DEBG Read :1000 deps:[] res:true
17503 Sep 22 23:14:27.774 INFO UUID: 5ee54174-c6d6-4c0b-b60d-527f92f52f40
17504 Sep 22 23:14:27.774 INFO Blocks per extent:5 Total Extents: 2
17505 Sep 22 23:14:27.774 INFO Crucible Version: Crucible Version: 0.0.1
17506 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17507 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17508 rustc: 1.70.0 stable x86_64-unknown-illumos
17509 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17510 Sep 22 23:14:27.774 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17511 Sep 22 23:14:27.774 INFO Using address: 127.0.0.1:41940, task: main
17512 Sep 22 23:14:27.774 DEBG Read :1000 deps:[] res:true
17513 Sep 22 23:14:27.774 INFO Repair listens on 127.0.0.1:0, task: repair
17514 Sep 22 23:14:27.774 DEBG Read :1000 deps:[] res:true
17515 Sep 22 23:14:27.775 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:47488, task: repair
17516 Sep 22 23:14:27.775 DEBG Read :1000 deps:[] res:true
17517 Sep 22 23:14:27.775 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:47488, task: repair
17518 Sep 22 23:14:27.775 INFO UUID: 17fc807b-48af-4955-a80e-8a9cfce39310
17519 Sep 22 23:14:27.775 INFO Blocks per extent:5 Total Extents: 2
17520 Sep 22 23:14:27.775 INFO listening, local_addr: 127.0.0.1:47488, task: repair
17521 Sep 22 23:14:27.775 INFO Crucible Version: Crucible Version: 0.0.1
17522 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17523 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17524 rustc: 1.70.0 stable x86_64-unknown-illumos
17525 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17526 Sep 22 23:14:27.775 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17527 Sep 22 23:14:27.775 INFO Using address: 127.0.0.1:43395, task: main
17528 Sep 22 23:14:27.775 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:47488, task: repair
17529 Sep 22 23:14:27.775 INFO Using repair address: 127.0.0.1:47488, task: main
17530 Sep 22 23:14:27.775 INFO No SSL acceptor configured, task: main
17531 Sep 22 23:14:27.775 INFO Repair listens on 127.0.0.1:0, task: repair
17532 Sep 22 23:14:27.775 DEBG Read :1000 deps:[] res:true
17533 Sep 22 23:14:27.775 INFO current number of open files limit 65536 is already the maximum
17534 Sep 22 23:14:27.775 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:48582, task: repair
17535 Sep 22 23:14:27.775 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:48582, task: repair
17536 Sep 22 23:14:27.775 INFO listening, local_addr: 127.0.0.1:48582, task: repair
17537 Sep 22 23:14:27.775 INFO Created new region file "/tmp/downstairs-jTnTqoHD/region.json"
17538 Sep 22 23:14:27.776 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:48582, task: repair
17539 Sep 22 23:14:27.776 INFO Using repair address: 127.0.0.1:48582, task: main
17540 Sep 22 23:14:27.776 INFO No SSL acceptor configured, task: main
17541 Sep 22 23:14:27.776 DEBG [0] Read AckReady 1000, : downstairs
17542 Sep 22 23:14:27.776 DEBG Read :1000 deps:[] res:true
17543 Sep 22 23:14:27.776 DEBG [2] Read already AckReady 1000, : downstairs
17544 Sep 22 23:14:27.776 INFO current number of open files limit 65536 is already the maximum
17545 Sep 22 23:14:27.776 INFO Created new region file "/tmp/downstairs-bEN17gOw/region.json"
17546 Sep 22 23:14:27.776 DEBG [1] Read already AckReady 1000, : downstairs
17547 Sep 22 23:14:27.776 DEBG up_ds_listen was notified
17548 Sep 22 23:14:27.776 DEBG up_ds_listen process 1000
17549 Sep 22 23:14:27.776 DEBG [A] ack job 1000:1, : downstairs
17550 Sep 22 23:14:27.777 DEBG up_ds_listen checked 1 jobs, back to waiting
17551 Sep 22 23:14:27.777 INFO current number of open files limit 65536 is already the maximum
17552 Sep 22 23:14:27.777 INFO Opened existing region file "/tmp/downstairs-zWC5Bd4H/region.json"
17553 Sep 22 23:14:27.777 INFO Database read version 1
17554 Sep 22 23:14:27.777 INFO Database write version 1
17555 Sep 22 23:14:27.778 DEBG Write :1001 deps:[JobId(1000)] res:true
17556 Sep 22 23:14:27.779 DEBG Write :1001 deps:[JobId(1000)] res:true
17557 Sep 22 23:14:27.780 DEBG IO Write 1001 has deps [JobId(1000)]
17558 Sep 22 23:14:27.780 DEBG up_ds_listen was notified
17559 Sep 22 23:14:27.780 DEBG up_ds_listen process 1001
17560 Sep 22 23:14:27.780 DEBG [A] ack job 1001:2, : downstairs
17561 Sep 22 23:14:27.780 DEBG up_ds_listen checked 1 jobs, back to waiting
17562 Sep 22 23:14:27.780 INFO UUID: e660550a-6e0f-4d89-b104-4c2e71068362
17563 Sep 22 23:14:27.780 INFO Blocks per extent:5 Total Extents: 2
17564 Sep 22 23:14:27.780 DEBG Write :1001 deps:[JobId(1000)] res:true
17565 Sep 22 23:14:27.780 INFO current number of open files limit 65536 is already the maximum
17566 Sep 22 23:14:27.780 DEBG [0] Read AckReady 1000, : downstairs
17567 Sep 22 23:14:27.780 INFO Crucible Version: Crucible Version: 0.0.1
17568 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17569 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17570 rustc: 1.70.0 stable x86_64-unknown-illumos
17571 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17572 Sep 22 23:14:27.780 INFO Opened existing region file "/tmp/downstairs-jTnTqoHD/region.json"
17573 Sep 22 23:14:27.780 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17574 Sep 22 23:14:27.780 INFO Database read version 1
17575 Sep 22 23:14:27.780 INFO Database write version 1
17576 Sep 22 23:14:27.780 INFO Using address: 127.0.0.1:58713, task: main
17577 Sep 22 23:14:27.780 DEBG IO Read 1002 has deps [JobId(1001)]
17578 Sep 22 23:14:27.780 INFO Repair listens on 127.0.0.1:0, task: repair
17579 Sep 22 23:14:27.781 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:64431, task: repair
17580 Sep 22 23:14:27.781 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:64431, task: repair
17581 Sep 22 23:14:27.781 INFO listening, local_addr: 127.0.0.1:64431, task: repair
17582 Sep 22 23:14:27.781 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:64431, task: repair
17583 Sep 22 23:14:27.781 INFO Using repair address: 127.0.0.1:64431, task: main
17584 Sep 22 23:14:27.781 INFO No SSL acceptor configured, task: main
17585 Sep 22 23:14:27.781 DEBG Read :1002 deps:[JobId(1001)] res:true
17586 Sep 22 23:14:27.781 INFO current number of open files limit 65536 is already the maximum
17587 Sep 22 23:14:27.781 INFO Opened existing region file "/tmp/downstairs-bEN17gOw/region.json"
17588 Sep 22 23:14:27.781 INFO Database read version 1
17589 Sep 22 23:14:27.781 INFO Database write version 1
17590 Sep 22 23:14:27.782 DEBG Read :1002 deps:[JobId(1001)] res:true
17591 Sep 22 23:14:27.782 DEBG Read :1002 deps:[JobId(1001)] res:true
17592 Sep 22 23:14:27.782 INFO Upstairs starts
17593 Sep 22 23:14:27.782 INFO Crucible Version: BuildInfo {
17594 version: "0.0.1",
17595 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
17596 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
17597 git_branch: "main",
17598 rustc_semver: "1.70.0",
17599 rustc_channel: "stable",
17600 rustc_host_triple: "x86_64-unknown-illumos",
17601 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
17602 cargo_triple: "x86_64-unknown-illumos",
17603 debug: true,
17604 opt_level: 0,
17605 }
17606 Sep 22 23:14:27.782 INFO Upstairs <-> Downstairs Message Version: 4
17607 Sep 22 23:14:27.782 INFO Crucible stats registered with UUID: d583c1f2-1fde-4509-a36e-fd07084b7951
17608 Sep 22 23:14:27.782 INFO Crucible d583c1f2-1fde-4509-a36e-fd07084b7951 has session id: 5384ce6a-6e03-461e-bf2d-cd3683006171
17609 Sep 22 23:14:27.783 DEBG [1] Read already AckReady 1000, : downstairs
17610 Sep 22 23:14:27.783 INFO listening on 127.0.0.1:0, task: main
17611 Sep 22 23:14:27.783 INFO listening on 127.0.0.1:0, task: main
17612 Sep 22 23:14:27.783 INFO listening on 127.0.0.1:0, task: main
17613 Sep 22 23:14:27.783 INFO [0] connecting to 127.0.0.1:62529, looper: 0
17614 Sep 22 23:14:27.783 INFO [1] connecting to 127.0.0.1:50210, looper: 1
17615 Sep 22 23:14:27.783 DEBG Write :1001 deps:[JobId(1000)] res:true
17616 Sep 22 23:14:27.783 INFO [2] connecting to 127.0.0.1:58713, looper: 2
17617 Sep 22 23:14:27.783 INFO up_listen starts, task: up_listen
17618 Sep 22 23:14:27.783 INFO Wait for all three downstairs to come online
17619 Sep 22 23:14:27.783 INFO UUID: 83bc82a1-6a8c-4f0e-8d39-8e2df2a396fa
17620 Sep 22 23:14:27.783 INFO Flush timeout: 0.5
17621 Sep 22 23:14:27.783 INFO Blocks per extent:5 Total Extents: 2
17622 Sep 22 23:14:27.783 INFO Crucible Version: Crucible Version: 0.0.1
17623 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17624 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17625 rustc: 1.70.0 stable x86_64-unknown-illumos
17626 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17627 Sep 22 23:14:27.783 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17628 Sep 22 23:14:27.783 INFO Using address: 127.0.0.1:61419, task: main
17629 Sep 22 23:14:27.783 INFO accepted connection from 127.0.0.1:59757, task: main
17630 Sep 22 23:14:27.784 INFO accepted connection from 127.0.0.1:54884, task: main
17631 Sep 22 23:14:27.784 INFO accepted connection from 127.0.0.1:40330, task: main
17632 Sep 22 23:14:27.784 INFO Repair listens on 127.0.0.1:0, task: repair
17633 Sep 22 23:14:27.784 INFO [0] d583c1f2-1fde-4509-a36e-fd07084b7951 looper connected, looper: 0
17634 Sep 22 23:14:27.784 INFO [0] Proc runs for 127.0.0.1:62529 in state New
17635 Sep 22 23:14:27.784 DEBG Write :1001 deps:[JobId(1000)] res:true
17636 Sep 22 23:14:27.784 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:43993, task: repair
17637 Sep 22 23:14:27.784 INFO [1] d583c1f2-1fde-4509-a36e-fd07084b7951 looper connected, looper: 1
17638 Sep 22 23:14:27.784 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:43993, task: repair
17639 Sep 22 23:14:27.784 INFO [1] Proc runs for 127.0.0.1:50210 in state New
17640 Sep 22 23:14:27.784 INFO listening, local_addr: 127.0.0.1:43993, task: repair
17641 Sep 22 23:14:27.784 INFO [2] d583c1f2-1fde-4509-a36e-fd07084b7951 looper connected, looper: 2
17642 Sep 22 23:14:27.784 INFO [2] Proc runs for 127.0.0.1:58713 in state New
17643 Sep 22 23:14:27.784 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:43993, task: repair
17644 Sep 22 23:14:27.784 INFO Using repair address: 127.0.0.1:43993, task: main
17645 Sep 22 23:14:27.784 INFO No SSL acceptor configured, task: main
17646 Sep 22 23:14:27.784 INFO Connection request from d583c1f2-1fde-4509-a36e-fd07084b7951 with version 4, task: proc
17647 Sep 22 23:14:27.784 INFO upstairs UpstairsConnection { upstairs_id: d583c1f2-1fde-4509-a36e-fd07084b7951, session_id: ea4150c9-255d-4c41-aae3-77a60f4803c2, gen: 1 } connected, version 4, task: proc
17648 Sep 22 23:14:27.784 DEBG Write :1001 deps:[JobId(1000)] res:true
17649 Sep 22 23:14:27.785 INFO Connection request from d583c1f2-1fde-4509-a36e-fd07084b7951 with version 4, task: proc
17650 Sep 22 23:14:27.785 INFO current number of open files limit 65536 is already the maximum
17651 Sep 22 23:14:27.785 INFO upstairs UpstairsConnection { upstairs_id: d583c1f2-1fde-4509-a36e-fd07084b7951, session_id: ea4150c9-255d-4c41-aae3-77a60f4803c2, gen: 1 } connected, version 4, task: proc
17652 Sep 22 23:14:27.785 INFO Connection request from d583c1f2-1fde-4509-a36e-fd07084b7951 with version 4, task: proc
17653 Sep 22 23:14:27.785 INFO Created new region file "/tmp/downstairs-QARdZ8Qd/region.json"
17654 Sep 22 23:14:27.785 INFO upstairs UpstairsConnection { upstairs_id: d583c1f2-1fde-4509-a36e-fd07084b7951, session_id: ea4150c9-255d-4c41-aae3-77a60f4803c2, gen: 1 } connected, version 4, task: proc
17655 Sep 22 23:14:27.785 INFO UUID: 69deab50-0fe3-44d0-8b06-66f3843da939
17656 Sep 22 23:14:27.785 DEBG IO Read 1002 has deps [JobId(1001)]
17657 Sep 22 23:14:27.785 INFO Blocks per extent:5 Total Extents: 2
17658 Sep 22 23:14:27.785 INFO Crucible Version: Crucible Version: 0.0.1
17659 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17660 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17661 rustc: 1.70.0 stable x86_64-unknown-illumos
17662 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17663 The guest has requested activation
17664 Sep 22 23:14:27.785 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17665 Sep 22 23:14:27.785 INFO Using address: 127.0.0.1:40972, task: main
17666 Sep 22 23:14:27.785 INFO [0] d583c1f2-1fde-4509-a36e-fd07084b7951 (ea4150c9-255d-4c41-aae3-77a60f4803c2) New New New ds_transition to WaitActive
17667 Sep 22 23:14:27.785 INFO [0] Transition from New to WaitActive
17668 Sep 22 23:14:27.785 INFO [1] d583c1f2-1fde-4509-a36e-fd07084b7951 (ea4150c9-255d-4c41-aae3-77a60f4803c2) WaitActive New New ds_transition to WaitActive
17669 Sep 22 23:14:27.785 INFO [1] Transition from New to WaitActive
17670 Sep 22 23:14:27.785 INFO [2] d583c1f2-1fde-4509-a36e-fd07084b7951 (ea4150c9-255d-4c41-aae3-77a60f4803c2) WaitActive WaitActive New ds_transition to WaitActive
17671 Sep 22 23:14:27.785 INFO [2] Transition from New to WaitActive
17672 Sep 22 23:14:27.785 INFO Repair listens on 127.0.0.1:0, task: repair
17673 Sep 22 23:14:27.785 INFO d583c1f2-1fde-4509-a36e-fd07084b7951 active request set
17674 Sep 22 23:14:27.785 DEBG [2] Read already AckReady 1000, : downstairs
17675 Sep 22 23:14:27.785 INFO [0] received activate with gen 1
17676 Sep 22 23:14:27.785 DEBG up_ds_listen was notified
17677 Sep 22 23:14:27.785 DEBG Read :1002 deps:[JobId(1001)] res:true
17678 Sep 22 23:14:27.785 INFO [0] client got ds_active_rx, promote! session ea4150c9-255d-4c41-aae3-77a60f4803c2
17679 Sep 22 23:14:27.785 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:39838, task: repair
17680 Sep 22 23:14:27.785 DEBG [1] Read AckReady 1002, : downstairs
17681 Sep 22 23:14:27.785 DEBG up_ds_listen process 1000
17682 Sep 22 23:14:27.785 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:39838, task: repair
17683 Sep 22 23:14:27.785 DEBG [A] ack job 1000:1, : downstairs
17684 Sep 22 23:14:27.785 INFO listening, local_addr: 127.0.0.1:39838, task: repair
17685 Sep 22 23:14:27.785 INFO [1] received activate with gen 1
17686 Sep 22 23:14:27.785 INFO [1] client got ds_active_rx, promote! session ea4150c9-255d-4c41-aae3-77a60f4803c2
17687 Sep 22 23:14:27.786 INFO [2] received activate with gen 1
17688 Sep 22 23:14:27.786 INFO [2] client got ds_active_rx, promote! session ea4150c9-255d-4c41-aae3-77a60f4803c2
17689 Sep 22 23:14:27.786 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:39838, task: repair
17690 Sep 22 23:14:27.786 INFO Using repair address: 127.0.0.1:39838, task: main
17691 Sep 22 23:14:27.786 INFO No SSL acceptor configured, task: main
17692 Sep 22 23:14:27.786 INFO UpstairsConnection { upstairs_id: d583c1f2-1fde-4509-a36e-fd07084b7951, session_id: ea4150c9-255d-4c41-aae3-77a60f4803c2, gen: 1 } is now active (read-write)
17693 Sep 22 23:14:27.786 DEBG Read :1002 deps:[JobId(1001)] res:true
17694 Sep 22 23:14:27.786 INFO UpstairsConnection { upstairs_id: d583c1f2-1fde-4509-a36e-fd07084b7951, session_id: ea4150c9-255d-4c41-aae3-77a60f4803c2, gen: 1 } is now active (read-write)
17695 Sep 22 23:14:27.786 DEBG up_ds_listen checked 1 jobs, back to waiting
17696 Sep 22 23:14:27.786 INFO UpstairsConnection { upstairs_id: d583c1f2-1fde-4509-a36e-fd07084b7951, session_id: ea4150c9-255d-4c41-aae3-77a60f4803c2, gen: 1 } is now active (read-write)
17697 Sep 22 23:14:27.786 DEBG Read :1002 deps:[JobId(1001)] res:true
17698 Sep 22 23:14:27.786 DEBG IO Flush 1001 has deps [JobId(1000)]
17699 Sep 22 23:14:27.786 INFO [0] downstairs client at 127.0.0.1:62529 has UUID 2cb21d94-77f3-4d85-9833-6fd57802dc13
17700 Sep 22 23:14:27.786 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 2cb21d94-77f3-4d85-9833-6fd57802dc13, encrypted: true, database_read_version: 1, database_write_version: 1 }
17701 Sep 22 23:14:27.787 INFO d583c1f2-1fde-4509-a36e-fd07084b7951 WaitActive WaitActive WaitActive
17702 Sep 22 23:14:27.787 INFO [1] downstairs client at 127.0.0.1:50210 has UUID cc5d982e-2876-45d7-90a8-4f67b873e08d
17703 Sep 22 23:14:27.787 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: cc5d982e-2876-45d7-90a8-4f67b873e08d, encrypted: true, database_read_version: 1, database_write_version: 1 }
17704 Sep 22 23:14:27.787 INFO d583c1f2-1fde-4509-a36e-fd07084b7951 WaitActive WaitActive WaitActive
17705 Sep 22 23:14:27.787 INFO [2] downstairs client at 127.0.0.1:58713 has UUID e660550a-6e0f-4d89-b104-4c2e71068362
17706 Sep 22 23:14:27.787 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e660550a-6e0f-4d89-b104-4c2e71068362, encrypted: true, database_read_version: 1, database_write_version: 1 }
17707 Sep 22 23:14:27.787 INFO d583c1f2-1fde-4509-a36e-fd07084b7951 WaitActive WaitActive WaitActive
17708 Sep 22 23:14:27.787 INFO Upstairs starts
17709 Sep 22 23:14:27.787 INFO Crucible Version: BuildInfo {
17710 version: "0.0.1",
17711 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
17712 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
17713 git_branch: "main",
17714 rustc_semver: "1.70.0",
17715 rustc_channel: "stable",
17716 rustc_host_triple: "x86_64-unknown-illumos",
17717 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
17718 cargo_triple: "x86_64-unknown-illumos",
17719 debug: true,
17720 opt_level: 0,
17721 }
17722 Sep 22 23:14:27.787 INFO Upstairs <-> Downstairs Message Version: 4
17723 Sep 22 23:14:27.787 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:2 g:2
17724 Sep 22 23:14:27.787 INFO Crucible stats registered with UUID: d733e47e-2719-435d-bec3-afb64fc731d4
17725 Sep 22 23:14:27.787 INFO Crucible d733e47e-2719-435d-bec3-afb64fc731d4 has session id: 34a9eb0f-3a64-4202-86be-263557e51230
17726 Sep 22 23:14:27.787 INFO Current flush_numbers [0..12]: [0, 0]
17727 Sep 22 23:14:27.787 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:2 g:2
17728 Sep 22 23:14:27.787 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:2 g:2
17729 Sep 22 23:14:27.787 INFO listening on 127.0.0.1:0, task: main
17730 Sep 22 23:14:27.787 INFO Downstairs has completed Negotiation, task: proc
17731 Sep 22 23:14:27.787 INFO listening on 127.0.0.1:0, task: main
17732 Sep 22 23:14:27.787 INFO listening on 127.0.0.1:0, task: main
17733 Sep 22 23:14:27.787 INFO [0] connecting to 127.0.0.1:61471, looper: 0
17734 Sep 22 23:14:27.787 INFO Current flush_numbers [0..12]: [0, 0]
17735 Sep 22 23:14:27.787 DEBG up_ds_listen was notified
17736 Sep 22 23:14:27.788 DEBG up_ds_listen process 1001
17737 Sep 22 23:14:27.788 INFO [1] connecting to 127.0.0.1:43395, looper: 1
17738 Sep 22 23:14:27.788 DEBG [A] ack job 1001:2, : downstairs
17739 Sep 22 23:14:27.788 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
17740 Sep 22 23:14:27.788 DEBG up_ds_listen checked 1 jobs, back to waiting
17741 Sep 22 23:14:27.788 INFO [2] connecting to 127.0.0.1:40972, looper: 2
17742 Sep 22 23:14:27.788 INFO Downstairs has completed Negotiation, task: proc
17743 Sep 22 23:14:27.788 INFO current number of open files limit 65536 is already the maximum
17744 Sep 22 23:14:27.788 INFO up_listen starts, task: up_listen
17745 Sep 22 23:14:27.788 INFO Wait for all three downstairs to come online
17746 Sep 22 23:14:27.788 INFO Flush timeout: 0.5
17747 Sep 22 23:14:27.788 INFO Created new region file "/tmp/downstairs-NuEPeMJL/region.json"
17748 Sep 22 23:14:27.788 INFO Current flush_numbers [0..12]: [0, 0]
17749 Sep 22 23:14:27.788 INFO accepted connection from 127.0.0.1:61129, task: main
17750 Sep 22 23:14:27.788 INFO current number of open files limit 65536 is already the maximum
17751 Sep 22 23:14:27.788 INFO Opened existing region file "/tmp/downstairs-QARdZ8Qd/region.json"
17752 Sep 22 23:14:27.788 INFO Database read version 1
17753 Sep 22 23:14:27.788 INFO accepted connection from 127.0.0.1:48948, task: main
17754 Sep 22 23:14:27.788 INFO Database write version 1
17755 Sep 22 23:14:27.788 DEBG [0] Read already AckReady 1002, : downstairs
17756 Sep 22 23:14:27.788 INFO Downstairs has completed Negotiation, task: proc
17757 Sep 22 23:14:27.788 INFO accepted connection from 127.0.0.1:43249, task: main
17758 Sep 22 23:14:27.788 INFO [0] d733e47e-2719-435d-bec3-afb64fc731d4 looper connected, looper: 0
17759 Sep 22 23:14:27.788 INFO [0] Proc runs for 127.0.0.1:61471 in state New
17760 Sep 22 23:14:27.788 INFO [0] d583c1f2-1fde-4509-a36e-fd07084b7951 (ea4150c9-255d-4c41-aae3-77a60f4803c2) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
17761 Sep 22 23:14:27.788 INFO [0] Transition from WaitActive to WaitQuorum
17762 Sep 22 23:14:27.788 WARN [0] new RM replaced this: None
17763 Sep 22 23:14:27.788 INFO [1] d733e47e-2719-435d-bec3-afb64fc731d4 looper connected, looper: 1
17764 Sep 22 23:14:27.788 INFO [1] Proc runs for 127.0.0.1:43395 in state New
17765 Sep 22 23:14:27.788 INFO [0] Starts reconcile loop
17766 Sep 22 23:14:27.789 INFO [2] d733e47e-2719-435d-bec3-afb64fc731d4 looper connected, looper: 2
17767 Sep 22 23:14:27.789 INFO [1] d583c1f2-1fde-4509-a36e-fd07084b7951 (ea4150c9-255d-4c41-aae3-77a60f4803c2) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
17768 Sep 22 23:14:27.789 INFO [1] Transition from WaitActive to WaitQuorum
17769 Sep 22 23:14:27.789 INFO [2] Proc runs for 127.0.0.1:40972 in state New
17770 Sep 22 23:14:27.789 WARN [1] new RM replaced this: None
17771 Sep 22 23:14:27.789 INFO [1] Starts reconcile loop
17772 Sep 22 23:14:27.789 INFO [2] d583c1f2-1fde-4509-a36e-fd07084b7951 (ea4150c9-255d-4c41-aae3-77a60f4803c2) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
17773 Sep 22 23:14:27.789 INFO [2] Transition from WaitActive to WaitQuorum
17774 Sep 22 23:14:27.789 WARN [2] new RM replaced this: None
17775 Sep 22 23:14:27.789 INFO [2] Starts reconcile loop
17776 Sep 22 23:14:27.789 INFO [0] 127.0.0.1:62529 task reports connection:true
17777 Sep 22 23:14:27.789 INFO d583c1f2-1fde-4509-a36e-fd07084b7951 WaitQuorum WaitQuorum WaitQuorum
17778 Sep 22 23:14:27.789 INFO Connection request from d733e47e-2719-435d-bec3-afb64fc731d4 with version 4, task: proc
17779 Sep 22 23:14:27.789 INFO [0]R flush_numbers: [0, 0]
17780 Sep 22 23:14:27.789 INFO [0]R generation: [0, 0]
17781 Sep 22 23:14:27.789 INFO upstairs UpstairsConnection { upstairs_id: d733e47e-2719-435d-bec3-afb64fc731d4, session_id: e0e1aae2-c32e-4145-89ce-1904fa05fe10, gen: 1 } connected, version 4, task: proc
17782 Sep 22 23:14:27.789 INFO [0]R dirty: [false, false]
17783 Sep 22 23:14:27.789 INFO [1]R flush_numbers: [0, 0]
17784 Sep 22 23:14:27.789 INFO [1]R generation: [0, 0]
17785 Sep 22 23:14:27.789 INFO [1]R dirty: [false, false]
17786 Sep 22 23:14:27.789 INFO [2]R flush_numbers: [0, 0]
17787 Sep 22 23:14:27.789 INFO [2]R generation: [0, 0]
17788 Sep 22 23:14:27.789 INFO [2]R dirty: [false, false]
17789 Sep 22 23:14:27.789 INFO Max found gen is 1
17790 Sep 22 23:14:27.789 DEBG [0] Read AckReady 1002, : downstairs
17791 Sep 22 23:14:27.789 INFO Generation requested: 1 >= found:1
17792 Sep 22 23:14:27.789 INFO Next flush: 1
17793 Sep 22 23:14:27.789 INFO Connection request from d733e47e-2719-435d-bec3-afb64fc731d4 with version 4, task: proc
17794 Sep 22 23:14:27.789 INFO upstairs UpstairsConnection { upstairs_id: d733e47e-2719-435d-bec3-afb64fc731d4, session_id: e0e1aae2-c32e-4145-89ce-1904fa05fe10, gen: 1 } connected, version 4, task: proc
17795 Sep 22 23:14:27.789 INFO All extents match
17796 Sep 22 23:14:27.789 INFO No downstairs repair required
17797 Sep 22 23:14:27.789 INFO No initial repair work was required
17798 Sep 22 23:14:27.789 INFO Set Downstairs and Upstairs active
17799 Sep 22 23:14:27.789 INFO d583c1f2-1fde-4509-a36e-fd07084b7951 is now active with session: ea4150c9-255d-4c41-aae3-77a60f4803c2
17800 Sep 22 23:14:27.789 INFO Connection request from d733e47e-2719-435d-bec3-afb64fc731d4 with version 4, task: proc
17801 Sep 22 23:14:27.789 INFO d583c1f2-1fde-4509-a36e-fd07084b7951 Set Active after no repair
17802 Sep 22 23:14:27.789 INFO upstairs UpstairsConnection { upstairs_id: d733e47e-2719-435d-bec3-afb64fc731d4, session_id: e0e1aae2-c32e-4145-89ce-1904fa05fe10, gen: 1 } connected, version 4, task: proc
17803 Sep 22 23:14:27.789 INFO Notify all downstairs, region set compare is done.
17804 Sep 22 23:14:27.789 INFO Set check for repair
17805 Sep 22 23:14:27.789 INFO [1] 127.0.0.1:50210 task reports connection:true
17806 Sep 22 23:14:27.789 INFO d583c1f2-1fde-4509-a36e-fd07084b7951 Active Active Active
17807 Sep 22 23:14:27.789 INFO Set check for repair
17808 The guest has requested activation
17809 Sep 22 23:14:27.789 INFO [2] 127.0.0.1:58713 task reports connection:true
17810 Sep 22 23:14:27.789 INFO d583c1f2-1fde-4509-a36e-fd07084b7951 Active Active Active
17811 Sep 22 23:14:27.789 INFO Set check for repair
17812 Sep 22 23:14:27.789 INFO [0] d733e47e-2719-435d-bec3-afb64fc731d4 (e0e1aae2-c32e-4145-89ce-1904fa05fe10) New New New ds_transition to WaitActive
17813 Sep 22 23:14:27.789 INFO [0] Transition from New to WaitActive
17814 Sep 22 23:14:27.789 INFO [0] received reconcile message
17815 Sep 22 23:14:27.789 INFO [0] All repairs completed, exit
17816 Sep 22 23:14:27.789 INFO [1] d733e47e-2719-435d-bec3-afb64fc731d4 (e0e1aae2-c32e-4145-89ce-1904fa05fe10) WaitActive New New ds_transition to WaitActive
17817 Sep 22 23:14:27.789 INFO [1] Transition from New to WaitActive
17818 Sep 22 23:14:27.789 INFO [0] Starts cmd_loop
17819 Sep 22 23:14:27.790 INFO [2] d733e47e-2719-435d-bec3-afb64fc731d4 (e0e1aae2-c32e-4145-89ce-1904fa05fe10) WaitActive WaitActive New ds_transition to WaitActive
17820 Sep 22 23:14:27.790 INFO [1] received reconcile message
17821 Sep 22 23:14:27.790 INFO [2] Transition from New to WaitActive
17822 Sep 22 23:14:27.790 INFO [1] All repairs completed, exit
17823 Sep 22 23:14:27.790 INFO [1] Starts cmd_loop
17824 Sep 22 23:14:27.790 INFO d733e47e-2719-435d-bec3-afb64fc731d4 active request set
17825 Sep 22 23:14:27.790 INFO [2] received reconcile message
17826 Sep 22 23:14:27.790 INFO [2] All repairs completed, exit
17827 Sep 22 23:14:27.790 INFO [0] received activate with gen 1
17828 Sep 22 23:14:27.790 INFO [2] Starts cmd_loop
17829 Sep 22 23:14:27.790 INFO [0] client got ds_active_rx, promote! session e0e1aae2-c32e-4145-89ce-1904fa05fe10
17830 Sep 22 23:14:27.790 INFO [1] received activate with gen 1
17831 The guest has finished waiting for activation
17832 Sep 22 23:14:27.790 INFO [1] client got ds_active_rx, promote! session e0e1aae2-c32e-4145-89ce-1904fa05fe10
17833 Sep 22 23:14:27.790 INFO [2] received activate with gen 1
17834 Sep 22 23:14:27.790 INFO [2] client got ds_active_rx, promote! session e0e1aae2-c32e-4145-89ce-1904fa05fe10
17835 Sep 22 23:14:27.790 INFO UpstairsConnection { upstairs_id: d733e47e-2719-435d-bec3-afb64fc731d4, session_id: e0e1aae2-c32e-4145-89ce-1904fa05fe10, gen: 1 } is now active (read-write)
17836 Sep 22 23:14:27.790 INFO UpstairsConnection { upstairs_id: d733e47e-2719-435d-bec3-afb64fc731d4, session_id: e0e1aae2-c32e-4145-89ce-1904fa05fe10, gen: 1 } is now active (read-write)
17837 Sep 22 23:14:27.790 DEBG IO Read 1000 has deps []
17838 Sep 22 23:14:27.790 INFO UpstairsConnection { upstairs_id: d733e47e-2719-435d-bec3-afb64fc731d4, session_id: e0e1aae2-c32e-4145-89ce-1904fa05fe10, gen: 1 } is now active (read-write)
17839 Sep 22 23:14:27.791 INFO [0] downstairs client at 127.0.0.1:61471 has UUID cc26b772-bef2-4af0-a9fc-0733fa8e0d6d
17840 Sep 22 23:14:27.791 DEBG [2] Read already AckReady 1002, : downstairs
17841 Sep 22 23:14:27.791 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: cc26b772-bef2-4af0-a9fc-0733fa8e0d6d, encrypted: true, database_read_version: 1, database_write_version: 1 }
17842 Sep 22 23:14:27.791 INFO UUID: b64ddd6d-152e-4e06-b873-7258ee165522
17843 Sep 22 23:14:27.791 INFO Blocks per extent:5 Total Extents: 2
17844 Sep 22 23:14:27.791 INFO d733e47e-2719-435d-bec3-afb64fc731d4 WaitActive WaitActive WaitActive
17845 Sep 22 23:14:27.791 INFO Crucible Version: Crucible Version: 0.0.1
17846 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
17847 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
17848 rustc: 1.70.0 stable x86_64-unknown-illumos
17849 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
17850 Sep 22 23:14:27.791 INFO [1] downstairs client at 127.0.0.1:43395 has UUID 17fc807b-48af-4955-a80e-8a9cfce39310
17851 Sep 22 23:14:27.791 INFO Upstairs <-> Downstairs Message Version: 4, task: main
17852 Sep 22 23:14:27.791 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 17fc807b-48af-4955-a80e-8a9cfce39310, encrypted: true, database_read_version: 1, database_write_version: 1 }
17853 Sep 22 23:14:27.791 INFO Using address: 127.0.0.1:36571, task: main
17854 Sep 22 23:14:27.791 DEBG [2] Read already AckReady 1002, : downstairs
17855 Sep 22 23:14:27.791 INFO d733e47e-2719-435d-bec3-afb64fc731d4 WaitActive WaitActive WaitActive
17856 Sep 22 23:14:27.791 DEBG up_ds_listen was notified
17857 Sep 22 23:14:27.791 DEBG up_ds_listen process 1002
17858 Sep 22 23:14:27.791 INFO [2] downstairs client at 127.0.0.1:40972 has UUID 69deab50-0fe3-44d0-8b06-66f3843da939
17859 Sep 22 23:14:27.791 DEBG [A] ack job 1002:3, : downstairs
17860 Sep 22 23:14:27.791 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 69deab50-0fe3-44d0-8b06-66f3843da939, encrypted: true, database_read_version: 1, database_write_version: 1 }
17861 Sep 22 23:14:27.791 INFO d733e47e-2719-435d-bec3-afb64fc731d4 WaitActive WaitActive WaitActive
17862 Sep 22 23:14:27.791 DEBG Read :1000 deps:[] res:true
17863 Sep 22 23:14:27.791 INFO Repair listens on 127.0.0.1:0, task: repair
17864 Sep 22 23:14:27.791 INFO Current flush_numbers [0..12]: [0, 0]
17865 Sep 22 23:14:27.791 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36238, task: repair
17866 Sep 22 23:14:27.791 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36238, task: repair
17867 Sep 22 23:14:27.792 DEBG up_ds_listen checked 1 jobs, back to waiting
17868 Sep 22 23:14:27.792 INFO listening, local_addr: 127.0.0.1:36238, task: repair
17869 Sep 22 23:14:27.792 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36238, task: repair
17870 Sep 22 23:14:27.792 INFO Using repair address: 127.0.0.1:36238, task: main
17871 Sep 22 23:14:27.792 INFO No SSL acceptor configured, task: main
17872 Sep 22 23:14:27.792 DEBG Read :1000 deps:[] res:true
17873 Sep 22 23:14:27.792 INFO Downstairs has completed Negotiation, task: proc
17874 Sep 22 23:14:27.792 INFO Current flush_numbers [0..12]: [0, 0]
17875 Sep 22 23:14:27.792 DEBG Read :1000 deps:[] res:true
17876 Sep 22 23:14:27.792 INFO Downstairs has completed Negotiation, task: proc
17877 Sep 22 23:14:27.792 INFO Current flush_numbers [0..12]: [0, 0]
17878 Sep 22 23:14:27.793 DEBG [1] Read already AckReady 1002, : downstairs
17879 Sep 22 23:14:27.793 DEBG up_ds_listen was notified
17880 Sep 22 23:14:27.793 DEBG up_ds_listen process 1002
17881 Sep 22 23:14:27.793 DEBG [A] ack job 1002:3, : downstairs
17882 Sep 22 23:14:27.793 INFO Downstairs has completed Negotiation, task: proc
17883 Sep 22 23:14:27.793 INFO [0] d733e47e-2719-435d-bec3-afb64fc731d4 (e0e1aae2-c32e-4145-89ce-1904fa05fe10) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
17884 Sep 22 23:14:27.793 INFO [0] Transition from WaitActive to WaitQuorum
17885 Sep 22 23:14:27.793 WARN [0] new RM replaced this: None
17886 Sep 22 23:14:27.793 INFO [0] Starts reconcile loop
17887 Sep 22 23:14:27.793 DEBG up_ds_listen checked 1 jobs, back to waiting
17888 Sep 22 23:14:27.793 INFO [1] d733e47e-2719-435d-bec3-afb64fc731d4 (e0e1aae2-c32e-4145-89ce-1904fa05fe10) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
17889 Sep 22 23:14:27.793 INFO [1] Transition from WaitActive to WaitQuorum
17890 Sep 22 23:14:27.793 WARN [1] new RM replaced this: None
17891 Sep 22 23:14:27.793 INFO [1] Starts reconcile loop
17892 Sep 22 23:14:27.793 INFO [2] d733e47e-2719-435d-bec3-afb64fc731d4 (e0e1aae2-c32e-4145-89ce-1904fa05fe10) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
17893 Sep 22 23:14:27.793 INFO [2] Transition from WaitActive to WaitQuorum
17894 Sep 22 23:14:27.793 WARN [2] new RM replaced this: None
17895 Sep 22 23:14:27.793 INFO [2] Starts reconcile loop
17896 Sep 22 23:14:27.793 INFO [0] 127.0.0.1:61471 task reports connection:true
17897 Sep 22 23:14:27.793 INFO d733e47e-2719-435d-bec3-afb64fc731d4 WaitQuorum WaitQuorum WaitQuorum
17898 Sep 22 23:14:27.793 DEBG [0] Read AckReady 1000, : downstairs
17899 Sep 22 23:14:27.793 INFO [0]R flush_numbers: [0, 0]
17900 Sep 22 23:14:27.793 INFO [0]R generation: [0, 0]
17901 Sep 22 23:14:27.793 INFO [0]R dirty: [false, false]
17902 Sep 22 23:14:27.793 INFO [1]R flush_numbers: [0, 0]
17903 Sep 22 23:14:27.793 INFO [1]R generation: [0, 0]
17904 Sep 22 23:14:27.793 INFO Upstairs starts
17905 Sep 22 23:14:27.793 INFO [1]R dirty: [false, false]
17906 Sep 22 23:14:27.793 INFO [2]R flush_numbers: [0, 0]
17907 Sep 22 23:14:27.793 INFO [2]R generation: [0, 0]
17908 Sep 22 23:14:27.793 INFO Crucible Version: BuildInfo {
17909 version: "0.0.1",
17910 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
17911 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
17912 git_branch: "main",
17913 rustc_semver: "1.70.0",
17914 rustc_channel: "stable",
17915 rustc_host_triple: "x86_64-unknown-illumos",
17916 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
17917 cargo_triple: "x86_64-unknown-illumos",
17918 debug: true,
17919 opt_level: 0,
17920 }
17921 Sep 22 23:14:27.793 INFO [2]R dirty: [false, false]
17922 Sep 22 23:14:27.793 INFO current number of open files limit 65536 is already the maximum
17923 Sep 22 23:14:27.793 INFO Upstairs <-> Downstairs Message Version: 4
17924 Sep 22 23:14:27.793 INFO Max found gen is 1
17925 Sep 22 23:14:27.793 INFO Opened existing region file "/tmp/downstairs-NuEPeMJL/region.json"
17926 Sep 22 23:14:27.793 INFO Database read version 1
17927 Sep 22 23:14:27.793 INFO Crucible stats registered with UUID: 2937f30d-6553-49bb-81e4-1d5ec4457063
17928 Sep 22 23:14:27.793 INFO Generation requested: 1 >= found:1
17929 Sep 22 23:14:27.794 INFO Database write version 1
17930 Sep 22 23:14:27.794 INFO Next flush: 1
17931 Sep 22 23:14:27.794 INFO Crucible 2937f30d-6553-49bb-81e4-1d5ec4457063 has session id: 46a39c1c-6716-4158-bde6-2d848a4789fd
17932 Sep 22 23:14:27.794 INFO All extents match
17933 Sep 22 23:14:27.794 INFO No downstairs repair required
17934 Sep 22 23:14:27.794 DEBG [1] Read already AckReady 1000, : downstairs
17935 Sep 22 23:14:27.794 INFO No initial repair work was required
17936 Sep 22 23:14:27.794 INFO Set Downstairs and Upstairs active
17937 Sep 22 23:14:27.794 INFO d733e47e-2719-435d-bec3-afb64fc731d4 is now active with session: e0e1aae2-c32e-4145-89ce-1904fa05fe10
17938 Sep 22 23:14:27.794 INFO d733e47e-2719-435d-bec3-afb64fc731d4 Set Active after no repair
17939 Sep 22 23:14:27.794 INFO Notify all downstairs, region set compare is done.
17940 Sep 22 23:14:27.794 INFO listening on 127.0.0.1:0, task: main
17941 Sep 22 23:14:27.794 INFO Set check for repair
17942 Sep 22 23:14:27.794 INFO listening on 127.0.0.1:0, task: main
17943 Sep 22 23:14:27.794 INFO [1] 127.0.0.1:43395 task reports connection:true
17944 Sep 22 23:14:27.794 INFO listening on 127.0.0.1:0, task: main
17945 Sep 22 23:14:27.794 DEBG [2] Read already AckReady 1000, : downstairs
17946 Sep 22 23:14:27.794 INFO d733e47e-2719-435d-bec3-afb64fc731d4 Active Active Active
17947 Sep 22 23:14:27.794 INFO Set check for repair
17948 Sep 22 23:14:27.794 INFO [0] connecting to 127.0.0.1:41940, looper: 0
17949 Sep 22 23:14:27.794 DEBG up_ds_listen was notified
17950 Sep 22 23:14:27.794 INFO [2] 127.0.0.1:40972 task reports connection:true
17951 Sep 22 23:14:27.794 DEBG up_ds_listen process 1000
17952 Sep 22 23:14:27.794 INFO d733e47e-2719-435d-bec3-afb64fc731d4 Active Active Active
17953 Sep 22 23:14:27.794 INFO Set check for repair
17954 Sep 22 23:14:27.794 DEBG [A] ack job 1000:1, : downstairs
17955 Sep 22 23:14:27.794 INFO [1] connecting to 127.0.0.1:61419, looper: 1
17956 Sep 22 23:14:27.794 INFO [0] received reconcile message
17957 Sep 22 23:14:27.794 INFO [0] All repairs completed, exit
17958 Sep 22 23:14:27.794 INFO [0] Starts cmd_loop
17959 Sep 22 23:14:27.794 INFO [1] received reconcile message
17960 Sep 22 23:14:27.794 INFO [2] connecting to 127.0.0.1:36571, looper: 2
17961 Sep 22 23:14:27.794 INFO [1] All repairs completed, exit
17962 Sep 22 23:14:27.794 INFO [1] Starts cmd_loop
17963 Sep 22 23:14:27.794 INFO up_listen starts, task: up_listen
17964 Sep 22 23:14:27.794 INFO [2] received reconcile message
17965 Sep 22 23:14:27.794 INFO Wait for all three downstairs to come online
17966 Sep 22 23:14:27.794 INFO Flush timeout: 0.5
17967 Sep 22 23:14:27.794 INFO [2] All repairs completed, exit
17968 Sep 22 23:14:27.794 INFO [2] Starts cmd_loop
17969 The guest has finished waiting for activation
17970 Sep 22 23:14:27.794 INFO [0] 2937f30d-6553-49bb-81e4-1d5ec4457063 looper connected, looper: 0
17971 Sep 22 23:14:27.794 INFO [0] Proc runs for 127.0.0.1:41940 in state New
17972 Sep 22 23:14:27.794 DEBG up_ds_listen checked 1 jobs, back to waiting
17973 Sep 22 23:14:27.794 INFO [1] 2937f30d-6553-49bb-81e4-1d5ec4457063 looper connected, looper: 1
17974 Sep 22 23:14:27.794 INFO [1] Proc runs for 127.0.0.1:61419 in state New
17975 Sep 22 23:14:27.795 INFO [2] 2937f30d-6553-49bb-81e4-1d5ec4457063 looper connected, looper: 2
17976 Sep 22 23:14:27.795 INFO [2] Proc runs for 127.0.0.1:36571 in state New
17977 Sep 22 23:14:27.795 DEBG IO Read 1000 has deps []
17978 Sep 22 23:14:27.795 INFO accepted connection from 127.0.0.1:36187, task: main
17979 Sep 22 23:14:27.795 INFO accepted connection from 127.0.0.1:43841, task: main
17980 Sep 22 23:14:27.795 INFO accepted connection from 127.0.0.1:55496, task: main
17981 Sep 22 23:14:27.795 INFO Connection request from 2937f30d-6553-49bb-81e4-1d5ec4457063 with version 4, task: proc
17982 Sep 22 23:14:27.795 INFO upstairs UpstairsConnection { upstairs_id: 2937f30d-6553-49bb-81e4-1d5ec4457063, session_id: 3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d, gen: 1 } connected, version 4, task: proc
17983 Sep 22 23:14:27.795 INFO Connection request from 2937f30d-6553-49bb-81e4-1d5ec4457063 with version 4, task: proc
17984 Sep 22 23:14:27.795 INFO upstairs UpstairsConnection { upstairs_id: 2937f30d-6553-49bb-81e4-1d5ec4457063, session_id: 3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d, gen: 1 } connected, version 4, task: proc
17985 Sep 22 23:14:27.795 INFO Connection request from 2937f30d-6553-49bb-81e4-1d5ec4457063 with version 4, task: proc
17986 Sep 22 23:14:27.795 INFO upstairs UpstairsConnection { upstairs_id: 2937f30d-6553-49bb-81e4-1d5ec4457063, session_id: 3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d, gen: 1 } connected, version 4, task: proc
17987 The guest has requested activation
17988 Sep 22 23:14:27.796 INFO [0] 2937f30d-6553-49bb-81e4-1d5ec4457063 (3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d) New New New ds_transition to WaitActive
17989 Sep 22 23:14:27.796 INFO [0] Transition from New to WaitActive
17990 Sep 22 23:14:27.796 DEBG Read :1000 deps:[] res:true
17991 Sep 22 23:14:27.796 INFO [1] 2937f30d-6553-49bb-81e4-1d5ec4457063 (3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d) WaitActive New New ds_transition to WaitActive
17992 Sep 22 23:14:27.796 INFO [1] Transition from New to WaitActive
17993 Sep 22 23:14:27.796 INFO [2] 2937f30d-6553-49bb-81e4-1d5ec4457063 (3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d) WaitActive WaitActive New ds_transition to WaitActive
17994 Sep 22 23:14:27.796 INFO [2] Transition from New to WaitActive
17995 Sep 22 23:14:27.796 INFO 2937f30d-6553-49bb-81e4-1d5ec4457063 active request set
17996 Sep 22 23:14:27.796 INFO [0] received activate with gen 1
17997 Sep 22 23:14:27.796 INFO [0] client got ds_active_rx, promote! session 3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d
17998 Sep 22 23:14:27.796 INFO [1] received activate with gen 1
17999 Sep 22 23:14:27.796 INFO [1] client got ds_active_rx, promote! session 3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d
18000 Sep 22 23:14:27.796 INFO [2] received activate with gen 1
18001 Sep 22 23:14:27.796 INFO [2] client got ds_active_rx, promote! session 3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d
18002 Sep 22 23:14:27.796 INFO UpstairsConnection { upstairs_id: 2937f30d-6553-49bb-81e4-1d5ec4457063, session_id: 3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d, gen: 1 } is now active (read-write)
18003 Sep 22 23:14:27.796 INFO UpstairsConnection { upstairs_id: 2937f30d-6553-49bb-81e4-1d5ec4457063, session_id: 3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d, gen: 1 } is now active (read-write)
18004 Sep 22 23:14:27.796 DEBG Read :1000 deps:[] res:true
18005 Sep 22 23:14:27.796 INFO UpstairsConnection { upstairs_id: 2937f30d-6553-49bb-81e4-1d5ec4457063, session_id: 3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d, gen: 1 } is now active (read-write)
18006 Sep 22 23:14:27.797 INFO [0] downstairs client at 127.0.0.1:41940 has UUID 5ee54174-c6d6-4c0b-b60d-527f92f52f40
18007 Sep 22 23:14:27.797 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 5ee54174-c6d6-4c0b-b60d-527f92f52f40, encrypted: true, database_read_version: 1, database_write_version: 1 }
18008 Sep 22 23:14:27.797 INFO 2937f30d-6553-49bb-81e4-1d5ec4457063 WaitActive WaitActive WaitActive
18009 Sep 22 23:14:27.797 INFO [1] downstairs client at 127.0.0.1:61419 has UUID 83bc82a1-6a8c-4f0e-8d39-8e2df2a396fa
18010 Sep 22 23:14:27.797 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 83bc82a1-6a8c-4f0e-8d39-8e2df2a396fa, encrypted: true, database_read_version: 1, database_write_version: 1 }
18011 Sep 22 23:14:27.797 INFO 2937f30d-6553-49bb-81e4-1d5ec4457063 WaitActive WaitActive WaitActive
18012 Sep 22 23:14:27.797 INFO [2] downstairs client at 127.0.0.1:36571 has UUID b64ddd6d-152e-4e06-b873-7258ee165522
18013 Sep 22 23:14:27.797 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b64ddd6d-152e-4e06-b873-7258ee165522, encrypted: true, database_read_version: 1, database_write_version: 1 }
18014 Sep 22 23:14:27.797 INFO 2937f30d-6553-49bb-81e4-1d5ec4457063 WaitActive WaitActive WaitActive
18015 Sep 22 23:14:27.797 DEBG Read :1000 deps:[] res:true
18016 Sep 22 23:14:27.797 INFO Current flush_numbers [0..12]: [0, 0]
18017 test test::integration_test_three_layers ... ok
18018 Sep 22 23:14:27.797 INFO Downstairs has completed Negotiation, task: proc
18019 Sep 22 23:14:27.798 INFO UUID: 63793045-02dd-4dbb-ad46-c6b5f41c9d53
18020 Sep 22 23:14:27.798 INFO Blocks per extent:5 Total Extents: 2
18021 Sep 22 23:14:27.798 INFO Crucible Version: Crucible Version: 0.0.1
18022 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18023 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18024 rustc: 1.70.0 stable x86_64-unknown-illumos
18025 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18026 Sep 22 23:14:27.798 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18027 Sep 22 23:14:27.798 INFO current number of open files limit 65536 is already the maximum
18028 Sep 22 23:14:27.798 INFO Using address: 127.0.0.1:53065, task: main
18029 Sep 22 23:14:27.798 INFO Current flush_numbers [0..12]: [0, 0]
18030 Sep 22 23:14:27.798 INFO Created new region file "/tmp/downstairs-5yJd5IRE/region.json"
18031 test test::integration_test_two_layers ... ok
18032 Sep 22 23:14:27.798 INFO Downstairs has completed Negotiation, task: proc
18033 Sep 22 23:14:27.798 INFO Repair listens on 127.0.0.1:0, task: repair
18034 Sep 22 23:14:27.798 DEBG IO Write 1001 has deps [JobId(1000)]
18035 Sep 22 23:14:27.798 INFO current number of open files limit 65536 is already the maximum
18036 Sep 22 23:14:27.798 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58319, task: repair
18037 Sep 22 23:14:27.798 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58319, task: repair
18038 Sep 22 23:14:27.798 INFO Current flush_numbers [0..12]: [0, 0]
18039 Sep 22 23:14:27.798 INFO listening, local_addr: 127.0.0.1:58319, task: repair
18040 Sep 22 23:14:27.798 INFO Created new region file "/tmp/downstairs-98yOFxje/region.json"
18041 Sep 22 23:14:27.798 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58319, task: repair
18042 Sep 22 23:14:27.798 INFO Downstairs has completed Negotiation, task: proc
18043 Sep 22 23:14:27.798 DEBG [0] Read AckReady 1000, : downstairs
18044 Sep 22 23:14:27.798 INFO Using repair address: 127.0.0.1:58319, task: main
18045 Sep 22 23:14:27.798 INFO No SSL acceptor configured, task: main
18046 Sep 22 23:14:27.799 INFO [0] 2937f30d-6553-49bb-81e4-1d5ec4457063 (3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
18047 Sep 22 23:14:27.799 INFO [0] Transition from WaitActive to WaitQuorum
18048 Sep 22 23:14:27.799 DEBG [1] Read already AckReady 1000, : downstairs
18049 Sep 22 23:14:27.799 WARN [0] new RM replaced this: None
18050 Sep 22 23:14:27.799 INFO [0] Starts reconcile loop
18051 Sep 22 23:14:27.799 INFO current number of open files limit 65536 is already the maximum
18052 Sep 22 23:14:27.799 INFO [1] 2937f30d-6553-49bb-81e4-1d5ec4457063 (3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
18053 Sep 22 23:14:27.799 INFO [1] Transition from WaitActive to WaitQuorum
18054 Sep 22 23:14:27.799 WARN [1] new RM replaced this: None
18055 Sep 22 23:14:27.799 INFO [1] Starts reconcile loop
18056 Sep 22 23:14:27.799 INFO Created new region file "/tmp/downstairs-A4dsswXF/region.json"
18057 Sep 22 23:14:27.799 DEBG [2] Read already AckReady 1000, : downstairs
18058 Sep 22 23:14:27.799 DEBG up_ds_listen was notified
18059 Sep 22 23:14:27.799 INFO [2] 2937f30d-6553-49bb-81e4-1d5ec4457063 (3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
18060 Sep 22 23:14:27.799 DEBG up_ds_listen process 1000
18061 Sep 22 23:14:27.799 INFO [2] Transition from WaitActive to WaitQuorum
18062 Sep 22 23:14:27.799 WARN [2] new RM replaced this: None
18063 Sep 22 23:14:27.799 DEBG [A] ack job 1000:1, : downstairs
18064 Sep 22 23:14:27.799 INFO [2] Starts reconcile loop
18065 Sep 22 23:14:27.799 INFO [0] 127.0.0.1:41940 task reports connection:true
18066 Sep 22 23:14:27.799 INFO 2937f30d-6553-49bb-81e4-1d5ec4457063 WaitQuorum WaitQuorum WaitQuorum
18067 Sep 22 23:14:27.799 INFO [0]R flush_numbers: [0, 0]
18068 Sep 22 23:14:27.799 INFO [0]R generation: [0, 0]
18069 Sep 22 23:14:27.799 INFO [0]R dirty: [false, false]
18070 Sep 22 23:14:27.799 INFO [1]R flush_numbers: [0, 0]
18071 Sep 22 23:14:27.799 INFO [1]R generation: [0, 0]
18072 Sep 22 23:14:27.799 INFO [1]R dirty: [false, false]
18073 Sep 22 23:14:27.799 INFO [2]R flush_numbers: [0, 0]
18074 Sep 22 23:14:27.799 INFO [2]R generation: [0, 0]
18075 Sep 22 23:14:27.799 INFO [2]R dirty: [false, false]
18076 Sep 22 23:14:27.799 INFO Max found gen is 1
18077 Sep 22 23:14:27.799 INFO Generation requested: 1 >= found:1
18078 Sep 22 23:14:27.799 INFO Next flush: 1
18079 Sep 22 23:14:27.799 INFO All extents match
18080 Sep 22 23:14:27.799 INFO No downstairs repair required
18081 Sep 22 23:14:27.799 INFO No initial repair work was required
18082 Sep 22 23:14:27.799 INFO Set Downstairs and Upstairs active
18083 Sep 22 23:14:27.799 INFO 2937f30d-6553-49bb-81e4-1d5ec4457063 is now active with session: 3aafe1d5-c70b-4d8b-8ee4-fa029b9cf14d
18084 Sep 22 23:14:27.799 DEBG up_ds_listen checked 1 jobs, back to waiting
18085 Sep 22 23:14:27.799 INFO 2937f30d-6553-49bb-81e4-1d5ec4457063 Set Active after no repair
18086 Sep 22 23:14:27.799 INFO Notify all downstairs, region set compare is done.
18087 Sep 22 23:14:27.799 INFO Set check for repair
18088 Sep 22 23:14:27.800 INFO [1] 127.0.0.1:61419 task reports connection:true
18089 Sep 22 23:14:27.800 INFO 2937f30d-6553-49bb-81e4-1d5ec4457063 Active Active Active
18090 Sep 22 23:14:27.800 INFO Set check for repair
18091 Sep 22 23:14:27.800 INFO [2] 127.0.0.1:36571 task reports connection:true
18092 Sep 22 23:14:27.800 INFO 2937f30d-6553-49bb-81e4-1d5ec4457063 Active Active Active
18093 Sep 22 23:14:27.800 INFO Set check for repair
18094 Sep 22 23:14:27.800 INFO [0] received reconcile message
18095 Sep 22 23:14:27.800 INFO [0] All repairs completed, exit
18096 Sep 22 23:14:27.800 INFO [0] Starts cmd_loop
18097 Sep 22 23:14:27.800 INFO [1] received reconcile message
18098 Sep 22 23:14:27.800 INFO [1] All repairs completed, exit
18099 Sep 22 23:14:27.800 INFO [1] Starts cmd_loop
18100 Sep 22 23:14:27.800 INFO [2] received reconcile message
18101 Sep 22 23:14:27.800 INFO [2] All repairs completed, exit
18102 Sep 22 23:14:27.800 INFO [2] Starts cmd_loop
18103 The guest has finished waiting for activation
18104 Sep 22 23:14:27.800 DEBG IO Read 1000 has deps []
18105 Sep 22 23:14:27.801 DEBG Read :1000 deps:[] res:true
18106 Sep 22 23:14:27.802 INFO current number of open files limit 65536 is already the maximum
18107 Sep 22 23:14:27.802 INFO Opened existing region file "/tmp/downstairs-98yOFxje/region.json"
18108 Sep 22 23:14:27.802 INFO Database read version 1
18109 Sep 22 23:14:27.802 INFO Database write version 1
18110 Sep 22 23:14:27.802 INFO current number of open files limit 65536 is already the maximum
18111 Sep 22 23:14:27.802 INFO Opened existing region file "/tmp/downstairs-5yJd5IRE/region.json"
18112 Sep 22 23:14:27.802 INFO Database read version 1
18113 Sep 22 23:14:27.802 INFO Database write version 1
18114 Sep 22 23:14:27.802 DEBG Read :1000 deps:[] res:true
18115 Sep 22 23:14:27.802 INFO current number of open files limit 65536 is already the maximum
18116 Sep 22 23:14:27.802 INFO Opened existing region file "/tmp/downstairs-A4dsswXF/region.json"
18117 Sep 22 23:14:27.802 INFO Database read version 1
18118 Sep 22 23:14:27.802 INFO Database write version 1
18119 Sep 22 23:14:27.802 DEBG up_ds_listen was notified
18120 Sep 22 23:14:27.802 DEBG up_ds_listen process 1001
18121 Sep 22 23:14:27.802 DEBG [A] ack job 1001:2, : downstairs
18122 Sep 22 23:14:27.802 DEBG Read :1000 deps:[] res:true
18123 Sep 22 23:14:27.802 DEBG up_ds_listen checked 1 jobs, back to waiting
18124 Sep 22 23:14:27.802 DEBG IO Read 1002 has deps [JobId(1001)]
18125 Sep 22 23:14:27.803 INFO UUID: 231089c9-45a4-4798-b0b3-3fef94d32b3b
18126 Sep 22 23:14:27.803 INFO Blocks per extent:5 Total Extents: 2
18127 Sep 22 23:14:27.803 INFO Crucible Version: Crucible Version: 0.0.1
18128 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18129 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18130 rustc: 1.70.0 stable x86_64-unknown-illumos
18131 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18132 Sep 22 23:14:27.803 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18133 Sep 22 23:14:27.803 INFO Using address: 127.0.0.1:39253, task: main
18134 Sep 22 23:14:27.803 DEBG IO Write 1001 has deps [JobId(1000)]
18135 Sep 22 23:14:27.803 DEBG up_ds_listen was notified
18136 Sep 22 23:14:27.803 DEBG up_ds_listen process 1001
18137 Sep 22 23:14:27.803 DEBG Read :1002 deps:[JobId(1001)] res:true
18138 Sep 22 23:14:27.803 DEBG [A] ack job 1001:2, : downstairs
18139 Sep 22 23:14:27.803 DEBG up_ds_listen checked 1 jobs, back to waiting
18140 Sep 22 23:14:27.803 INFO Repair listens on 127.0.0.1:0, task: repair
18141 Sep 22 23:14:27.803 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35095, task: repair
18142 Sep 22 23:14:27.803 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35095, task: repair
18143 Sep 22 23:14:27.803 DEBG [0] Read AckReady 1000, : downstairs
18144 Sep 22 23:14:27.803 INFO listening, local_addr: 127.0.0.1:35095, task: repair
18145 Sep 22 23:14:27.803 DEBG Read :1002 deps:[JobId(1001)] res:true
18146 Sep 22 23:14:27.804 INFO UUID: 73465f72-83ec-4ff4-8d45-e623bca59794
18147 Sep 22 23:14:27.804 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35095, task: repair
18148 Sep 22 23:14:27.804 INFO Blocks per extent:5 Total Extents: 2
18149 Sep 22 23:14:27.804 DEBG [1] Read already AckReady 1000, : downstairs
18150 Sep 22 23:14:27.804 INFO Using repair address: 127.0.0.1:35095, task: main
18151 Sep 22 23:14:27.804 INFO No SSL acceptor configured, task: main
18152 Sep 22 23:14:27.804 INFO Crucible Version: Crucible Version: 0.0.1
18153 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18154 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18155 rustc: 1.70.0 stable x86_64-unknown-illumos
18156 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18157 Sep 22 23:14:27.804 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18158 Sep 22 23:14:27.804 DEBG Read :1002 deps:[JobId(1001)] res:true
18159 Sep 22 23:14:27.804 INFO Using address: 127.0.0.1:42670, task: main
18160 Sep 22 23:14:27.804 DEBG [2] Read already AckReady 1000, : downstairs
18161 Sep 22 23:14:27.804 DEBG up_ds_listen was notified
18162 Sep 22 23:14:27.804 DEBG up_ds_listen process 1000
18163 Sep 22 23:14:27.804 DEBG [A] ack job 1000:1, : downstairs
18164 Sep 22 23:14:27.804 INFO current number of open files limit 65536 is already the maximum
18165 Sep 22 23:14:27.804 INFO Repair listens on 127.0.0.1:0, task: repair
18166 Sep 22 23:14:27.804 INFO Created new region file "/tmp/downstairs-Ap9nIgML/region.json"
18167 Sep 22 23:14:27.804 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:54260, task: repair
18168 Sep 22 23:14:27.804 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:54260, task: repair
18169 Sep 22 23:14:27.804 INFO listening, local_addr: 127.0.0.1:54260, task: repair
18170 Sep 22 23:14:27.804 INFO UUID: ee2cace4-5815-4aa2-9ac4-dbe4afbd13c0
18171 Sep 22 23:14:27.804 INFO Blocks per extent:5 Total Extents: 2
18172 Sep 22 23:14:27.804 INFO Crucible Version: Crucible Version: 0.0.1
18173 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18174 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18175 rustc: 1.70.0 stable x86_64-unknown-illumos
18176 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18177 Sep 22 23:14:27.804 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18178 Sep 22 23:14:27.804 INFO Using address: 127.0.0.1:39350, task: main
18179 Sep 22 23:14:27.804 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:54260, task: repair
18180 Sep 22 23:14:27.805 DEBG up_ds_listen checked 1 jobs, back to waiting
18181 Sep 22 23:14:27.805 INFO Using repair address: 127.0.0.1:54260, task: main
18182 Sep 22 23:14:27.805 INFO No SSL acceptor configured, task: main
18183 Sep 22 23:14:27.805 INFO Repair listens on 127.0.0.1:0, task: repair
18184 Sep 22 23:14:27.805 INFO current number of open files limit 65536 is already the maximum
18185 Sep 22 23:14:27.805 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52539, task: repair
18186 Sep 22 23:14:27.805 INFO Created new region file "/tmp/downstairs-Kyfe8fba/region.json"
18187 Sep 22 23:14:27.805 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52539, task: repair
18188 Sep 22 23:14:27.805 INFO listening, local_addr: 127.0.0.1:52539, task: repair
18189 Sep 22 23:14:27.805 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52539, task: repair
18190 Sep 22 23:14:27.805 INFO Using repair address: 127.0.0.1:52539, task: main
18191 Sep 22 23:14:27.805 INFO No SSL acceptor configured, task: main
18192 Sep 22 23:14:27.805 INFO current number of open files limit 65536 is already the maximum
18193 Sep 22 23:14:27.806 INFO Created new region file "/tmp/downstairs-qgY2n0od/region.json"
18194 Sep 22 23:14:27.806 DEBG [0] Read AckReady 1002, : downstairs
18195 Sep 22 23:14:27.807 DEBG Write :1001 deps:[JobId(1000)] res:true
18196 Sep 22 23:14:27.808 DEBG Write :1001 deps:[JobId(1000)] res:true
18197 Sep 22 23:14:27.808 INFO current number of open files limit 65536 is already the maximum
18198 Sep 22 23:14:27.808 INFO Opened existing region file "/tmp/downstairs-Ap9nIgML/region.json"
18199 Sep 22 23:14:27.808 INFO Database read version 1
18200 Sep 22 23:14:27.808 INFO Database write version 1
18201 Sep 22 23:14:27.808 DEBG [1] Read already AckReady 1002, : downstairs
18202 Sep 22 23:14:27.808 DEBG IO Write 1001 has deps [JobId(1000)]
18203 Sep 22 23:14:27.809 DEBG Write :1001 deps:[JobId(1000)] res:true
18204 Sep 22 23:14:27.809 DEBG IO Read 1002 has deps [JobId(1001)]
18205 Sep 22 23:14:27.809 INFO current number of open files limit 65536 is already the maximum
18206 Sep 22 23:14:27.809 INFO Opened existing region file "/tmp/downstairs-Kyfe8fba/region.json"
18207 Sep 22 23:14:27.809 INFO Database read version 1
18208 Sep 22 23:14:27.809 INFO Database write version 1
18209 Sep 22 23:14:27.810 DEBG Read :1002 deps:[JobId(1001)] res:true
18210 Sep 22 23:14:27.810 DEBG [2] Read already AckReady 1002, : downstairs
18211 Sep 22 23:14:27.810 DEBG up_ds_listen was notified
18212 Sep 22 23:14:27.810 DEBG up_ds_listen process 1002
18213 Sep 22 23:14:27.810 DEBG [A] ack job 1002:3, : downstairs
18214 Sep 22 23:14:27.810 DEBG Read :1002 deps:[JobId(1001)] res:true
18215 Sep 22 23:14:27.810 DEBG up_ds_listen checked 1 jobs, back to waiting
18216 Sep 22 23:14:27.811 INFO UUID: 5d23d6e0-25c6-4cbe-ad9c-d04399dce2ff
18217 Sep 22 23:14:27.811 INFO current number of open files limit 65536 is already the maximum
18218 Sep 22 23:14:27.811 INFO Blocks per extent:5 Total Extents: 2
18219 Sep 22 23:14:27.811 INFO Opened existing region file "/tmp/downstairs-qgY2n0od/region.json"
18220 Sep 22 23:14:27.811 INFO Database read version 1
18221 Sep 22 23:14:27.811 INFO Database write version 1
18222 Sep 22 23:14:27.811 INFO Crucible Version: Crucible Version: 0.0.1
18223 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18224 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18225 rustc: 1.70.0 stable x86_64-unknown-illumos
18226 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18227 Sep 22 23:14:27.811 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18228 Sep 22 23:14:27.811 INFO Using address: 127.0.0.1:46036, task: main
18229 Sep 22 23:14:27.811 DEBG Read :1002 deps:[JobId(1001)] res:true
18230 Sep 22 23:14:27.811 INFO Repair listens on 127.0.0.1:0, task: repair
18231 Sep 22 23:14:27.811 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58152, task: repair
18232 Sep 22 23:14:27.811 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58152, task: repair
18233 Sep 22 23:14:27.811 INFO listening, local_addr: 127.0.0.1:58152, task: repair
18234 Sep 22 23:14:27.811 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58152, task: repair
18235 Sep 22 23:14:27.812 INFO Using repair address: 127.0.0.1:58152, task: main
18236 Sep 22 23:14:27.812 INFO No SSL acceptor configured, task: main
18237 Sep 22 23:14:27.812 INFO UUID: 80f95ab7-da4b-4ddc-b498-8768e5802752
18238 Sep 22 23:14:27.812 INFO Blocks per extent:5 Total Extents: 2
18239 Sep 22 23:14:27.812 INFO Crucible Version: Crucible Version: 0.0.1
18240 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18241 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18242 rustc: 1.70.0 stable x86_64-unknown-illumos
18243 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18244 Sep 22 23:14:27.812 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18245 Sep 22 23:14:27.812 INFO current number of open files limit 65536 is already the maximum
18246 Sep 22 23:14:27.812 INFO Using address: 127.0.0.1:59248, task: main
18247 Sep 22 23:14:27.812 INFO Created new region file "/tmp/downstairs-v7cW1efY/region.json"
18248 Sep 22 23:14:27.813 INFO Repair listens on 127.0.0.1:0, task: repair
18249 Sep 22 23:14:27.813 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52645, task: repair
18250 Sep 22 23:14:27.813 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52645, task: repair
18251 Sep 22 23:14:27.813 INFO listening, local_addr: 127.0.0.1:52645, task: repair
18252 Sep 22 23:14:27.813 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52645, task: repair
18253 Sep 22 23:14:27.813 INFO Using repair address: 127.0.0.1:52645, task: main
18254 Sep 22 23:14:27.813 INFO No SSL acceptor configured, task: main
18255 Sep 22 23:14:27.814 INFO UUID: 687c5d9e-4bfd-42d4-a91e-861a6091204d
18256 Sep 22 23:14:27.814 INFO current number of open files limit 65536 is already the maximum
18257 Sep 22 23:14:27.814 INFO Blocks per extent:5 Total Extents: 2
18258 Sep 22 23:14:27.814 INFO Crucible Version: Crucible Version: 0.0.1
18259 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18260 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18261 rustc: 1.70.0 stable x86_64-unknown-illumos
18262 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18263 Sep 22 23:14:27.814 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18264 Sep 22 23:14:27.814 INFO Using address: 127.0.0.1:40647, task: main
18265 Sep 22 23:14:27.814 INFO Created new region file "/tmp/downstairs-M87v3oQj/region.json"
18266 Sep 22 23:14:27.814 INFO Repair listens on 127.0.0.1:0, task: repair
18267 Sep 22 23:14:27.814 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:41223, task: repair
18268 Sep 22 23:14:27.814 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:41223, task: repair
18269 Sep 22 23:14:27.814 INFO listening, local_addr: 127.0.0.1:41223, task: repair
18270 Sep 22 23:14:27.815 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:41223, task: repair
18271 Sep 22 23:14:27.815 INFO Using repair address: 127.0.0.1:41223, task: main
18272 Sep 22 23:14:27.815 INFO No SSL acceptor configured, task: main
18273 Sep 22 23:14:27.815 DEBG [0] Read AckReady 1002, : downstairs
18274 Sep 22 23:14:27.815 INFO Upstairs starts
18275 Sep 22 23:14:27.815 INFO Crucible Version: BuildInfo {
18276 version: "0.0.1",
18277 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
18278 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
18279 git_branch: "main",
18280 rustc_semver: "1.70.0",
18281 rustc_channel: "stable",
18282 rustc_host_triple: "x86_64-unknown-illumos",
18283 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
18284 cargo_triple: "x86_64-unknown-illumos",
18285 debug: true,
18286 opt_level: 0,
18287 }
18288 Sep 22 23:14:27.815 INFO Upstairs <-> Downstairs Message Version: 4
18289 Sep 22 23:14:27.815 INFO Crucible stats registered with UUID: ee584fbe-3f93-4d75-9965-f456698e3f69
18290 Sep 22 23:14:27.815 INFO Crucible ee584fbe-3f93-4d75-9965-f456698e3f69 has session id: c547f3af-41d0-403e-8177-49c20795e0f9
18291 test test::integration_test_two_layers_parent_smaller ... ok
18292 Sep 22 23:14:27.815 DEBG up_ds_listen was notified
18293 Sep 22 23:14:27.815 DEBG up_ds_listen process 1001
18294 Sep 22 23:14:27.815 DEBG [A] ack job 1001:2, : downstairs
18295 Sep 22 23:14:27.815 INFO listening on 127.0.0.1:0, task: main
18296 Sep 22 23:14:27.816 DEBG up_ds_listen checked 1 jobs, back to waiting
18297 Sep 22 23:14:27.816 INFO listening on 127.0.0.1:0, task: main
18298 Sep 22 23:14:27.816 INFO listening on 127.0.0.1:0, task: main
18299 Sep 22 23:14:27.816 INFO [0] connecting to 127.0.0.1:53065, looper: 0
18300 Sep 22 23:14:27.816 INFO current number of open files limit 65536 is already the maximum
18301 Sep 22 23:14:27.816 INFO [1] connecting to 127.0.0.1:39350, looper: 1
18302 Sep 22 23:14:27.816 INFO Created new region file "/tmp/downstairs-AXy3vnYW/region.json"
18303 Sep 22 23:14:27.816 INFO [2] connecting to 127.0.0.1:40647, looper: 2
18304 Sep 22 23:14:27.816 INFO up_listen starts, task: up_listen
18305 Sep 22 23:14:27.816 INFO Wait for all three downstairs to come online
18306 Sep 22 23:14:27.816 INFO Flush timeout: 0.5
18307 Sep 22 23:14:27.816 INFO accepted connection from 127.0.0.1:54717, task: main
18308 Sep 22 23:14:27.816 INFO accepted connection from 127.0.0.1:58391, task: main
18309 Sep 22 23:14:27.816 INFO [0] ee584fbe-3f93-4d75-9965-f456698e3f69 looper connected, looper: 0
18310 Sep 22 23:14:27.816 INFO [0] Proc runs for 127.0.0.1:53065 in state New
18311 Sep 22 23:14:27.816 INFO [1] ee584fbe-3f93-4d75-9965-f456698e3f69 looper connected, looper: 1
18312 Sep 22 23:14:27.816 INFO [1] Proc runs for 127.0.0.1:39350 in state New
18313 Sep 22 23:14:27.816 DEBG IO Read 1002 has deps [JobId(1001)]
18314 Sep 22 23:14:27.816 INFO [2] ee584fbe-3f93-4d75-9965-f456698e3f69 looper connected, looper: 2
18315 Sep 22 23:14:27.817 INFO [2] Proc runs for 127.0.0.1:40647 in state New
18316 Sep 22 23:14:27.817 INFO accepted connection from 127.0.0.1:33403, task: main
18317 Sep 22 23:14:27.817 INFO current number of open files limit 65536 is already the maximum
18318 Sep 22 23:14:27.817 INFO Opened existing region file "/tmp/downstairs-v7cW1efY/region.json"
18319 Sep 22 23:14:27.817 INFO Database read version 1
18320 Sep 22 23:14:27.817 INFO Upstairs starts
18321 Sep 22 23:14:27.817 INFO Database write version 1
18322 Sep 22 23:14:27.817 INFO Crucible Version: BuildInfo {
18323 version: "0.0.1",
18324 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
18325 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
18326 git_branch: "main",
18327 rustc_semver: "1.70.0",
18328 rustc_channel: "stable",
18329 rustc_host_triple: "x86_64-unknown-illumos",
18330 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
18331 cargo_triple: "x86_64-unknown-illumos",
18332 debug: true,
18333 opt_level: 0,
18334 }
18335 Sep 22 23:14:27.817 INFO Upstairs <-> Downstairs Message Version: 4
18336 Sep 22 23:14:27.817 INFO Crucible stats registered with UUID: d173eb3a-6ff3-4b10-89cb-971e46f2c897
18337 Sep 22 23:14:27.817 INFO Crucible d173eb3a-6ff3-4b10-89cb-971e46f2c897 has session id: 96ff5ca8-e85b-458b-8306-7ac3cdb25aba
18338 Sep 22 23:14:27.817 INFO Connection request from ee584fbe-3f93-4d75-9965-f456698e3f69 with version 4, task: proc
18339 Sep 22 23:14:27.817 INFO upstairs UpstairsConnection { upstairs_id: ee584fbe-3f93-4d75-9965-f456698e3f69, session_id: b4945474-3075-4e49-b652-62cc3ebba236, gen: 3 } connected, version 4, task: proc
18340 Sep 22 23:14:27.817 INFO Connection request from ee584fbe-3f93-4d75-9965-f456698e3f69 with version 4, task: proc
18341 Sep 22 23:14:27.817 INFO upstairs UpstairsConnection { upstairs_id: ee584fbe-3f93-4d75-9965-f456698e3f69, session_id: b4945474-3075-4e49-b652-62cc3ebba236, gen: 3 } connected, version 4, task: proc
18342 Sep 22 23:14:27.817 DEBG [1] Read already AckReady 1002, : downstairs
18343 Sep 22 23:14:27.817 INFO Connection request from ee584fbe-3f93-4d75-9965-f456698e3f69 with version 4, task: proc
18344 Sep 22 23:14:27.817 INFO upstairs UpstairsConnection { upstairs_id: ee584fbe-3f93-4d75-9965-f456698e3f69, session_id: b4945474-3075-4e49-b652-62cc3ebba236, gen: 3 } connected, version 4, task: proc
18345 Sep 22 23:14:27.817 DEBG Read :1002 deps:[JobId(1001)] res:true
18346 Sep 22 23:14:27.817 INFO [0] connecting to 127.0.0.1:34728, looper: 0
18347 Sep 22 23:14:27.817 INFO [1] connecting to 127.0.0.1:61240, looper: 1
18348 Sep 22 23:14:27.818 INFO [2] connecting to 127.0.0.1:49345, looper: 2
18349 Sep 22 23:14:27.818 INFO up_listen starts, task: up_listen
18350 Sep 22 23:14:27.818 INFO Wait for all three downstairs to come online
18351 Sep 22 23:14:27.818 INFO Flush timeout: 0.5
18352 Sep 22 23:14:27.818 INFO accepted connection from 127.0.0.1:34095, task: main
18353 Sep 22 23:14:27.818 DEBG Read :1002 deps:[JobId(1001)] res:true
18354 Sep 22 23:14:27.818 INFO accepted connection from 127.0.0.1:37051, task: main
18355 Sep 22 23:14:27.818 INFO accepted connection from 127.0.0.1:33972, task: main
18356 Sep 22 23:14:27.818 INFO current number of open files limit 65536 is already the maximum
18357 Sep 22 23:14:27.818 INFO Opened existing region file "/tmp/downstairs-M87v3oQj/region.json"
18358 Sep 22 23:14:27.818 INFO Database read version 1
18359 Sep 22 23:14:27.818 INFO Database write version 1
18360 Sep 22 23:14:27.818 INFO [0] ee584fbe-3f93-4d75-9965-f456698e3f69 (b4945474-3075-4e49-b652-62cc3ebba236) New New New ds_transition to WaitActive
18361 Sep 22 23:14:27.818 INFO [0] Transition from New to WaitActive
18362 Sep 22 23:14:27.818 INFO [1] ee584fbe-3f93-4d75-9965-f456698e3f69 (b4945474-3075-4e49-b652-62cc3ebba236) WaitActive New New ds_transition to WaitActive
18363 Sep 22 23:14:27.818 INFO [1] Transition from New to WaitActive
18364 Sep 22 23:14:27.818 INFO [2] ee584fbe-3f93-4d75-9965-f456698e3f69 (b4945474-3075-4e49-b652-62cc3ebba236) WaitActive WaitActive New ds_transition to WaitActive
18365 Sep 22 23:14:27.818 INFO [2] Transition from New to WaitActive
18366 Sep 22 23:14:27.818 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 looper connected, looper: 0
18367 Sep 22 23:14:27.818 INFO [0] Proc runs for 127.0.0.1:34728 in state New
18368 Sep 22 23:14:27.818 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 looper connected, looper: 1
18369 Sep 22 23:14:27.818 INFO [1] Proc runs for 127.0.0.1:61240 in state New
18370 Sep 22 23:14:27.818 DEBG Read :1002 deps:[JobId(1001)] res:true
18371 Sep 22 23:14:27.818 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 looper connected, looper: 2
18372 Sep 22 23:14:27.818 INFO [2] Proc runs for 127.0.0.1:49345 in state New
18373 Sep 22 23:14:27.819 INFO Connection request from d173eb3a-6ff3-4b10-89cb-971e46f2c897 with version 4, task: proc
18374 Sep 22 23:14:27.819 INFO upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: bfec8e34-60e1-4c3f-9196-b8cad2cf6289, gen: 3 } connected, version 4, task: proc
18375 Sep 22 23:14:27.819 INFO Connection request from d173eb3a-6ff3-4b10-89cb-971e46f2c897 with version 4, task: proc
18376 Sep 22 23:14:27.819 INFO upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: bfec8e34-60e1-4c3f-9196-b8cad2cf6289, gen: 3 } connected, version 4, task: proc
18377 Sep 22 23:14:27.819 INFO Connection request from d173eb3a-6ff3-4b10-89cb-971e46f2c897 with version 4, task: proc
18378 Sep 22 23:14:27.819 INFO upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: bfec8e34-60e1-4c3f-9196-b8cad2cf6289, gen: 3 } connected, version 4, task: proc
18379 Sep 22 23:14:27.819 INFO UUID: e165f739-3242-442c-822a-5ed698df44b4
18380 Sep 22 23:14:27.819 INFO Blocks per extent:5 Total Extents: 2
18381 Sep 22 23:14:27.819 INFO Crucible Version: Crucible Version: 0.0.1
18382 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18383 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18384 rustc: 1.70.0 stable x86_64-unknown-illumos
18385 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18386 Sep 22 23:14:27.819 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (bfec8e34-60e1-4c3f-9196-b8cad2cf6289) New New New ds_transition to WaitActive
18387 Sep 22 23:14:27.819 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18388 Sep 22 23:14:27.819 INFO [0] Transition from New to WaitActive
18389 Sep 22 23:14:27.819 INFO Using address: 127.0.0.1:37718, task: main
18390 Sep 22 23:14:27.819 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (bfec8e34-60e1-4c3f-9196-b8cad2cf6289) WaitActive New New ds_transition to WaitActive
18391 Sep 22 23:14:27.819 INFO [1] Transition from New to WaitActive
18392 Sep 22 23:14:27.819 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (bfec8e34-60e1-4c3f-9196-b8cad2cf6289) WaitActive WaitActive New ds_transition to WaitActive
18393 Sep 22 23:14:27.819 INFO [2] Transition from New to WaitActive
18394 The guest has requested activation
18395 Sep 22 23:14:27.820 INFO ee584fbe-3f93-4d75-9965-f456698e3f69 active request set
18396 Sep 22 23:14:27.820 INFO [0] received activate with gen 3
18397 Sep 22 23:14:27.820 DEBG [2] Read already AckReady 1002, : downstairs
18398 Sep 22 23:14:27.820 INFO [0] client got ds_active_rx, promote! session b4945474-3075-4e49-b652-62cc3ebba236
18399 Sep 22 23:14:27.820 INFO Repair listens on 127.0.0.1:0, task: repair
18400 Sep 22 23:14:27.820 INFO [1] received activate with gen 3
18401 Sep 22 23:14:27.820 DEBG up_ds_listen was notified
18402 Sep 22 23:14:27.820 INFO [1] client got ds_active_rx, promote! session b4945474-3075-4e49-b652-62cc3ebba236
18403 Sep 22 23:14:27.820 DEBG up_ds_listen process 1002
18404 Sep 22 23:14:27.820 INFO [2] received activate with gen 3
18405 Sep 22 23:14:27.820 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57972, task: repair
18406 Sep 22 23:14:27.820 INFO [2] client got ds_active_rx, promote! session b4945474-3075-4e49-b652-62cc3ebba236
18407 Sep 22 23:14:27.820 DEBG [A] ack job 1002:3, : downstairs
18408 Sep 22 23:14:27.820 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57972, task: repair
18409 Sep 22 23:14:27.820 INFO listening, local_addr: 127.0.0.1:57972, task: repair
18410 Sep 22 23:14:27.820 INFO UpstairsConnection { upstairs_id: ee584fbe-3f93-4d75-9965-f456698e3f69, session_id: b4945474-3075-4e49-b652-62cc3ebba236, gen: 3 } is now active (read-write)
18411 Sep 22 23:14:27.820 INFO UpstairsConnection { upstairs_id: ee584fbe-3f93-4d75-9965-f456698e3f69, session_id: b4945474-3075-4e49-b652-62cc3ebba236, gen: 3 } is now active (read-write)
18412 Sep 22 23:14:27.820 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57972, task: repair
18413 Sep 22 23:14:27.820 INFO UpstairsConnection { upstairs_id: ee584fbe-3f93-4d75-9965-f456698e3f69, session_id: b4945474-3075-4e49-b652-62cc3ebba236, gen: 3 } is now active (read-write)
18414 Sep 22 23:14:27.820 INFO Using repair address: 127.0.0.1:57972, task: main
18415 Sep 22 23:14:27.820 INFO No SSL acceptor configured, task: main
18416 Sep 22 23:14:27.820 DEBG up_ds_listen checked 1 jobs, back to waiting
18417 Sep 22 23:14:27.820 INFO [0] downstairs client at 127.0.0.1:53065 has UUID 63793045-02dd-4dbb-ad46-c6b5f41c9d53
18418 Sep 22 23:14:27.820 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 63793045-02dd-4dbb-ad46-c6b5f41c9d53, encrypted: true, database_read_version: 1, database_write_version: 1 }
18419 Sep 22 23:14:27.820 INFO ee584fbe-3f93-4d75-9965-f456698e3f69 WaitActive WaitActive WaitActive
18420 Sep 22 23:14:27.820 INFO [1] downstairs client at 127.0.0.1:39350 has UUID ee2cace4-5815-4aa2-9ac4-dbe4afbd13c0
18421 Sep 22 23:14:27.820 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ee2cace4-5815-4aa2-9ac4-dbe4afbd13c0, encrypted: true, database_read_version: 1, database_write_version: 1 }
18422 Sep 22 23:14:27.821 INFO ee584fbe-3f93-4d75-9965-f456698e3f69 WaitActive WaitActive WaitActive
18423 Sep 22 23:14:27.821 INFO current number of open files limit 65536 is already the maximum
18424 Sep 22 23:14:27.821 INFO Opened existing region file "/tmp/downstairs-AXy3vnYW/region.json"
18425 Sep 22 23:14:27.821 INFO [2] downstairs client at 127.0.0.1:40647 has UUID 687c5d9e-4bfd-42d4-a91e-861a6091204d
18426 Sep 22 23:14:27.821 INFO Database read version 1
18427 Sep 22 23:14:27.821 INFO Database write version 1
18428 Sep 22 23:14:27.821 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 687c5d9e-4bfd-42d4-a91e-861a6091204d, encrypted: true, database_read_version: 1, database_write_version: 1 }
18429 Sep 22 23:14:27.821 INFO ee584fbe-3f93-4d75-9965-f456698e3f69 WaitActive WaitActive WaitActive
18430 Sep 22 23:14:27.821 INFO UUID: 124ac7ec-a4db-423d-aef5-37bb4184fe05
18431 Sep 22 23:14:27.821 INFO Blocks per extent:5 Total Extents: 2
18432 Sep 22 23:14:27.821 INFO Current flush_numbers [0..12]: [0, 0]
18433 Sep 22 23:14:27.821 INFO Crucible Version: Crucible Version: 0.0.1
18434 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18435 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18436 rustc: 1.70.0 stable x86_64-unknown-illumos
18437 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18438 Sep 22 23:14:27.821 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18439 Sep 22 23:14:27.821 INFO Using address: 127.0.0.1:39733, task: main
18440 Sep 22 23:14:27.821 INFO Downstairs has completed Negotiation, task: proc
18441 Sep 22 23:14:27.821 INFO Current flush_numbers [0..12]: [0, 0]
18442 Sep 22 23:14:27.821 INFO Repair listens on 127.0.0.1:0, task: repair
18443 Sep 22 23:14:27.821 INFO Downstairs has completed Negotiation, task: proc
18444 Sep 22 23:14:27.821 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53212, task: repair
18445 Sep 22 23:14:27.821 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53212, task: repair
18446 Sep 22 23:14:27.821 INFO listening, local_addr: 127.0.0.1:53212, task: repair
18447 Sep 22 23:14:27.821 INFO Current flush_numbers [0..12]: [0, 0]
18448 Sep 22 23:14:27.822 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53212, task: repair
18449 Sep 22 23:14:27.822 INFO Downstairs has completed Negotiation, task: proc
18450 Sep 22 23:14:27.822 INFO Using repair address: 127.0.0.1:53212, task: main
18451 Sep 22 23:14:27.822 INFO No SSL acceptor configured, task: main
18452 Sep 22 23:14:27.822 INFO Upstairs starts
18453 Sep 22 23:14:27.822 INFO Crucible Version: BuildInfo {
18454 version: "0.0.1",
18455 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
18456 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
18457 git_branch: "main",
18458 rustc_semver: "1.70.0",
18459 rustc_channel: "stable",
18460 rustc_host_triple: "x86_64-unknown-illumos",
18461 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
18462 cargo_triple: "x86_64-unknown-illumos",
18463 debug: true,
18464 opt_level: 0,
18465 }
18466 Sep 22 23:14:27.822 INFO Upstairs <-> Downstairs Message Version: 4
18467 Sep 22 23:14:27.822 INFO Crucible stats registered with UUID: da9f63ff-0d34-4427-ba79-865cdebab321
18468 Sep 22 23:14:27.822 INFO Crucible da9f63ff-0d34-4427-ba79-865cdebab321 has session id: b7dc2749-2f69-4d42-bf43-7535ee1f4ae4
18469 Sep 22 23:14:27.822 INFO [0] ee584fbe-3f93-4d75-9965-f456698e3f69 (b4945474-3075-4e49-b652-62cc3ebba236) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
18470 Sep 22 23:14:27.822 INFO [0] Transition from WaitActive to WaitQuorum
18471 Sep 22 23:14:27.822 WARN [0] new RM replaced this: None
18472 Sep 22 23:14:27.822 INFO [0] Starts reconcile loop
18473 Sep 22 23:14:27.822 INFO listening on 127.0.0.1:0, task: main
18474 Sep 22 23:14:27.822 INFO [1] ee584fbe-3f93-4d75-9965-f456698e3f69 (b4945474-3075-4e49-b652-62cc3ebba236) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
18475 Sep 22 23:14:27.822 INFO [1] Transition from WaitActive to WaitQuorum
18476 Sep 22 23:14:27.822 WARN [1] new RM replaced this: None
18477 Sep 22 23:14:27.822 INFO listening on 127.0.0.1:0, task: main
18478 Sep 22 23:14:27.822 INFO [1] Starts reconcile loop
18479 Sep 22 23:14:27.822 INFO listening on 127.0.0.1:0, task: main
18480 Sep 22 23:14:27.822 INFO [2] ee584fbe-3f93-4d75-9965-f456698e3f69 (b4945474-3075-4e49-b652-62cc3ebba236) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
18481 Sep 22 23:14:27.822 INFO [2] Transition from WaitActive to WaitQuorum
18482 Sep 22 23:14:27.822 WARN [2] new RM replaced this: None
18483 Sep 22 23:14:27.822 INFO [0] connecting to 127.0.0.1:39253, looper: 0
18484 Sep 22 23:14:27.822 INFO [2] Starts reconcile loop
18485 Sep 22 23:14:27.822 INFO [0] 127.0.0.1:53065 task reports connection:true
18486 Sep 22 23:14:27.822 INFO ee584fbe-3f93-4d75-9965-f456698e3f69 WaitQuorum WaitQuorum WaitQuorum
18487 Sep 22 23:14:27.822 INFO [0]R flush_numbers: [0, 0]
18488 Sep 22 23:14:27.822 INFO [0]R generation: [0, 0]
18489 Sep 22 23:14:27.822 INFO [0]R dirty: [false, false]
18490 Sep 22 23:14:27.822 INFO [1] connecting to 127.0.0.1:46036, looper: 1
18491 Sep 22 23:14:27.822 INFO [1]R flush_numbers: [0, 0]
18492 Sep 22 23:14:27.822 INFO [1]R generation: [0, 0]
18493 Sep 22 23:14:27.822 INFO [1]R dirty: [false, false]
18494 Sep 22 23:14:27.822 INFO [2]R flush_numbers: [0, 0]
18495 Sep 22 23:14:27.822 INFO [2]R generation: [0, 0]
18496 {"msg":"Upstairs startsSep 22 23:14:27.822 INFO [2]R dirty: [false, false]
18497 ","v":0,"name":"crucible","level":30Sep 22 23:14:27.822 INFO Max found gen is 1
18498 Sep 22 23:14:27.822 INFO Generation requested: 3 >= found:1
18499 ,"time":"2023-09-22T23:14:27.82289313Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18500 Sep 22 23:14:27.822 INFO Next flush: 1
18501 {"msg":"Crucible Version: BuildInfo {\n version: \"0.0.1\"Sep 22 23:14:27.822 INFO [2] connecting to 127.0.0.1:37718, looper: 2
18502 ,\n git_sha: \"ed48f294784d46ea7d4bb99336918b74358eca46\",\n git_commit_timestamp: \"2023-09-22T22:51:18.000000000Z\",\n git_branch: \"main\",\n rustc_semver: \"Sep 22 23:14:27.822 INFO All extents match
18503 1.70.0\",\n rustc_channel: \"stable\",\n rustc_host_triple: \"x86_64-unknown-illumos\",\n rustc_commit_sha: \"90c541806f23a127002de5b4038be731ba1458ca\",\n cargo_triple: \"x86_64-unknown-illumos\",\n debug: true,\n opt_level: 0,\n}","v":Sep 22 23:14:27.822 INFO No downstairs repair required
18504 0,"name":"crucible","level":30,"time":"Sep 22 23:14:27.822 INFO No initial repair work was required
18505 2023-09-22T23:14:27.822983873Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18506 {Sep 22 23:14:27.822 INFO Set Downstairs and Upstairs active
18507 "msg":"Upstairs <-> Downstairs Message Version: 4","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.823016525Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18508 {"msg":"Crucible stats registered with UUID: 77ae10ed-b83f-4607-b62b-b92e32be1f1cSep 22 23:14:27.823 INFO ee584fbe-3f93-4d75-9965-f456698e3f69 is now active with session: b4945474-3075-4e49-b652-62cc3ebba236
18509 ","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.823042292Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18510 Sep 22 23:14:27.823 INFO ee584fbe-3f93-4d75-9965-f456698e3f69 Set Active after no repair
18511 {"msg":"Sep 22 23:14:27.823 INFO up_listen starts, task: up_listen
18512 Sep 22 23:14:27.823 INFO Notify all downstairs, region set compare is done.
18513 Crucible 77ae10ed-b83f-4607-b62b-b92e32be1f1c has session id: d9b12516-1c3f-4c95-b098-931fde4cb58b","v":0,"name":"crucible","level":30Sep 22 23:14:27.823 INFO Wait for all three downstairs to come online
18514 ,"time":"2023-09-22T23:14:27.823078924Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18515 Sep 22 23:14:27.823 INFO Flush timeout: 0.5
18516 Sep 22 23:14:27.823 INFO Set check for repair
18517 Sep 22 23:14:27.823 INFO [1] 127.0.0.1:39350 task reports connection:true
18518 Sep 22 23:14:27.823 INFO ee584fbe-3f93-4d75-9965-f456698e3f69 Active Active Active
18519 Sep 22 23:14:27.823 INFO Set check for repair
18520 Sep 22 23:14:27.823 INFO [2] 127.0.0.1:40647 task reports connection:true
18521 Sep 22 23:14:27.823 INFO ee584fbe-3f93-4d75-9965-f456698e3f69 Active Active Active
18522 Sep 22 23:14:27.823 INFO Set check for repair
18523 Sep 22 23:14:27.823 INFO [1] da9f63ff-0d34-4427-ba79-865cdebab321 looper connected, looper: 1
18524 The guest has requested activation
18525 Sep 22 23:14:27.823 INFO [1] Proc runs for 127.0.0.1:46036 in state New
18526 Sep 22 23:14:27.823 INFO [0] received reconcile message
18527 Sep 22 23:14:27.823 DEBG [0] Read AckReady 1002, : downstairs
18528 Sep 22 23:14:27.823 INFO listening on 127.0.0.1:0, task: main
18529 Sep 22 23:14:27.823 INFO [0] All repairs completed, exit
18530 Sep 22 23:14:27.823 INFO [0] Starts cmd_loop
18531 Sep 22 23:14:27.823 INFO listening on 127.0.0.1:0, task: main
18532 Sep 22 23:14:27.823 INFO [0] da9f63ff-0d34-4427-ba79-865cdebab321 looper connected, looper: 0
18533 Sep 22 23:14:27.823 INFO [0] Proc runs for 127.0.0.1:39253 in state New
18534 Sep 22 23:14:27.823 INFO listening on 127.0.0.1:0, task: main
18535 Sep 22 23:14:27.823 INFO [2] da9f63ff-0d34-4427-ba79-865cdebab321 looper connected, looper: 2
18536 {"Sep 22 23:14:27.823 INFO [2] Proc runs for 127.0.0.1:37718 in state New
18537 msg":"[0] connecting to 127.0.0.1:42670","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.823647343Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"0"}
18538 Sep 22 23:14:27.823 INFO [1] received reconcile message
18539 Sep 22 23:14:27.823 INFO [1] All repairs completed, exit
18540 Sep 22 23:14:27.823 INFO [1] Starts cmd_loop
18541 {"msg":"[1] connecting to 127.0.0.1:59248","v":0,"name":"crucible","level":30Sep 22 23:14:27.823 INFO [2] received reconcile message
18542 ,"time":"2023-09-22T23:14:27.82379893Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"1"}
18543 Sep 22 23:14:27.823 INFO [2] All repairs completed, exit
18544 Sep 22 23:14:27.823 INFO [2] Starts cmd_loop
18545 The guest has finished waiting for activation
18546 {"msg":"[2] connecting to 127.0.0.1:39733","v":0,"name":"crucible","level":30Sep 22 23:14:27.823 INFO accepted connection from 127.0.0.1:60176, task: main
18547 ,"time":"2023-09-22T23:14:27.823951761Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"2"}
18548 Sep 22 23:14:27.824 INFO accepted connection from 127.0.0.1:56689, task: main
18549 Sep 22 23:14:27.824 INFO UUID: fe635929-4814-4e1e-bd6d-2218abf254fb
18550 {"msg":"up_listen starts"Sep 22 23:14:27.824 INFO accepted connection from 127.0.0.1:64466, task: main
18551 ,"v":0,"name":"crucible","level":Sep 22 23:14:27.824 INFO Blocks per extent:5 Total Extents: 2
18552 30,"time":"2023-09-22T23:14:27.824119905Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"task":"up_listen"}
185532023-09-22T23:14:27.824ZINFOcrucible: Wait for all three downstairs to come online
18554 {"Sep 22 23:14:27.824 INFO Crucible Version: Crucible Version: 0.0.1
18555 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18556 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18557 rustc: 1.70.0 stable x86_64-unknown-illumos
18558 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18559 msg":"Flush timeout: 0.5","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.824215589Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18560 Sep 22 23:14:27.824 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18561 {"msg":"77ae10ed-b83f-4607-b62b-b92e32be1f1c active request set","v":0,"name":"crucible","level":30Sep 22 23:14:27.824 INFO Using address: 127.0.0.1:52814, task: main
18562 ,"time":"2023-09-22T23:14:27.824258263Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18563 Sep 22 23:14:27.824 INFO accepted connection from 127.0.0.1:46489, task: main
18564 The guest has requested activation
18565 Sep 22 23:14:27.824 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 active request set
185662023-09-22T23:14:27.824ZINFOcrucible: [0] 77ae10ed-b83f-4607-b62b-b92e32be1f1c looper connected looper = 0
18567 {"msg":"[0] Proc runs for 127.0.0.1:42670 in state New","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.824593927Z","Sep 22 23:14:27.824 INFO [0] received activate with gen 3
18568 hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18569 {"msg":"Sep 22 23:14:27.824 INFO [0] client got ds_active_rx, promote! session bfec8e34-60e1-4c3f-9196-b8cad2cf6289
18570 [1] 77ae10ed-b83f-4607-b62b-b92e32be1f1c looper connected","v":0,"name":"crucible","level":Sep 22 23:14:27.824 INFO Repair listens on 127.0.0.1:0, task: repair
18571 30,"time":"2023-09-22T23:14:27.824652463Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"looper":"1"}
185722023-09-22T23:14:27.824ZINFOcrucible: [1] Proc runs for 127.0.0.1:59248 in state New
185732023-09-22T23:14:27.824ZINFOcrucible: [2] 77ae10ed-b83f-4607-b62b-b92e32be1f1c looper connected looper = 2
18574 {"msg":"[2] Proc runs for 127.0.0.1:39733 in state New","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.824722603Z",Sep 22 23:14:27.824 INFO [1] received activate with gen 3
18575 "hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18576 Sep 22 23:14:27.824 INFO [1] client got ds_active_rx, promote! session bfec8e34-60e1-4c3f-9196-b8cad2cf6289
18577 Sep 22 23:14:27.824 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:45393, task: repair
18578 Sep 22 23:14:27.824 INFO accepted connection from 127.0.0.1:63683, task: main
18579 Sep 22 23:14:27.824 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:45393, task: repair
18580 Sep 22 23:14:27.824 INFO [2] received activate with gen 3
18581 Sep 22 23:14:27.824 INFO [2] client got ds_active_rx, promote! session bfec8e34-60e1-4c3f-9196-b8cad2cf6289
18582 Sep 22 23:14:27.824 INFO listening, local_addr: 127.0.0.1:45393, task: repair
18583 Sep 22 23:14:27.824 INFO accepted connection from 127.0.0.1:65112, task: main
18584 Sep 22 23:14:27.825 INFO Signaling to UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 90c0414c-0a2e-4dcb-869c-f885bd47f77c, gen: 2 } thread that UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: bfec8e34-60e1-4c3f-9196-b8cad2cf6289, gen: 3 } is being promoted (read-only)
18585 Sep 22 23:14:27.825 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:45393, task: repair
18586 Sep 22 23:14:27.825 INFO Using repair address: 127.0.0.1:45393, task: main
18587 Sep 22 23:14:27.825 INFO No SSL acceptor configured, task: main
18588 Sep 22 23:14:27.825 INFO Signaling to UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 90c0414c-0a2e-4dcb-869c-f885bd47f77c, gen: 2 } thread that UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: bfec8e34-60e1-4c3f-9196-b8cad2cf6289, gen: 3 } is being promoted (read-only)
18589 Sep 22 23:14:27.825 INFO Connection request from 77ae10ed-b83f-4607-b62b-b92e32be1f1c with version 4, task: proc
18590 Sep 22 23:14:27.825 INFO upstairs UpstairsConnection { upstairs_id: 77ae10ed-b83f-4607-b62b-b92e32be1f1c, session_id: d1991615-52c2-49f2-91b4-d8c5e990be50, gen: 1 } connected, version 4, task: proc
18591 Sep 22 23:14:27.825 INFO Signaling to UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 90c0414c-0a2e-4dcb-869c-f885bd47f77c, gen: 2 } thread that UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: bfec8e34-60e1-4c3f-9196-b8cad2cf6289, gen: 3 } is being promoted (read-only)
18592 Sep 22 23:14:27.825 INFO Connection request from 77ae10ed-b83f-4607-b62b-b92e32be1f1c with version 4, task: proc
18593 test test::integration_test_two_layers_parent_smaller_unwritten ... Sep 22 23:14:27.825 INFO current number of open files limit 65536 is already the maximum
18594 ok
18595 Sep 22 23:14:27.825 INFO upstairs UpstairsConnection { upstairs_id: 77ae10ed-b83f-4607-b62b-b92e32be1f1c, session_id: d1991615-52c2-49f2-91b4-d8c5e990be50, gen: 1 } connected, version 4, task: proc
18596 Sep 22 23:14:27.825 WARN Another upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: bfec8e34-60e1-4c3f-9196-b8cad2cf6289, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 90c0414c-0a2e-4dcb-869c-f885bd47f77c, gen: 2 }, task: main
18597 Sep 22 23:14:27.825 INFO Connection request from 77ae10ed-b83f-4607-b62b-b92e32be1f1c with version 4, task: proc
18598 Sep 22 23:14:27.825 INFO Created new region file "/tmp/downstairs-CtoLca49/region.json"
18599 Sep 22 23:14:27.825 INFO connection (127.0.0.1:40760): all done
18600 Sep 22 23:14:27.825 INFO upstairs UpstairsConnection { upstairs_id: 77ae10ed-b83f-4607-b62b-b92e32be1f1c, session_id: d1991615-52c2-49f2-91b4-d8c5e990be50, gen: 1 } connected, version 4, task: proc
18601 Sep 22 23:14:27.825 WARN Another upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: bfec8e34-60e1-4c3f-9196-b8cad2cf6289, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 90c0414c-0a2e-4dcb-869c-f885bd47f77c, gen: 2 }, task: main
18602 Sep 22 23:14:27.825 INFO connection (127.0.0.1:53958): all done
18603 Sep 22 23:14:27.825 WARN Another upstairs UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: bfec8e34-60e1-4c3f-9196-b8cad2cf6289, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: d173eb3a-6ff3-4b10-89cb-971e46f2c897, session_id: 90c0414c-0a2e-4dcb-869c-f885bd47f77c, gen: 2 }, task: main
186042023-09-22T23:14:27.825ZINFOcrucible: [0] 77ae10ed-b83f-4607-b62b-b92e32be1f1c (d1991615-52c2-49f2-91b4-d8c5e990be50) New New New ds_transition to WaitActive
18605 {Sep 22 23:14:27.825 INFO connection (127.0.0.1:53496): all done
18606 "msg":"[0] Transition from New to WaitActive","v":0,"name":"crucible","level":30Sep 22 23:14:27.825 INFO current number of open files limit 65536 is already the maximum
18607 ,"time":"2023-09-22T23:14:27.825757132Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
186082023-09-22T23:14:27.825ZINFOcrucible: [0] client is_active_req TRUE, promote! session d1991615-52c2-49f2-91b4-d8c5e990be50
186092023-09-22T23:14:27.825ZINFOcrucible: [1] 77ae10ed-b83f-4607-b62b-b92e32be1f1c (d1991615-52c2-49f2-91b4-d8c5e990be50) WaitActive New New ds_transition to WaitActive
18610 Sep 22 23:14:27.825 INFO Created new region file "/tmp/downstairs-MJYSYpkx/region.json"
186112023-09-22T23:14:27.825ZINFOcrucible: [1] Transition from New to WaitActive
186122023-09-22T23:14:27.825ZINFOcrucible: [1] client is_active_req TRUE, promote! session d1991615-52c2-49f2-91b4-d8c5e990be50
186132023-09-22T23:14:27.825ZINFOcrucible: [2] 77ae10ed-b83f-4607-b62b-b92e32be1f1c (d1991615-52c2-49f2-91b4-d8c5e990be50) WaitActive WaitActive New ds_transition to WaitActive
186142023-09-22T23:14:27.826ZINFOcrucible: [2] Transition from New to WaitActive
18615 {"msg":"Sep 22 23:14:27.826 ERRO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (90c0414c-0a2e-4dcb-869c-f885bd47f77c) cmd_loop saw YouAreNoLongerActive d173eb3a-6ff3-4b10-89cb-971e46f2c897 bfec8e34-60e1-4c3f-9196-b8cad2cf6289 3
18616 [2] client is_active_req TRUE, promote! session d1991615-52c2-49f2-91b4-d8c5e990be50","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.826053561Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18617 Sep 22 23:14:27.826 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (90c0414c-0a2e-4dcb-869c-f885bd47f77c) Active Active Active ds_transition to Disabled
18618 Sep 22 23:14:27.826 INFO [0] Transition from Active to Disabled
18619 Sep 22 23:14:27.826 DEBG [1] Read already AckReady 1002, : downstairs
18620 Sep 22 23:14:27.826 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 set inactive, session 90c0414c-0a2e-4dcb-869c-f885bd47f77c
18621 Sep 22 23:14:27.826 ERRO 127.0.0.1:34728: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 2, looper: 0
18622 Sep 22 23:14:27.826 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 Gone missing, transition from Disabled to Disconnected
18623 Sep 22 23:14:27.826 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 connection to 127.0.0.1:34728 closed, looper: 0
18624 Sep 22 23:14:27.826 ERRO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (90c0414c-0a2e-4dcb-869c-f885bd47f77c) cmd_loop saw YouAreNoLongerActive d173eb3a-6ff3-4b10-89cb-971e46f2c897 bfec8e34-60e1-4c3f-9196-b8cad2cf6289 3
18625 Sep 22 23:14:27.826 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (90c0414c-0a2e-4dcb-869c-f885bd47f77c) Disconnected Active Active ds_transition to Disabled
18626 Sep 22 23:14:27.826 INFO [1] Transition from Active to Disabled
18627 Sep 22 23:14:27.826 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 set inactive, session 90c0414c-0a2e-4dcb-869c-f885bd47f77c
186282023-09-22T23:14:27.826ZINFOcrucible: [0] downstairs client at 127.0.0.1:42670 has UUID 73465f72-83ec-4ff4-8d45-e623bca59794
18629 {"msg":"Sep 22 23:14:27.826 ERRO 127.0.0.1:61240: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 2, looper: 1
18630 [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 73465f72-83ec-4ff4-8d45-e623bca59794, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","level":30Sep 22 23:14:27.826 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 Gone missing, transition from Disabled to Disconnected
18631 ,"time":"2023-09-22T23:14:27.826663971Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18632 Sep 22 23:14:27.826 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 connection to 127.0.0.1:61240 closed, looper: 1
186332023-09-22T23:14:27.826ZINFOcrucible: 77ae10ed-b83f-4607-b62b-b92e32be1f1c WaitActive WaitActive WaitActive
186342023-09-22T23:14:27.826ZINFOcrucible: [1] downstairs client at 127.0.0.1:59248 has UUID 80f95ab7-da4b-4ddc-b498-8768e5802752
18635 {"msg":"Sep 22 23:14:27.826 ERRO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (90c0414c-0a2e-4dcb-869c-f885bd47f77c) cmd_loop saw YouAreNoLongerActive d173eb3a-6ff3-4b10-89cb-971e46f2c897 bfec8e34-60e1-4c3f-9196-b8cad2cf6289 3
18636 [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 80f95ab7-da4b-4ddc-b498-8768e5802752, encrypted: true, database_read_version: 1, database_write_version: 1 }","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.826781832Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}Sep 22 23:14:27.826 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (90c0414c-0a2e-4dcb-869c-f885bd47f77c) Disconnected Disconnected Active ds_transition to Disabled
18637 
18638 {"msg":"Sep 22 23:14:27.826 INFO [2] Transition from Active to Disabled
18639 77ae10ed-b83f-4607-b62b-b92e32be1f1c WaitActive WaitActive WaitActive","v":0,"name":"crucible","level":30,"time":"Sep 22 23:14:27.826 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 set inactive, session 90c0414c-0a2e-4dcb-869c-f885bd47f77c
18640 2023-09-22T23:14:27.826830831Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
186412023-09-22T23:14:27.826ZINFOcrucible: [2] downstairs client at 127.0.0.1:39733 has UUID 124ac7ec-a4db-423d-aef5-37bb4184fe05
186422023-09-22T23:14:27.826ZINFOcrucible: [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 124ac7ec-a4db-423d-aef5-37bb4184fe05, encrypted: true, database_read_version: 1, database_write_version: 1 }
186432023-09-22T23:14:27.826ZINFOcrucible: 77ae10ed-b83f-4607-b62b-b92e32be1f1c WaitActive WaitActive WaitActive
18644 Sep 22 23:14:27.826 ERRO 127.0.0.1:49345: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 2, looper: 2
18645 Sep 22 23:14:27.827 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 Gone missing, transition from Disabled to Disconnected
18646 Sep 22 23:14:27.827 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 connection to 127.0.0.1:49345 closed, looper: 2
18647 Sep 22 23:14:27.827 WARN [0] pm_task rx.recv() is None
18648 Sep 22 23:14:27.827 INFO Current flush_numbers [0..12]: [0, 0]
18649 Sep 22 23:14:27.827 INFO [0] 127.0.0.1:34728 task reports connection:false
18650 Sep 22 23:14:27.827 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 Disconnected Disconnected Disconnected
18651 Sep 22 23:14:27.827 INFO [0] 127.0.0.1:34728 task reports offline
18652 Sep 22 23:14:27.827 INFO [1] 127.0.0.1:61240 task reports connection:false
18653 Sep 22 23:14:27.827 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 Disconnected Disconnected Disconnected
18654 Sep 22 23:14:27.827 INFO [1] 127.0.0.1:61240 task reports offline
18655 Sep 22 23:14:27.827 INFO Downstairs has completed Negotiation, task: proc
18656 Sep 22 23:14:27.827 INFO [2] 127.0.0.1:49345 task reports connection:false
18657 Sep 22 23:14:27.827 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 Disconnected Disconnected Disconnected
18658 Sep 22 23:14:27.827 INFO [2] 127.0.0.1:49345 task reports offline
18659 Sep 22 23:14:27.827 WARN [1] pm_task rx.recv() is None
18660 Sep 22 23:14:27.827 WARN [2] pm_task rx.recv() is None
18661 Sep 22 23:14:27.827 INFO Current flush_numbers [0..12]: [0, 0]
18662 Sep 22 23:14:27.827 INFO [0] downstairs client at 127.0.0.1:34728 has UUID a0901c3f-7fbf-40b8-85c2-142a27a641c8
18663 Sep 22 23:14:27.827 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a0901c3f-7fbf-40b8-85c2-142a27a641c8, encrypted: true, database_read_version: 1, database_write_version: 1 }
18664 Sep 22 23:14:27.828 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 WaitActive WaitActive WaitActive
18665 Sep 22 23:14:27.828 INFO [1] downstairs client at 127.0.0.1:61240 has UUID e45251b5-0344-4afb-bb50-e9e4af673e5f
18666 Sep 22 23:14:27.828 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e45251b5-0344-4afb-bb50-e9e4af673e5f, encrypted: true, database_read_version: 1, database_write_version: 1 }
18667 Sep 22 23:14:27.828 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 WaitActive WaitActive WaitActive
18668 Sep 22 23:14:27.828 INFO [2] downstairs client at 127.0.0.1:49345 has UUID c2d08264-7ded-412a-9acb-ca0bc576793e
18669 Sep 22 23:14:27.828 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c2d08264-7ded-412a-9acb-ca0bc576793e, encrypted: true, database_read_version: 1, database_write_version: 1 }
18670 Sep 22 23:14:27.828 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 WaitActive WaitActive WaitActive
18671 Sep 22 23:14:27.828 INFO Downstairs has completed Negotiation, task: proc
18672 Sep 22 23:14:27.828 INFO Current flush_numbers [0..12]: [1, 1]
18673 Sep 22 23:14:27.828 INFO Downstairs has completed Negotiation, task: proc
18674 Sep 22 23:14:27.828 INFO Current flush_numbers [0..12]: [0, 0]
18675 Sep 22 23:14:27.828 INFO current number of open files limit 65536 is already the maximum
18676 Sep 22 23:14:27.828 DEBG [2] Read already AckReady 1002, : downstairs
18677 Sep 22 23:14:27.828 INFO Opened existing region file "/tmp/downstairs-CtoLca49/region.json"
18678 Sep 22 23:14:27.828 INFO Database read version 1
18679 Sep 22 23:14:27.828 INFO Database write version 1
18680 Sep 22 23:14:27.828 DEBG up_ds_listen was notified
18681 Sep 22 23:14:27.828 DEBG up_ds_listen process 1002
18682 Sep 22 23:14:27.828 DEBG [A] ack job 1002:3, : downstairs
18683 Sep 22 23:14:27.828 INFO Current flush_numbers [0..12]: [1, 1]
18684 Sep 22 23:14:27.828 INFO Downstairs has completed Negotiation, task: proc
18685 Sep 22 23:14:27.828 INFO Downstairs has completed Negotiation, task: proc
186862023-09-22T23:14:27.829ZINFOcrucible: [0] 77ae10ed-b83f-4607-b62b-b92e32be1f1c (d1991615-52c2-49f2-91b4-d8c5e990be50) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
18687 {Sep 22 23:14:27.829 INFO Current flush_numbers [0..12]: [1, 1]
18688 "msg":"[0] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.829174622Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","Sep 22 23:14:27.829 DEBG up_ds_listen checked 1 jobs, back to waiting
18689 pid":4301}
186902023-09-22T23:14:27.829ZWARNcrucible: [0] new RM replaced this: None
18691 {"msg":"[0] Starts reconcile loop","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.829298958Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301Sep 22 23:14:27.829 INFO Downstairs has completed Negotiation, task: proc
18692 }
186932023-09-22T23:14:27.829ZINFOcrucible: [1] 77ae10ed-b83f-4607-b62b-b92e32be1f1c (d1991615-52c2-49f2-91b4-d8c5e990be50) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
186942023-09-22T23:14:27.829ZINFOcrucible: [1] Transition from WaitActive to WaitQuorum
186952023-09-22T23:14:27.829ZWARNcrucible: [1] new RM replaced this: None
186962023-09-22T23:14:27.829ZINFOcrucible: [1] Starts reconcile loop
186972023-09-22T23:14:27.829ZINFOcrucible: [2] 77ae10ed-b83f-4607-b62b-b92e32be1f1c (d1991615-52c2-49f2-91b4-d8c5e990be50) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
18698 Sep 22 23:14:27.829 INFO [0] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (bfec8e34-60e1-4c3f-9196-b8cad2cf6289) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
18699 {"msg":"[2] Transition from WaitActive to WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:14:27.829 INFO [0] Transition from WaitActive to WaitQuorum
18700 ,"time":"2023-09-22T23:14:27.829518683Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18701 {"msg":"Sep 22 23:14:27.829 WARN [0] new RM replaced this: None
18702 [2] new RM replaced this: None","v":0,"name":"crucible","level":40,"time":"2023-09-22T23:14:27.829558923Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18703 {"msg":"Sep 22 23:14:27.829 INFO [0] Starts reconcile loop
18704 [2] Starts reconcile loop","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.829596145Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
187052023-09-22T23:14:27.829ZINFOcrucible: [0] 127.0.0.1:42670 task reports connection:true
18706 {"msg":"77ae10ed-b83f-4607-b62b-b92e32be1f1c WaitQuorum WaitQuorum WaitQuorum","v":0,"name":"crucible","level":30Sep 22 23:14:27.829 INFO [1] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (bfec8e34-60e1-4c3f-9196-b8cad2cf6289) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
18707 ,"time":"2023-09-22T23:14:27.8296677Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18708 {"msg":"[0]R flush_numbers: [0, 0]Sep 22 23:14:27.829 INFO [1] Transition from WaitActive to WaitQuorum
18709 ","v":0,"name":"crucible","level":30,"time":"Sep 22 23:14:27.829 WARN [1] new RM replaced this: None
18710 2023-09-22T23:14:27.829709698Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18711 {"msg":"[0]R generation: [0, 0]","v":0,"name":"crucible","level":30,"Sep 22 23:14:27.829 INFO [1] Starts reconcile loop
18712 time":"2023-09-22T23:14:27.829743274Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
187132023-09-22T23:14:27.829ZINFOcrucible: [0]R dirty: [false, false]
187142023-09-22T23:14:27.829ZINFOcrucible: [1]R flush_numbers: [0, 0]
18715 {"msg":"The guest has finished waiting for activation
18716 [1]R generation: [0, 0]Sep 22 23:14:27.829 INFO [2] d173eb3a-6ff3-4b10-89cb-971e46f2c897 (bfec8e34-60e1-4c3f-9196-b8cad2cf6289) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
18717 ","v":0,"name":"crucible","level":30,"time":"Sep 22 23:14:27.829 INFO [2] Transition from WaitActive to WaitQuorum
18718 2023-09-22T23:14:27.829850321Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18719 {"msg":"[1]R dirty: [false, false]","v":0,"name"Sep 22 23:14:27.829 WARN [2] new RM replaced this: None
18720 :"crucible","level":30,"time":"2023-09-22T23:14:27.829890738Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18721 {"msg":"Sep 22 23:14:27.829 INFO [2] Starts reconcile loop
18722 [2]R flush_numbers: [0, 0]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.829926564Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18723 {"msg":"[2]R generation: [0, 0]","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.82995768Z","hostname":"Sep 22 23:14:27.829 INFO current number of open files limit 65536 is already the maximum
18724 ip-10-150-1-74.us-west-2.compute.internal"Sep 22 23:14:27.829 INFO [0] 127.0.0.1:34728 task reports connection:true
18725 ,"pid":4301}
18726 {"msg":"[2]R dirty: [false, false]","v":0,"name":"crucible","level":30Sep 22 23:14:27.829 INFO Opened existing region file "/tmp/downstairs-MJYSYpkx/region.json"
18727 ,Sep 22 23:14:27.829 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 WaitQuorum WaitQuorum WaitQuorum
18728 "Sep 22 23:14:27.830 INFO Database read version 1
18729 time":"2023-09-22T23:14:27.830003224Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","Sep 22 23:14:27.830 INFO Database write version 1
18730 pid":4301}
18731 {"msg":"Max found gen is 1","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO [0]R flush_numbers: [1, 1]
18732 ,"time":"2023-09-22T23:14:27.830048642Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18733 Sep 22 23:14:27.830 INFO [0]R generation: [1, 1]
18734 {"msg":"Generation requested: 1 >= found:1","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO [0]R dirty: [false, false]
18735 ,"time":"2023-09-22T23:14:27.830082903Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18736 {"msgSep 22 23:14:27.830 INFO [1]R flush_numbers: [1, 1]
18737 ":"Next flush: 1","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO [1]R generation: [1, 1]
18738 ,"time":"2023-09-22T23:14:27.830119033Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18739 Sep 22 23:14:27.830 INFO [1]R dirty: [false, false]
18740 {"msg":"All extents match","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO [2]R flush_numbers: [1, 1]
18741 ,"time":"2023-09-22T23:14:27.830152662Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18742 {"msg":"Sep 22 23:14:27.830 INFO [2]R generation: [1, 1]
18743 No downstairs repair required","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO [2]R dirty: [false, false]
18744 ,"time":"2023-09-22T23:14:27.830194341Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18745 Sep 22 23:14:27.830 INFO Max found gen is 2
18746 {"msg":"No initial repair work was required","v":0,"name":"crucible","level":30,"Sep 22 23:14:27.830 INFO Generation requested: 3 >= found:2
18747 time":"2023-09-22T23:14:27.830231957Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18748 {"msg":"Sep 22 23:14:27.830 INFO Next flush: 2
18749 Set Downstairs and Upstairs active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.830267567Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301Sep 22 23:14:27.830 INFO All extents match
18750 }
18751 {"msg":"77ae10ed-b83f-4607-b62b-b92e32be1f1c is now active with session: d1991615-52c2-49f2-91b4-d8c5e990be50","v":0,"name":"Sep 22 23:14:27.830 INFO No downstairs repair required
18752 crucible","level":30,"time":"2023-09-22T23:14:27.830311363Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","Sep 22 23:14:27.830 INFO No initial repair work was required
18753 pid":4301}
18754 {"msg":"77ae10ed-b83f-4607-b62b-b92e32be1f1c Set Active after no repair"Sep 22 23:14:27.830 INFO Set Downstairs and Upstairs active
18755 ,"v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.830353091Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18756 {"msg":"Notify all downstairs, region set compare is done.","v":Sep 22 23:14:27.830 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 is now active with session: bfec8e34-60e1-4c3f-9196-b8cad2cf6289
18757 0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.830388932Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal"Sep 22 23:14:27.830 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 Set Active after no repair
18758 ,"pid":4301}
18759 {"msg":"Set check for repair","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO Notify all downstairs, region set compare is done.
18760 ,"time":"2023-09-22T23:14:27.830423629Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18761 {"msg":"[1] 127.0.0.1:59248 task reports connection:true","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO Set check for repair
18762 ,"time":"2023-09-22T23:14:27.830455403Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
187632023-09-22T23:14:27.830ZINFOcrucible: 77ae10ed-b83f-4607-b62b-b92e32be1f1c Active Active Active
18764 {"msg":"Set check for repair","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO [1] 127.0.0.1:61240 task reports connection:true
18765 ,"time":"2023-09-22T23:14:27.830520587Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18766 {"msg":"[2] 127.0.0.1:39733 task reports connection:true","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 Active Active Active
18767 ,"time":"2023-09-22T23:14:27.830559092Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18768 {Sep 22 23:14:27.830 INFO Set check for repair
18769 "msg":"77ae10ed-b83f-4607-b62b-b92e32be1f1c Active Active Active","v":0,"name":"crucible","level":30,"time":"2023-09-22T23:14:27.830602663Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18770 {"msg":"Set check for repair","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO [2] 127.0.0.1:49345 task reports connection:true
18771 ,"time":"2023-09-22T23:14:27.830639423Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18772 {"msg":"[0] received reconcile message","v":0,"nameSep 22 23:14:27.830 INFO d173eb3a-6ff3-4b10-89cb-971e46f2c897 Active Active Active
18773 ":"crucible","level":30,"time":"2023-09-22T23:14:27.830679811Z","hostname":"Sep 22 23:14:27.830 INFO Set check for repair
18774 ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
187752023-09-22T23:14:27.830ZINFOcrucible: [0] All repairs completed, exit
18776 {"msg":"[0] Starts cmd_loop","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO [0] received reconcile message
18777 ,"time":"2023-09-22T23:14:27.830760379Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18778 {"msg":"[1] received reconcile message","v":0,"name":"crucible","Sep 22 23:14:27.830 INFO [0] All repairs completed, exit
18779 level":30,"time":"2023-09-22T23:14:27.830806904Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301Sep 22 23:14:27.830 INFO [0] Starts cmd_loop
18780 }
187812023-09-22T23:14:27.830ZINFOcrucible: [1] All repairs completed, exit
18782 {"msg":"[1] Starts cmd_loop","v":0,"name":"crucible","level":30,"time":"Sep 22 23:14:27.830 INFO [1] received reconcile message
18783 2023-09-22T23:14:27.830882621Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18784 {"msg":"[2] received reconcile message","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO [1] All repairs completed, exit
18785 ,"time":"2023-09-22T23:14:27.830927951Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18786 Sep 22 23:14:27.830 INFO [1] Starts cmd_loop
187872023-09-22T23:14:27.830ZINFOcrucible: [2] All repairs completed, exit
18788 {"msg":"[2] Starts cmd_loop","v":0,"name":"crucible","level":30Sep 22 23:14:27.830 INFO [2] received reconcile message
18789 ,"time":"2023-09-22T23:14:27.83100805Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301}
18790 Sep 22 23:14:27.831 INFO [2] All repairs completed, exit
18791 Sep 22 23:14:27.831 INFO [2] Starts cmd_loop
18792 The guest has finished waiting for activation
18793 Sep 22 23:14:27.831 DEBG IO Read 1000 has deps []
18794 Sep 22 23:14:27.832 INFO UUID: 8d8f9571-f1d4-4ccf-8009-092f7611cbf9
18795 Sep 22 23:14:27.832 INFO Blocks per extent:5 Total Extents: 2
18796 Sep 22 23:14:27.832 INFO Crucible Version: Crucible Version: 0.0.1
18797 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18798 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18799 rustc: 1.70.0 stable x86_64-unknown-illumos
18800 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18801 Sep 22 23:14:27.832 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18802 Sep 22 23:14:27.832 INFO Using address: 127.0.0.1:39989, task: main
18803 Sep 22 23:14:27.832 INFO Repair listens on 127.0.0.1:0, task: repair
18804 Sep 22 23:14:27.832 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:49838, task: repair
18805 Sep 22 23:14:27.832 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:49838, task: repair
18806 Sep 22 23:14:27.832 INFO listening, local_addr: 127.0.0.1:49838, task: repair
18807 Sep 22 23:14:27.832 INFO UUID: c0ea6320-dc19-4d5a-bcd9-72a0429ad856
18808 Sep 22 23:14:27.832 INFO Blocks per extent:5 Total Extents: 2
18809 Sep 22 23:14:27.833 INFO Crucible Version: Crucible Version: 0.0.1
18810 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18811 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18812 rustc: 1.70.0 stable x86_64-unknown-illumos
18813 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18814 Sep 22 23:14:27.833 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18815 Sep 22 23:14:27.833 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:49838, task: repair
18816 Sep 22 23:14:27.833 INFO Using address: 127.0.0.1:43804, task: main
18817 Sep 22 23:14:27.833 INFO Using repair address: 127.0.0.1:49838, task: main
18818 Sep 22 23:14:27.833 INFO No SSL acceptor configured, task: main
18819 Sep 22 23:14:27.833 INFO Repair listens on 127.0.0.1:0, task: repair
18820 Sep 22 23:14:27.833 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36309, task: repair
18821 Sep 22 23:14:27.833 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36309, task: repair
18822 Sep 22 23:14:27.833 INFO listening, local_addr: 127.0.0.1:36309, task: repair
18823 Sep 22 23:14:27.833 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36309, task: repair
18824 Sep 22 23:14:27.833 INFO Using repair address: 127.0.0.1:36309, task: main
18825 Sep 22 23:14:27.833 INFO No SSL acceptor configured, task: main
18826 Sep 22 23:14:27.833 DEBG Read :1000 deps:[] res:true
18827 Sep 22 23:14:27.833 INFO current number of open files limit 65536 is already the maximum
18828 Sep 22 23:14:27.834 INFO Created new region file "/tmp/downstairs-mp86XavY/region.json"
18829 Sep 22 23:14:27.834 INFO current number of open files limit 65536 is already the maximum
18830 Sep 22 23:14:27.834 INFO Created new region file "/tmp/downstairs-KmUjvjT5/region.json"
18831 Sep 22 23:14:27.834 DEBG Read :1000 deps:[] res:true
18832 Sep 22 23:14:27.835 DEBG Read :1000 deps:[] res:true
18833 Sep 22 23:14:27.836 DEBG [0] Read AckReady 1000, : downstairs
18834 Sep 22 23:14:27.836 DEBG [1] Read already AckReady 1000, : downstairs
18835 Sep 22 23:14:27.836 INFO current number of open files limit 65536 is already the maximum
18836 Sep 22 23:14:27.836 INFO Opened existing region file "/tmp/downstairs-mp86XavY/region.json"
18837 Sep 22 23:14:27.836 INFO Database read version 1
18838 Sep 22 23:14:27.836 INFO Database write version 1
18839 Sep 22 23:14:27.836 DEBG [2] Read already AckReady 1000, : downstairs
18840 Sep 22 23:14:27.836 DEBG up_ds_listen was notified
18841 Sep 22 23:14:27.837 DEBG up_ds_listen process 1000
18842 Sep 22 23:14:27.837 DEBG [A] ack job 1000:1, : downstairs
18843 Sep 22 23:14:27.837 INFO current number of open files limit 65536 is already the maximum
18844 Sep 22 23:14:27.837 INFO Opened existing region file "/tmp/downstairs-KmUjvjT5/region.json"
18845 Sep 22 23:14:27.837 INFO Database read version 1
18846 Sep 22 23:14:27.837 INFO Database write version 1
18847 Sep 22 23:14:27.837 DEBG up_ds_listen checked 1 jobs, back to waiting
18848 Sep 22 23:14:27.837 DEBG IO Read 1000 has deps []
18849 Sep 22 23:14:27.838 INFO UUID: 83e10676-7666-4b88-b22b-93d3b70ce9e9
18850 Sep 22 23:14:27.838 INFO Blocks per extent:5 Total Extents: 2
18851 Sep 22 23:14:27.838 INFO Crucible Version: Crucible Version: 0.0.1
18852 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18853 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18854 rustc: 1.70.0 stable x86_64-unknown-illumos
18855 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18856 Sep 22 23:14:27.838 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18857 Sep 22 23:14:27.838 INFO Using address: 127.0.0.1:52387, task: main
18858 Sep 22 23:14:27.838 INFO Repair listens on 127.0.0.1:0, task: repair
18859 Sep 22 23:14:27.838 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:40949, task: repair
18860 Sep 22 23:14:27.838 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:40949, task: repair
18861 Sep 22 23:14:27.838 INFO listening, local_addr: 127.0.0.1:40949, task: repair
18862 Sep 22 23:14:27.838 INFO UUID: e00f36ef-b7ad-4a98-871a-5cea6209adc6
18863 Sep 22 23:14:27.838 INFO Blocks per extent:5 Total Extents: 2
18864 Sep 22 23:14:27.838 DEBG Read :1000 deps:[] res:true
18865 Sep 22 23:14:27.838 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:40949, task: repair
18866 Sep 22 23:14:27.838 INFO Crucible Version: Crucible Version: 0.0.1
18867 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18868 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18869 rustc: 1.70.0 stable x86_64-unknown-illumos
18870 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18871 Sep 22 23:14:27.838 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18872 Sep 22 23:14:27.838 INFO Using repair address: 127.0.0.1:40949, task: main
18873 Sep 22 23:14:27.838 INFO Using address: 127.0.0.1:60915, task: main
18874 Sep 22 23:14:27.838 INFO No SSL acceptor configured, task: main
18875 Sep 22 23:14:27.839 INFO Upstairs starts
18876 Sep 22 23:14:27.839 INFO Crucible Version: BuildInfo {
18877 version: "0.0.1",
18878 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
18879 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
18880 git_branch: "main",
18881 rustc_semver: "1.70.0",
18882 rustc_channel: "stable",
18883 rustc_host_triple: "x86_64-unknown-illumos",
18884 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
18885 cargo_triple: "x86_64-unknown-illumos",
18886 debug: true,
18887 opt_level: 0,
18888 }
18889 Sep 22 23:14:27.839 INFO Upstairs <-> Downstairs Message Version: 4
18890 Sep 22 23:14:27.839 INFO Crucible stats registered with UUID: 6ec3fe13-11cc-4835-9539-e92f50952191
18891 Sep 22 23:14:27.839 INFO Crucible 6ec3fe13-11cc-4835-9539-e92f50952191 has session id: c5784896-58d8-45b1-afa5-9dc12f3cc620
18892 Sep 22 23:14:27.839 DEBG Read :1000 deps:[] res:true
18893 Sep 22 23:14:27.839 INFO listening on 127.0.0.1:0, task: main
18894 Sep 22 23:14:27.839 INFO listening on 127.0.0.1:0, task: main
18895 Sep 22 23:14:27.839 INFO Repair listens on 127.0.0.1:0, task: repair
18896 Sep 22 23:14:27.839 INFO listening on 127.0.0.1:0, task: main
18897 Sep 22 23:14:27.839 INFO [0] connecting to 127.0.0.1:52814, looper: 0
18898 Sep 22 23:14:27.839 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60859, task: repair
18899 Sep 22 23:14:27.839 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60859, task: repair
18900 Sep 22 23:14:27.839 INFO listening, local_addr: 127.0.0.1:60859, task: repair
18901 Sep 22 23:14:27.839 INFO [1] connecting to 127.0.0.1:39989, looper: 1
18902 Sep 22 23:14:27.839 DEBG Read :1000 deps:[] res:true
18903 Sep 22 23:14:27.839 INFO [2] connecting to 127.0.0.1:52387, looper: 2
18904 Sep 22 23:14:27.839 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60859, task: repair
18905 Sep 22 23:14:27.839 INFO Using repair address: 127.0.0.1:60859, task: main
18906 Sep 22 23:14:27.839 INFO up_listen starts, task: up_listen
18907 Sep 22 23:14:27.839 INFO No SSL acceptor configured, task: main
18908 Sep 22 23:14:27.839 INFO Wait for all three downstairs to come online
18909 Sep 22 23:14:27.839 INFO Flush timeout: 0.5
18910 Sep 22 23:14:27.839 INFO current number of open files limit 65536 is already the maximum
18911 Sep 22 23:14:27.839 INFO accepted connection from 127.0.0.1:50594, task: main
18912 Sep 22 23:14:27.839 INFO Created new region file "/tmp/downstairs-Y7vkRtTl/region.json"
18913 Sep 22 23:14:27.839 INFO accepted connection from 127.0.0.1:37331, task: main
18914 Sep 22 23:14:27.839 INFO accepted connection from 127.0.0.1:36073, task: main
18915 Sep 22 23:14:27.839 INFO [0] 6ec3fe13-11cc-4835-9539-e92f50952191 looper connected, looper: 0
18916 Sep 22 23:14:27.840 INFO [0] Proc runs for 127.0.0.1:52814 in state New
18917 Sep 22 23:14:27.840 INFO [1] 6ec3fe13-11cc-4835-9539-e92f50952191 looper connected, looper: 1
18918 Sep 22 23:14:27.840 INFO [1] Proc runs for 127.0.0.1:39989 in state New
18919 Sep 22 23:14:27.840 INFO [2] 6ec3fe13-11cc-4835-9539-e92f50952191 looper connected, looper: 2
18920 test test::integration_test_two_layers_write_unwritten ... ok
18921 Sep 22 23:14:27.840 INFO [2] Proc runs for 127.0.0.1:52387 in state New
18922 test test::integration_test_upstairs_read_only_rejects_write ... ok
18923 Sep 22 23:14:27.840 INFO current number of open files limit 65536 is already the maximum
18924 Sep 22 23:14:27.840 INFO current number of open files limit 65536 is already the maximum
18925 Sep 22 23:14:27.840 INFO Created new region file "/tmp/downstairs-vJkzDs9L/region.json"
18926 Sep 22 23:14:27.840 INFO Created new region file "/tmp/downstairs-NPgDrsYr/region.json"
18927 Sep 22 23:14:27.840 INFO current number of open files limit 65536 is already the maximum
18928 Sep 22 23:14:27.840 INFO Created new region file "/tmp/downstairs-rxcAS3Gc/region.json"
18929 Sep 22 23:14:27.843 INFO current number of open files limit 65536 is already the maximum
18930 Sep 22 23:14:27.843 INFO Opened existing region file "/tmp/downstairs-Y7vkRtTl/region.json"
18931 Sep 22 23:14:27.843 INFO Database read version 1
18932 Sep 22 23:14:27.843 INFO Database write version 1
18933 Sep 22 23:14:27.843 DEBG [0] Read AckReady 1000, : downstairs
18934 Sep 22 23:14:27.844 INFO UUID: 7c134907-732f-4421-b0bd-357e07862d5c
18935 Sep 22 23:14:27.844 INFO Blocks per extent:5 Total Extents: 2
18936 Sep 22 23:14:27.844 INFO Crucible Version: Crucible Version: 0.0.1
18937 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
18938 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
18939 rustc: 1.70.0 stable x86_64-unknown-illumos
18940 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
18941 Sep 22 23:14:27.844 INFO Upstairs <-> Downstairs Message Version: 4, task: main
18942 Sep 22 23:14:27.844 INFO Using address: 127.0.0.1:42061, task: main
18943 Sep 22 23:14:27.845 INFO current number of open files limit 65536 is already the maximum
18944 Sep 22 23:14:27.845 INFO Opened existing region file "/tmp/downstairs-NPgDrsYr/region.json"
18945 Sep 22 23:14:27.845 INFO Database read version 1
18946 Sep 22 23:14:27.845 INFO Repair listens on 127.0.0.1:0, task: repair
18947 Sep 22 23:14:27.845 INFO Database write version 1
18948 Sep 22 23:14:27.845 INFO current number of open files limit 65536 is already the maximum
18949 Sep 22 23:14:27.845 INFO current number of open files limit 65536 is already the maximum
18950 Sep 22 23:14:27.845 INFO Opened existing region file "/tmp/downstairs-rxcAS3Gc/region.json"
18951 Sep 22 23:14:27.845 INFO Opened existing region file "/tmp/downstairs-vJkzDs9L/region.json"
18952 Sep 22 23:14:27.845 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38359, task: repair
18953 Sep 22 23:14:27.845 INFO Database read version 1
18954 Sep 22 23:14:27.845 INFO Database read version 1
18955 Sep 22 23:14:27.845 INFO Database write version 1
18956 Sep 22 23:14:27.845 INFO Database write version 1
18957 Sep 22 23:14:27.845 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38359, task: repair
18958 Sep 22 23:14:27.845 INFO listening, local_addr: 127.0.0.1:38359, task: repair
18959 Sep 22 23:14:27.845 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38359, task: repair
18960 Sep 22 23:14:27.845 INFO Using repair address: 127.0.0.1:38359, task: main
18961 Sep 22 23:14:27.845 INFO No SSL acceptor configured, task: main
18962 Sep 22 23:14:27.846 DEBG [1] Read already AckReady 1000, : downstairs
18963 Sep 22 23:14:27.846 INFO Upstairs starts
18964 Sep 22 23:14:27.847 INFO Crucible Version: BuildInfo {
18965 version: "0.0.1",
18966 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
18967 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
18968 git_branch: "main",
18969 rustc_semver: "1.70.0",
18970 rustc_channel: "stable",
18971 rustc_host_triple: "x86_64-unknown-illumos",
18972 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
18973 cargo_triple: "x86_64-unknown-illumos",
18974 debug: true,
18975 opt_level: 0,
18976 }
18977 Sep 22 23:14:27.847 INFO Upstairs <-> Downstairs Message Version: 4
18978 Sep 22 23:14:27.847 INFO Crucible stats registered with UUID: 63bdb731-3b56-40e2-821a-221152f1277d
18979 Sep 22 23:14:27.847 INFO Crucible 63bdb731-3b56-40e2-821a-221152f1277d has session id: 230ff5c2-9acf-42b9-b693-e86cdd9897cd
18980 Sep 22 23:14:27.847 INFO listening on 127.0.0.1:0, task: main
18981 Sep 22 23:14:27.847 INFO listening on 127.0.0.1:0, task: main
18982 Sep 22 23:14:27.847 INFO listening on 127.0.0.1:0, task: main
18983 Sep 22 23:14:27.847 INFO [0] connecting to 127.0.0.1:43804, looper: 0
18984 Sep 22 23:14:27.847 INFO [1] connecting to 127.0.0.1:60915, looper: 1
18985 Sep 22 23:14:27.847 INFO [2] connecting to 127.0.0.1:42061, looper: 2
18986 Sep 22 23:14:27.847 INFO up_listen starts, task: up_listen
18987 Sep 22 23:14:27.847 INFO Wait for all three downstairs to come online
18988 Sep 22 23:14:27.847 INFO Flush timeout: 0.5
18989 Sep 22 23:14:27.848 INFO accepted connection from 127.0.0.1:63241, task: main
18990 Sep 22 23:14:27.848 INFO accepted connection from 127.0.0.1:55431, task: main
18991 Sep 22 23:14:27.848 INFO accepted connection from 127.0.0.1:35399, task: main
18992 Sep 22 23:14:27.848 INFO [0] 63bdb731-3b56-40e2-821a-221152f1277d looper connected, looper: 0
18993 Sep 22 23:14:27.848 INFO [0] Proc runs for 127.0.0.1:43804 in state New
18994 Sep 22 23:14:27.848 INFO UUID: 8e0311d9-6cee-4187-8ec5-c1067f73eb45
18995 Sep 22 23:14:27.848 INFO [1] 63bdb731-3b56-40e2-821a-221152f1277d looper connected, looper: 1
18996 Sep 22 23:14:27.848 INFO UUID: 81b401c7-cdba-4739-be96-9800dad0d240
18997 Sep 22 23:14:27.848 INFO UUID: 30596aa6-8959-4f8c-98b8-ac91fdc30fc1
18998 Sep 22 23:14:27.848 INFO Blocks per extent:5 Total Extents: 2
18999 Sep 22 23:14:27.848 INFO Blocks per extent:5 Total Extents: 2
19000 Sep 22 23:14:27.848 INFO [1] Proc runs for 127.0.0.1:60915 in state New
19001 Sep 22 23:14:27.848 DEBG [2] Read already AckReady 1000, : downstairs
19002 Sep 22 23:14:27.848 INFO Crucible Version: Crucible Version: 0.0.1
19003 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19004 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19005 rustc: 1.70.0 stable x86_64-unknown-illumos
19006 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19007 Sep 22 23:14:27.848 INFO Blocks per extent:5 Total Extents: 2
19008 Sep 22 23:14:27.848 INFO Crucible Version: Crucible Version: 0.0.1
19009 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19010 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19011 rustc: 1.70.0 stable x86_64-unknown-illumos
19012 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19013 Sep 22 23:14:27.848 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19014 Sep 22 23:14:27.848 INFO Using address: 127.0.0.1:52905, task: main
19015 Sep 22 23:14:27.848 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19016 Sep 22 23:14:27.848 INFO Using address: 127.0.0.1:47986, task: main
19017 Sep 22 23:14:27.848 DEBG up_ds_listen was notified
19018 Sep 22 23:14:27.848 INFO [2] 63bdb731-3b56-40e2-821a-221152f1277d looper connected, looper: 2
19019 Sep 22 23:14:27.848 DEBG up_ds_listen process 1000
19020 Sep 22 23:14:27.848 INFO Crucible Version: Crucible Version: 0.0.1
19021 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19022 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19023 rustc: 1.70.0 stable x86_64-unknown-illumos
19024 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19025 Sep 22 23:14:27.848 INFO [2] Proc runs for 127.0.0.1:42061 in state New
19026 Sep 22 23:14:27.848 DEBG [A] ack job 1000:1, : downstairs
19027 Sep 22 23:14:27.848 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19028 Sep 22 23:14:27.848 INFO Using address: 127.0.0.1:45648, task: main
19029 Sep 22 23:14:27.849 INFO Connection request from 63bdb731-3b56-40e2-821a-221152f1277d with version 4, task: proc
19030 Sep 22 23:14:27.849 INFO upstairs UpstairsConnection { upstairs_id: 63bdb731-3b56-40e2-821a-221152f1277d, session_id: 089624e5-8c38-462d-b605-f332f3805c9d, gen: 1 } connected, version 4, task: proc
19031 Sep 22 23:14:27.849 INFO Repair listens on 127.0.0.1:0, task: repair
19032 Sep 22 23:14:27.849 DEBG up_ds_listen checked 1 jobs, back to waiting
19033 Sep 22 23:14:27.849 INFO Connection request from 63bdb731-3b56-40e2-821a-221152f1277d with version 4, task: proc
19034 Sep 22 23:14:27.849 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57508, task: repair
19035 Sep 22 23:14:27.849 INFO upstairs UpstairsConnection { upstairs_id: 63bdb731-3b56-40e2-821a-221152f1277d, session_id: 089624e5-8c38-462d-b605-f332f3805c9d, gen: 1 } connected, version 4, task: proc
19036 Sep 22 23:14:27.849 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57508, task: repair
19037 Sep 22 23:14:27.849 INFO Repair listens on 127.0.0.1:0, task: repair
19038 Sep 22 23:14:27.849 INFO listening, local_addr: 127.0.0.1:57508, task: repair
19039 Sep 22 23:14:27.849 INFO Connection request from 63bdb731-3b56-40e2-821a-221152f1277d with version 4, task: proc
19040 Sep 22 23:14:27.849 INFO upstairs UpstairsConnection { upstairs_id: 63bdb731-3b56-40e2-821a-221152f1277d, session_id: 089624e5-8c38-462d-b605-f332f3805c9d, gen: 1 } connected, version 4, task: proc
19041 Sep 22 23:14:27.849 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36948, task: repair
19042 Sep 22 23:14:27.849 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57508, task: repair
19043 Sep 22 23:14:27.849 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36948, task: repair
19044 Sep 22 23:14:27.849 INFO Using repair address: 127.0.0.1:57508, task: main
19045 Sep 22 23:14:27.849 INFO No SSL acceptor configured, task: main
19046 Sep 22 23:14:27.849 INFO Repair listens on 127.0.0.1:0, task: repair
19047 Sep 22 23:14:27.849 INFO listening, local_addr: 127.0.0.1:36948, task: repair
19048 The guest has requested activation
19049 Sep 22 23:14:27.849 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58541, task: repair
19050 Sep 22 23:14:27.849 INFO current number of open files limit 65536 is already the maximum
19051 Sep 22 23:14:27.849 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58541, task: repair
19052 Sep 22 23:14:27.849 INFO listening, local_addr: 127.0.0.1:58541, task: repair
19053 Sep 22 23:14:27.849 INFO [0] 63bdb731-3b56-40e2-821a-221152f1277d (089624e5-8c38-462d-b605-f332f3805c9d) New New New ds_transition to WaitActive
19054 Sep 22 23:14:27.849 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36948, task: repair
19055 Sep 22 23:14:27.849 INFO [0] Transition from New to WaitActive
19056 Sep 22 23:14:27.849 INFO Created new region file "/tmp/downstairs-b4bTiF9L/region.json"
19057 Sep 22 23:14:27.849 INFO Using repair address: 127.0.0.1:36948, task: main
19058 Sep 22 23:14:27.849 INFO No SSL acceptor configured, task: main
19059 Sep 22 23:14:27.849 INFO [1] 63bdb731-3b56-40e2-821a-221152f1277d (089624e5-8c38-462d-b605-f332f3805c9d) WaitActive New New ds_transition to WaitActive
19060 Sep 22 23:14:27.849 INFO [1] Transition from New to WaitActive
19061 Sep 22 23:14:27.849 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58541, task: repair
19062 Sep 22 23:14:27.849 INFO Using repair address: 127.0.0.1:58541, task: main
19063 Sep 22 23:14:27.849 INFO [2] 63bdb731-3b56-40e2-821a-221152f1277d (089624e5-8c38-462d-b605-f332f3805c9d) WaitActive WaitActive New ds_transition to WaitActive
19064 Sep 22 23:14:27.849 DEBG IO Flush 1001 has deps [JobId(1000)]
19065 Sep 22 23:14:27.849 INFO No SSL acceptor configured, task: main
19066 Sep 22 23:14:27.850 INFO [2] Transition from New to WaitActive
19067 Sep 22 23:14:27.850 INFO 63bdb731-3b56-40e2-821a-221152f1277d active request set
19068 Sep 22 23:14:27.850 INFO [0] received activate with gen 1
19069 Sep 22 23:14:27.850 INFO [0] client got ds_active_rx, promote! session 089624e5-8c38-462d-b605-f332f3805c9d
19070 Sep 22 23:14:27.850 INFO current number of open files limit 65536 is already the maximum
19071 Sep 22 23:14:27.850 INFO Connection request from 6ec3fe13-11cc-4835-9539-e92f50952191 with version 4, task: proc
19072 Sep 22 23:14:27.850 INFO upstairs UpstairsConnection { upstairs_id: 6ec3fe13-11cc-4835-9539-e92f50952191, session_id: 2adb6f46-b730-4b58-84d3-87af2b7f60e0, gen: 1 } connected, version 4, task: proc
19073 Sep 22 23:14:27.850 INFO [1] received activate with gen 1
19074 Sep 22 23:14:27.850 INFO [1] client got ds_active_rx, promote! session 089624e5-8c38-462d-b605-f332f3805c9d
19075 Sep 22 23:14:27.850 INFO Created new region file "/tmp/downstairs-zarv4usP/region.json"
19076 Sep 22 23:14:27.850 INFO Connection request from 6ec3fe13-11cc-4835-9539-e92f50952191 with version 4, task: proc
19077 Sep 22 23:14:27.850 INFO upstairs UpstairsConnection { upstairs_id: 6ec3fe13-11cc-4835-9539-e92f50952191, session_id: 2adb6f46-b730-4b58-84d3-87af2b7f60e0, gen: 1 } connected, version 4, task: proc
19078 Sep 22 23:14:27.850 INFO [2] received activate with gen 1
19079 Sep 22 23:14:27.850 INFO [2] client got ds_active_rx, promote! session 089624e5-8c38-462d-b605-f332f3805c9d
19080 Sep 22 23:14:27.850 INFO Connection request from 6ec3fe13-11cc-4835-9539-e92f50952191 with version 4, task: proc
19081 Sep 22 23:14:27.850 INFO upstairs UpstairsConnection { upstairs_id: 6ec3fe13-11cc-4835-9539-e92f50952191, session_id: 2adb6f46-b730-4b58-84d3-87af2b7f60e0, gen: 1 } connected, version 4, task: proc
19082 Sep 22 23:14:27.850 INFO UpstairsConnection { upstairs_id: 63bdb731-3b56-40e2-821a-221152f1277d, session_id: 089624e5-8c38-462d-b605-f332f3805c9d, gen: 1 } is now active (read-write)
19083 Sep 22 23:14:27.850 INFO listening on 127.0.0.1:0, task: main
19084 Sep 22 23:14:27.850 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:3
19085 Sep 22 23:14:27.850 WARN 6ec3fe13-11cc-4835-9539-e92f50952191 request to replace downstairs 127.0.0.1:52814 with 127.0.0.1:45648
19086 Sep 22 23:14:27.850 INFO UpstairsConnection { upstairs_id: 63bdb731-3b56-40e2-821a-221152f1277d, session_id: 089624e5-8c38-462d-b605-f332f3805c9d, gen: 1 } is now active (read-write)
19087 Sep 22 23:14:27.850 INFO 6ec3fe13-11cc-4835-9539-e92f50952191 found old target: 127.0.0.1:52814 at 0
19088 Sep 22 23:14:27.850 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:3
19089 Sep 22 23:14:27.850 INFO 6ec3fe13-11cc-4835-9539-e92f50952191 replacing old: 127.0.0.1:52814 at 0
19090 Sep 22 23:14:27.850 INFO [0] client skip 0 in process jobs because fault, : downstairs
19091 Sep 22 23:14:27.850 INFO [0] changed 0 jobs to fault skipped, : downstairs
19092 Sep 22 23:14:27.850 INFO [0] 6ec3fe13-11cc-4835-9539-e92f50952191 (2adb6f46-b730-4b58-84d3-87af2b7f60e0) New New New ds_transition to Replacing
19093 Sep 22 23:14:27.850 INFO UpstairsConnection { upstairs_id: 63bdb731-3b56-40e2-821a-221152f1277d, session_id: 089624e5-8c38-462d-b605-f332f3805c9d, gen: 1 } is now active (read-write)
19094 Sep 22 23:14:27.850 INFO [0] Transition from New to Replacing
19095 Sep 22 23:14:27.850 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:3
19096 Sep 22 23:14:27.851 DEBG up_ds_listen was notified
19097 Sep 22 23:14:27.851 DEBG up_ds_listen process 1001
19098 Sep 22 23:14:27.851 DEBG [A] ack job 1001:2, : downstairs
19099 Sep 22 23:14:27.851 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
19100 Sep 22 23:14:27.851 INFO [0] downstairs client at 127.0.0.1:43804 has UUID c0ea6320-dc19-4d5a-bcd9-72a0429ad856
19101 Sep 22 23:14:27.851 DEBG up_ds_listen checked 1 jobs, back to waiting
19102 Sep 22 23:14:27.851 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c0ea6320-dc19-4d5a-bcd9-72a0429ad856, encrypted: true, database_read_version: 1, database_write_version: 1 }
19103 Sep 22 23:14:27.851 INFO 63bdb731-3b56-40e2-821a-221152f1277d WaitActive WaitActive WaitActive
19104 Sep 22 23:14:27.851 INFO [1] downstairs client at 127.0.0.1:60915 has UUID e00f36ef-b7ad-4a98-871a-5cea6209adc6
19105 Sep 22 23:14:27.851 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e00f36ef-b7ad-4a98-871a-5cea6209adc6, encrypted: true, database_read_version: 1, database_write_version: 1 }
19106 Sep 22 23:14:27.851 INFO 63bdb731-3b56-40e2-821a-221152f1277d WaitActive WaitActive WaitActive
19107 Sep 22 23:14:27.851 INFO [2] downstairs client at 127.0.0.1:42061 has UUID 7c134907-732f-4421-b0bd-357e07862d5c
19108 Sep 22 23:14:27.851 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 7c134907-732f-4421-b0bd-357e07862d5c, encrypted: true, database_read_version: 1, database_write_version: 1 }
19109 Sep 22 23:14:27.851 INFO 63bdb731-3b56-40e2-821a-221152f1277d WaitActive WaitActive WaitActive
19110 Sep 22 23:14:27.851 DEBG IO Write 1002 has deps []
19111 Sep 22 23:14:27.851 DEBG up_ds_listen was notified
19112 Sep 22 23:14:27.851 DEBG up_ds_listen process 1002
19113 Sep 22 23:14:27.852 DEBG [A] ack job 1002:3, : downstairs
19114 Sep 22 23:14:27.852 DEBG up_ds_listen checked 1 jobs, back to waiting
19115 Sep 22 23:14:27.852 INFO Current flush_numbers [0..12]: [0, 0]
19116 Sep 22 23:14:27.853 INFO Downstairs has completed Negotiation, task: proc
19117 Sep 22 23:14:27.853 DEBG Write :1002 deps:[] res:true
19118 Sep 22 23:14:27.853 INFO current number of open files limit 65536 is already the maximum
19119 Sep 22 23:14:27.853 INFO Opened existing region file "/tmp/downstairs-b4bTiF9L/region.json"
19120 Sep 22 23:14:27.853 INFO Database read version 1
19121 Sep 22 23:14:27.853 INFO Database write version 1
19122 Sep 22 23:14:27.853 INFO Current flush_numbers [0..12]: [0, 0]
19123 Sep 22 23:14:27.853 DEBG Write :1002 deps:[] res:true
19124 Sep 22 23:14:27.854 INFO Downstairs has completed Negotiation, task: proc
19125 Sep 22 23:14:27.854 INFO Current flush_numbers [0..12]: [0, 0]
19126 Sep 22 23:14:27.855 DEBG Write :1002 deps:[] res:true
19127 Sep 22 23:14:27.855 INFO Downstairs has completed Negotiation, task: proc
19128 test test::integration_test_volume_inactive_replace_downstairs ... ok
19129 Sep 22 23:14:27.855 INFO UUID: 05b93ff5-c936-4fa6-adb6-db93f2642835
19130 Sep 22 23:14:27.855 INFO Blocks per extent:5 Total Extents: 2
19131 Sep 22 23:14:27.855 INFO Crucible Version: Crucible Version: 0.0.1
19132 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19133 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19134 rustc: 1.70.0 stable x86_64-unknown-illumos
19135 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19136 Sep 22 23:14:27.855 INFO current number of open files limit 65536 is already the maximum
19137 Sep 22 23:14:27.855 INFO [0] 63bdb731-3b56-40e2-821a-221152f1277d (089624e5-8c38-462d-b605-f332f3805c9d) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
19138 Sep 22 23:14:27.855 INFO [0] Transition from WaitActive to WaitQuorum
19139 Sep 22 23:14:27.855 INFO Opened existing region file "/tmp/downstairs-zarv4usP/region.json"
19140 Sep 22 23:14:27.855 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19141 Sep 22 23:14:27.855 INFO Database read version 1
19142 Sep 22 23:14:27.855 WARN [0] new RM replaced this: None
19143 Sep 22 23:14:27.855 INFO Using address: 127.0.0.1:48071, task: main
19144 Sep 22 23:14:27.855 INFO Database write version 1
19145 Sep 22 23:14:27.855 INFO [0] Starts reconcile loop
19146 Sep 22 23:14:27.855 INFO current number of open files limit 65536 is already the maximum
19147 Sep 22 23:14:27.855 DEBG IO Read 1003 has deps [JobId(1002)]
19148 Sep 22 23:14:27.855 INFO Created new region file "/tmp/downstairs-WRx1oCsz/region.json"
19149 Sep 22 23:14:27.855 INFO [1] 63bdb731-3b56-40e2-821a-221152f1277d (089624e5-8c38-462d-b605-f332f3805c9d) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
19150 Sep 22 23:14:27.855 INFO [1] Transition from WaitActive to WaitQuorum
19151 Sep 22 23:14:27.855 WARN [1] new RM replaced this: None
19152 Sep 22 23:14:27.855 INFO [1] Starts reconcile loop
19153 Sep 22 23:14:27.855 INFO [2] 63bdb731-3b56-40e2-821a-221152f1277d (089624e5-8c38-462d-b605-f332f3805c9d) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
19154 Sep 22 23:14:27.856 INFO [2] Transition from WaitActive to WaitQuorum
19155 Sep 22 23:14:27.856 WARN [2] new RM replaced this: None
19156 Sep 22 23:14:27.856 INFO [2] Starts reconcile loop
19157 Sep 22 23:14:27.856 INFO Repair listens on 127.0.0.1:0, task: repair
19158 Sep 22 23:14:27.856 INFO [0] 127.0.0.1:43804 task reports connection:true
19159 Sep 22 23:14:27.856 INFO 63bdb731-3b56-40e2-821a-221152f1277d WaitQuorum WaitQuorum WaitQuorum
19160 Sep 22 23:14:27.856 INFO [0]R flush_numbers: [0, 0]
19161 Sep 22 23:14:27.856 INFO [0]R generation: [0, 0]
19162 Sep 22 23:14:27.856 INFO [0]R dirty: [false, false]
19163 Sep 22 23:14:27.856 INFO [1]R flush_numbers: [0, 0]
19164 Sep 22 23:14:27.856 INFO [1]R generation: [0, 0]
19165 Sep 22 23:14:27.856 INFO [1]R dirty: [false, false]
19166 Sep 22 23:14:27.856 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38532, task: repair
19167 Sep 22 23:14:27.856 INFO [2]R flush_numbers: [0, 0]
19168 Sep 22 23:14:27.856 INFO [2]R generation: [0, 0]
19169 Sep 22 23:14:27.856 INFO [2]R dirty: [false, false]
19170 Sep 22 23:14:27.856 INFO Max found gen is 1
19171 Sep 22 23:14:27.856 INFO Generation requested: 1 >= found:1
19172 Sep 22 23:14:27.856 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38532, task: repair
19173 Sep 22 23:14:27.856 INFO Next flush: 1
19174 Sep 22 23:14:27.856 INFO All extents match
19175 Sep 22 23:14:27.856 INFO No downstairs repair required
19176 Sep 22 23:14:27.856 INFO No initial repair work was required
19177 Sep 22 23:14:27.856 INFO listening, local_addr: 127.0.0.1:38532, task: repair
19178 Sep 22 23:14:27.856 INFO Set Downstairs and Upstairs active
19179 Sep 22 23:14:27.856 INFO 63bdb731-3b56-40e2-821a-221152f1277d is now active with session: 089624e5-8c38-462d-b605-f332f3805c9d
19180 Sep 22 23:14:27.856 INFO 63bdb731-3b56-40e2-821a-221152f1277d Set Active after no repair
19181 Sep 22 23:14:27.856 INFO Notify all downstairs, region set compare is done.
19182 Sep 22 23:14:27.856 INFO Set check for repair
19183 Sep 22 23:14:27.856 INFO [1] 127.0.0.1:60915 task reports connection:true
19184 Sep 22 23:14:27.856 INFO 63bdb731-3b56-40e2-821a-221152f1277d Active Active Active
19185 Sep 22 23:14:27.856 INFO Set check for repair
19186 Sep 22 23:14:27.856 INFO [2] 127.0.0.1:42061 task reports connection:true
19187 Sep 22 23:14:27.856 INFO 63bdb731-3b56-40e2-821a-221152f1277d Active Active Active
19188 Sep 22 23:14:27.856 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38532, task: repair
19189 Sep 22 23:14:27.856 INFO Set check for repair
19190 Sep 22 23:14:27.856 INFO Using repair address: 127.0.0.1:38532, task: main
19191 Sep 22 23:14:27.856 INFO [0] received reconcile message
19192 Sep 22 23:14:27.856 INFO No SSL acceptor configured, task: main
19193 Sep 22 23:14:27.856 INFO [0] All repairs completed, exit
19194 Sep 22 23:14:27.856 INFO [0] Starts cmd_loop
19195 Sep 22 23:14:27.856 INFO [1] received reconcile message
19196 Sep 22 23:14:27.856 INFO [1] All repairs completed, exit
19197 Sep 22 23:14:27.856 INFO [1] Starts cmd_loop
19198 Sep 22 23:14:27.856 INFO UUID: ff3ebd3a-c67d-435f-8151-d080a804dbf5
19199 Sep 22 23:14:27.856 INFO [2] received reconcile message
19200 Sep 22 23:14:27.856 INFO Blocks per extent:5 Total Extents: 2
19201 Sep 22 23:14:27.856 DEBG Read :1003 deps:[JobId(1002)] res:true
19202 Sep 22 23:14:27.856 INFO [2] All repairs completed, exit
19203 Sep 22 23:14:27.856 INFO [2] Starts cmd_loop
19204 Sep 22 23:14:27.856 INFO current number of open files limit 65536 is already the maximum
19205 Sep 22 23:14:27.856 INFO Crucible Version: Crucible Version: 0.0.1
19206 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19207 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19208 rustc: 1.70.0 stable x86_64-unknown-illumos
19209 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19210 The guest has finished waiting for activation
19211 Sep 22 23:14:27.856 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19212 Sep 22 23:14:27.856 INFO Using address: 127.0.0.1:43716, task: main
19213 Sep 22 23:14:27.856 INFO Created new region file "/tmp/downstairs-2VFKgxCo/region.json"
19214 Sep 22 23:14:27.856 WARN 63bdb731-3b56-40e2-821a-221152f1277d request to replace downstairs 127.0.0.1:43804 with 127.0.0.1:60915
19215 Sep 22 23:14:27.856 INFO 63bdb731-3b56-40e2-821a-221152f1277d found old target: 127.0.0.1:43804 at 0
19216 Sep 22 23:14:27.856 INFO 63bdb731-3b56-40e2-821a-221152f1277d found new target: 127.0.0.1:60915 at 1
19217 Sep 22 23:14:27.856 DEBG Read :1003 deps:[JobId(1002)] res:true
19218 Sep 22 23:14:27.857 WARN 63bdb731-3b56-40e2-821a-221152f1277d request to replace downstairs 127.0.0.1:60915 with 127.0.0.1:43804
19219 Sep 22 23:14:27.857 INFO 63bdb731-3b56-40e2-821a-221152f1277d found new target: 127.0.0.1:43804 at 0
19220 Sep 22 23:14:27.857 INFO 63bdb731-3b56-40e2-821a-221152f1277d found old target: 127.0.0.1:60915 at 1
19221 Sep 22 23:14:27.857 INFO Repair listens on 127.0.0.1:0, task: repair
19222 Sep 22 23:14:27.857 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51875, task: repair
19223 Sep 22 23:14:27.857 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51875, task: repair
19224 Sep 22 23:14:27.857 INFO listening, local_addr: 127.0.0.1:51875, task: repair
19225 Sep 22 23:14:27.857 DEBG Read :1003 deps:[JobId(1002)] res:true
19226 Sep 22 23:14:27.857 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51875, task: repair
19227 Sep 22 23:14:27.857 INFO Using repair address: 127.0.0.1:51875, task: main
19228 Sep 22 23:14:27.857 INFO No SSL acceptor configured, task: main
19229 Sep 22 23:14:27.858 INFO current number of open files limit 65536 is already the maximum
19230 Sep 22 23:14:27.858 INFO Created new region file "/tmp/downstairs-Af0iu2U2/region.json"
19231 Sep 22 23:14:27.858 DEBG [0] Read AckReady 1003, : downstairs
19232 Sep 22 23:14:27.859 DEBG [1] Read already AckReady 1003, : downstairs
19233 test test::integration_test_volume_replace_active ... ok
19234 Sep 22 23:14:27.859 DEBG [2] Read already AckReady 1003, : downstairs
19235 Sep 22 23:14:27.859 DEBG up_ds_listen was notified
19236 Sep 22 23:14:27.859 DEBG up_ds_listen process 1003
19237 Sep 22 23:14:27.860 DEBG [A] ack job 1003:4, : downstairs
19238 Sep 22 23:14:27.860 DEBG up_ds_listen checked 1 jobs, back to waiting
19239 Sep 22 23:14:27.860 INFO current number of open files limit 65536 is already the maximum
19240 Sep 22 23:14:27.860 INFO Created new region file "/tmp/downstairs-jK06RaaJ/region.json"
19241 Sep 22 23:14:27.860 DEBG IO Read 1001 has deps []
19242 Sep 22 23:14:27.861 DEBG Read :1001 deps:[] res:true
19243 Sep 22 23:14:27.861 INFO current number of open files limit 65536 is already the maximum
19244 Sep 22 23:14:27.861 INFO Opened existing region file "/tmp/downstairs-2VFKgxCo/region.json"
19245 Sep 22 23:14:27.861 INFO Database read version 1
19246 Sep 22 23:14:27.861 INFO Database write version 1
19247 Sep 22 23:14:27.861 DEBG Read :1001 deps:[] res:true
19248 Sep 22 23:14:27.861 INFO current number of open files limit 65536 is already the maximum
19249 Sep 22 23:14:27.861 INFO Opened existing region file "/tmp/downstairs-Af0iu2U2/region.json"
19250 Sep 22 23:14:27.861 INFO Database read version 1
19251 Sep 22 23:14:27.861 INFO Database write version 1
19252 Sep 22 23:14:27.862 DEBG Read :1001 deps:[] res:true
19253 Sep 22 23:14:27.864 DEBG [0] Read AckReady 1001, : downstairs
19254 Sep 22 23:14:27.865 INFO UUID: be9bde0c-bb2e-4b52-8e82-23af310505da
19255 Sep 22 23:14:27.865 INFO Blocks per extent:5 Total Extents: 2
19256 Sep 22 23:14:27.865 INFO Crucible Version: Crucible Version: 0.0.1
19257 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19258 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19259 rustc: 1.70.0 stable x86_64-unknown-illumos
19260 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19261 Sep 22 23:14:27.865 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19262 Sep 22 23:14:27.865 INFO Using address: 127.0.0.1:46338, task: main
19263 Sep 22 23:14:27.865 INFO UUID: 36e095b9-e22f-4b83-be94-6325fd4ef84f
19264 Sep 22 23:14:27.865 INFO Blocks per extent:5 Total Extents: 2
19265 Sep 22 23:14:27.865 INFO Repair listens on 127.0.0.1:0, task: repair
19266 Sep 22 23:14:27.865 INFO Crucible Version: Crucible Version: 0.0.1
19267 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19268 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19269 rustc: 1.70.0 stable x86_64-unknown-illumos
19270 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19271 Sep 22 23:14:27.865 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19272 Sep 22 23:14:27.865 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:54607, task: repair
19273 Sep 22 23:14:27.865 INFO Using address: 127.0.0.1:64857, task: main
19274 Sep 22 23:14:27.865 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:54607, task: repair
19275 Sep 22 23:14:27.865 INFO listening, local_addr: 127.0.0.1:54607, task: repair
19276 Sep 22 23:14:27.865 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:54607, task: repair
19277 Sep 22 23:14:27.866 INFO current number of open files limit 65536 is already the maximum
19278 Sep 22 23:14:27.866 INFO Using repair address: 127.0.0.1:54607, task: main
19279 Sep 22 23:14:27.866 INFO No SSL acceptor configured, task: main
19280 Sep 22 23:14:27.866 INFO Opened existing region file "/tmp/downstairs-jK06RaaJ/region.json"
19281 Sep 22 23:14:27.866 INFO Database read version 1
19282 Sep 22 23:14:27.866 INFO Repair listens on 127.0.0.1:0, task: repair
19283 Sep 22 23:14:27.866 INFO Database write version 1
19284 Sep 22 23:14:27.866 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57115, task: repair
19285 Sep 22 23:14:27.866 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57115, task: repair
19286 Sep 22 23:14:27.866 INFO listening, local_addr: 127.0.0.1:57115, task: repair
19287 Sep 22 23:14:27.866 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57115, task: repair
19288 Sep 22 23:14:27.866 INFO Using repair address: 127.0.0.1:57115, task: main
19289 Sep 22 23:14:27.866 INFO No SSL acceptor configured, task: main
19290 Sep 22 23:14:27.866 DEBG [1] Read already AckReady 1001, : downstairs
19291 Sep 22 23:14:27.866 INFO Upstairs starts
19292 Sep 22 23:14:27.866 INFO Crucible Version: BuildInfo {
19293 version: "0.0.1",
19294 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
19295 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
19296 git_branch: "main",
19297 rustc_semver: "1.70.0",
19298 rustc_channel: "stable",
19299 rustc_host_triple: "x86_64-unknown-illumos",
19300 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
19301 cargo_triple: "x86_64-unknown-illumos",
19302 debug: true,
19303 opt_level: 0,
19304 }
19305 Sep 22 23:14:27.866 INFO Upstairs <-> Downstairs Message Version: 4
19306 Sep 22 23:14:27.866 INFO Crucible stats registered with UUID: e94af85e-3796-4fab-91a1-f12add9c3020
19307 Sep 22 23:14:27.866 INFO Crucible e94af85e-3796-4fab-91a1-f12add9c3020 has session id: 61e725e9-9021-4fae-9e9d-a7be284a2c57
19308 Sep 22 23:14:27.866 INFO listening on 127.0.0.1:0, task: main
19309 Sep 22 23:14:27.866 INFO listening on 127.0.0.1:0, task: main
19310 Sep 22 23:14:27.867 INFO listening on 127.0.0.1:0, task: main
19311 Sep 22 23:14:27.867 INFO Upstairs starts
19312 Sep 22 23:14:27.867 INFO Crucible Version: BuildInfo {
19313 version: "0.0.1",
19314 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
19315 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
19316 git_branch: "main",
19317 rustc_semver: "1.70.0",
19318 rustc_channel: "stable",
19319 rustc_host_triple: "x86_64-unknown-illumos",
19320 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
19321 cargo_triple: "x86_64-unknown-illumos",
19322 debug: true,
19323 opt_level: 0,
19324 }
19325 Sep 22 23:14:27.867 INFO [0] connecting to 127.0.0.1:52905, looper: 0
19326 Sep 22 23:14:27.867 INFO Upstairs <-> Downstairs Message Version: 4
19327 Sep 22 23:14:27.867 INFO Crucible stats registered with UUID: 63fa212f-4948-4c7a-a14a-d7319688df16
19328 Sep 22 23:14:27.867 INFO Crucible 63fa212f-4948-4c7a-a14a-d7319688df16 has session id: 9994941c-8051-4c2c-870a-bf5e9fb00b99
19329 Sep 22 23:14:27.867 INFO [1] connecting to 127.0.0.1:48071, looper: 1
19330 Sep 22 23:14:27.867 INFO listening on 127.0.0.1:0, task: main
19331 Sep 22 23:14:27.867 INFO [2] connecting to 127.0.0.1:46338, looper: 2
19332 Sep 22 23:14:27.867 INFO listening on 127.0.0.1:0, task: main
19333 Sep 22 23:14:27.867 INFO listening on 127.0.0.1:0, task: main
19334 Sep 22 23:14:27.867 INFO up_listen starts, task: up_listen
19335 Sep 22 23:14:27.867 INFO Wait for all three downstairs to come online
19336 Sep 22 23:14:27.867 INFO Flush timeout: 0.5
19337 Sep 22 23:14:27.867 INFO [0] connecting to 127.0.0.1:47986, looper: 0
19338 Sep 22 23:14:27.867 INFO [1] connecting to 127.0.0.1:43716, looper: 1
19339 Sep 22 23:14:27.867 INFO [2] connecting to 127.0.0.1:64857, looper: 2
19340 Sep 22 23:14:27.867 INFO accepted connection from 127.0.0.1:56581, task: main
19341 Sep 22 23:14:27.867 INFO accepted connection from 127.0.0.1:64564, task: main
19342 Sep 22 23:14:27.867 INFO up_listen starts, task: up_listen
19343 Sep 22 23:14:27.868 INFO Wait for all three downstairs to come online
19344 Sep 22 23:14:27.868 INFO Flush timeout: 0.5
19345 Sep 22 23:14:27.868 INFO accepted connection from 127.0.0.1:45396, task: main
19346 Sep 22 23:14:27.868 INFO [0] e94af85e-3796-4fab-91a1-f12add9c3020 looper connected, looper: 0
19347 Sep 22 23:14:27.868 INFO [0] Proc runs for 127.0.0.1:52905 in state New
19348 Sep 22 23:14:27.868 INFO [1] e94af85e-3796-4fab-91a1-f12add9c3020 looper connected, looper: 1
19349 Sep 22 23:14:27.868 INFO accepted connection from 127.0.0.1:59696, task: main
19350 Sep 22 23:14:27.868 INFO [1] Proc runs for 127.0.0.1:48071 in state New
19351 Sep 22 23:14:27.868 INFO accepted connection from 127.0.0.1:36772, task: main
19352 Sep 22 23:14:27.868 INFO [2] e94af85e-3796-4fab-91a1-f12add9c3020 looper connected, looper: 2
19353 Sep 22 23:14:27.868 INFO accepted connection from 127.0.0.1:50348, task: main
19354 Sep 22 23:14:27.868 INFO [2] Proc runs for 127.0.0.1:46338 in state New
19355 Sep 22 23:14:27.868 INFO UUID: b78cfd9a-1100-4037-aec9-2c2cabfc0ce6
19356 Sep 22 23:14:27.868 INFO Blocks per extent:5 Total Extents: 2
19357 Sep 22 23:14:27.868 INFO [0] 63fa212f-4948-4c7a-a14a-d7319688df16 looper connected, looper: 0
19358 Sep 22 23:14:27.868 INFO [0] Proc runs for 127.0.0.1:47986 in state New
19359 Sep 22 23:14:27.868 INFO Crucible Version: Crucible Version: 0.0.1
19360 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19361 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19362 rustc: 1.70.0 stable x86_64-unknown-illumos
19363 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19364 Sep 22 23:14:27.868 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19365 Sep 22 23:14:27.868 INFO Using address: 127.0.0.1:53467, task: main
19366 Sep 22 23:14:27.868 INFO [1] 63fa212f-4948-4c7a-a14a-d7319688df16 looper connected, looper: 1
19367 Sep 22 23:14:27.868 INFO [1] Proc runs for 127.0.0.1:43716 in state New
19368 Sep 22 23:14:27.868 DEBG [2] Read already AckReady 1001, : downstairs
19369 Sep 22 23:14:27.868 DEBG up_ds_listen was notified
19370 Sep 22 23:14:27.868 INFO [2] 63fa212f-4948-4c7a-a14a-d7319688df16 looper connected, looper: 2
19371 Sep 22 23:14:27.868 INFO Connection request from e94af85e-3796-4fab-91a1-f12add9c3020 with version 4, task: proc
19372 Sep 22 23:14:27.868 DEBG up_ds_listen process 1001
19373 Sep 22 23:14:27.868 DEBG [A] ack job 1001:2, : downstairs
19374 Sep 22 23:14:27.868 INFO upstairs UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } connected, version 4, task: proc
19375 Sep 22 23:14:27.868 INFO [2] Proc runs for 127.0.0.1:64857 in state New
19376 Sep 22 23:14:27.868 INFO Connection request from e94af85e-3796-4fab-91a1-f12add9c3020 with version 4, task: proc
19377 Sep 22 23:14:27.868 INFO upstairs UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } connected, version 4, task: proc
19378 Sep 22 23:14:27.869 INFO Connection request from e94af85e-3796-4fab-91a1-f12add9c3020 with version 4, task: proc
19379 Sep 22 23:14:27.869 INFO Repair listens on 127.0.0.1:0, task: repair
19380 Sep 22 23:14:27.869 INFO upstairs UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } connected, version 4, task: proc
19381 Sep 22 23:14:27.869 INFO Connection request from 63fa212f-4948-4c7a-a14a-d7319688df16 with version 4, task: proc
19382 Sep 22 23:14:27.869 DEBG up_ds_listen checked 1 jobs, back to waiting
19383 Sep 22 23:14:27.869 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35868, task: repair
19384 Sep 22 23:14:27.869 INFO upstairs UpstairsConnection { upstairs_id: 63fa212f-4948-4c7a-a14a-d7319688df16, session_id: 67fdb17a-86e0-451b-a532-c1ff4695781c, gen: 1 } connected, version 4, task: proc
19385 Sep 22 23:14:27.869 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35868, task: repair
19386 The guest has requested activation
19387 Sep 22 23:14:27.869 INFO listening, local_addr: 127.0.0.1:35868, task: repair
19388 Sep 22 23:14:27.869 INFO Connection request from 63fa212f-4948-4c7a-a14a-d7319688df16 with version 4, task: proc
19389 Sep 22 23:14:27.869 INFO upstairs UpstairsConnection { upstairs_id: 63fa212f-4948-4c7a-a14a-d7319688df16, session_id: 67fdb17a-86e0-451b-a532-c1ff4695781c, gen: 1 } connected, version 4, task: proc
19390 Sep 22 23:14:27.869 INFO [0] e94af85e-3796-4fab-91a1-f12add9c3020 (a86ae319-11ad-4b2d-a80b-330efc92ebef) New New New ds_transition to WaitActive
19391 Sep 22 23:14:27.869 INFO [0] Transition from New to WaitActive
19392 Sep 22 23:14:27.869 INFO Connection request from 63fa212f-4948-4c7a-a14a-d7319688df16 with version 4, task: proc
19393 Sep 22 23:14:27.869 INFO upstairs UpstairsConnection { upstairs_id: 63fa212f-4948-4c7a-a14a-d7319688df16, session_id: 67fdb17a-86e0-451b-a532-c1ff4695781c, gen: 1 } connected, version 4, task: proc
19394 Sep 22 23:14:27.869 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35868, task: repair
19395 Sep 22 23:14:27.869 INFO [1] e94af85e-3796-4fab-91a1-f12add9c3020 (a86ae319-11ad-4b2d-a80b-330efc92ebef) WaitActive New New ds_transition to WaitActive
19396 Sep 22 23:14:27.869 INFO [1] Transition from New to WaitActive
19397 Sep 22 23:14:27.869 INFO Using repair address: 127.0.0.1:35868, task: main
19398 Sep 22 23:14:27.869 INFO No SSL acceptor configured, task: main
19399 The guest has requested activation
19400 Sep 22 23:14:27.869 INFO [2] e94af85e-3796-4fab-91a1-f12add9c3020 (a86ae319-11ad-4b2d-a80b-330efc92ebef) WaitActive WaitActive New ds_transition to WaitActive
19401 Sep 22 23:14:27.869 INFO [2] Transition from New to WaitActive
19402 Sep 22 23:14:27.869 INFO [0] 63fa212f-4948-4c7a-a14a-d7319688df16 (67fdb17a-86e0-451b-a532-c1ff4695781c) New New New ds_transition to WaitActive
19403 Sep 22 23:14:27.869 INFO e94af85e-3796-4fab-91a1-f12add9c3020 active request set
19404 Sep 22 23:14:27.869 DEBG IO Flush 1004 has deps [JobId(1003), JobId(1002)]
19405 Sep 22 23:14:27.869 INFO [0] Transition from New to WaitActive
19406 Sep 22 23:14:27.869 INFO [0] received activate with gen 1
19407 Sep 22 23:14:27.869 INFO [1] 63fa212f-4948-4c7a-a14a-d7319688df16 (67fdb17a-86e0-451b-a532-c1ff4695781c) WaitActive New New ds_transition to WaitActive
19408 Sep 22 23:14:27.869 INFO [0] client got ds_active_rx, promote! session a86ae319-11ad-4b2d-a80b-330efc92ebef
19409 Sep 22 23:14:27.869 INFO [1] Transition from New to WaitActive
19410 Sep 22 23:14:27.869 INFO [1] received activate with gen 1
19411 Sep 22 23:14:27.869 INFO [1] client got ds_active_rx, promote! session a86ae319-11ad-4b2d-a80b-330efc92ebef
19412 Sep 22 23:14:27.869 INFO [2] 63fa212f-4948-4c7a-a14a-d7319688df16 (67fdb17a-86e0-451b-a532-c1ff4695781c) WaitActive WaitActive New ds_transition to WaitActive
19413 Sep 22 23:14:27.869 INFO [2] Transition from New to WaitActive
19414 Sep 22 23:14:27.869 INFO [2] received activate with gen 1
19415 Sep 22 23:14:27.869 INFO 63fa212f-4948-4c7a-a14a-d7319688df16 active request set
19416 Sep 22 23:14:27.869 INFO [2] client got ds_active_rx, promote! session a86ae319-11ad-4b2d-a80b-330efc92ebef
19417 Sep 22 23:14:27.870 INFO [0] received activate with gen 1
19418 Sep 22 23:14:27.870 INFO [0] client got ds_active_rx, promote! session 67fdb17a-86e0-451b-a532-c1ff4695781c
19419 Sep 22 23:14:27.870 INFO current number of open files limit 65536 is already the maximum
19420 Sep 22 23:14:27.870 INFO UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } is now active (read-write)
19421 Sep 22 23:14:27.870 INFO [1] received activate with gen 1
19422 Sep 22 23:14:27.870 INFO [1] client got ds_active_rx, promote! session 67fdb17a-86e0-451b-a532-c1ff4695781c
19423 Sep 22 23:14:27.870 INFO Created new region file "/tmp/downstairs-5ryGVcVK/region.json"
19424 Sep 22 23:14:27.870 INFO [2] received activate with gen 1
19425 Sep 22 23:14:27.870 INFO [2] client got ds_active_rx, promote! session 67fdb17a-86e0-451b-a532-c1ff4695781c
19426 Sep 22 23:14:27.870 INFO UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } is now active (read-write)
19427 Sep 22 23:14:27.870 INFO UpstairsConnection { upstairs_id: 63fa212f-4948-4c7a-a14a-d7319688df16, session_id: 67fdb17a-86e0-451b-a532-c1ff4695781c, gen: 1 } is now active (read-write)
19428 Sep 22 23:14:27.870 INFO UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } is now active (read-write)
19429 Sep 22 23:14:27.870 INFO UpstairsConnection { upstairs_id: 63fa212f-4948-4c7a-a14a-d7319688df16, session_id: 67fdb17a-86e0-451b-a532-c1ff4695781c, gen: 1 } is now active (read-write)
19430 Sep 22 23:14:27.870 INFO UpstairsConnection { upstairs_id: 63fa212f-4948-4c7a-a14a-d7319688df16, session_id: 67fdb17a-86e0-451b-a532-c1ff4695781c, gen: 1 } is now active (read-write)
19431 Sep 22 23:14:27.870 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002)] res:true f:2 g:3
19432 Sep 22 23:14:27.870 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002)] res:true f:2 g:3
19433 Sep 22 23:14:27.870 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002)] res:true f:2 g:3
19434 Sep 22 23:14:27.870 INFO [0] downstairs client at 127.0.0.1:52905 has UUID 30596aa6-8959-4f8c-98b8-ac91fdc30fc1
19435 Sep 22 23:14:27.870 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 30596aa6-8959-4f8c-98b8-ac91fdc30fc1, encrypted: true, database_read_version: 1, database_write_version: 1 }
19436 Sep 22 23:14:27.871 INFO e94af85e-3796-4fab-91a1-f12add9c3020 WaitActive WaitActive WaitActive
19437 Sep 22 23:14:27.871 DEBG up_ds_listen was notified
19438 Sep 22 23:14:27.871 INFO [1] downstairs client at 127.0.0.1:48071 has UUID 05b93ff5-c936-4fa6-adb6-db93f2642835
19439 Sep 22 23:14:27.871 INFO [0] downstairs client at 127.0.0.1:47986 has UUID 8e0311d9-6cee-4187-8ec5-c1067f73eb45
19440 Sep 22 23:14:27.871 DEBG up_ds_listen process 1004
19441 Sep 22 23:14:27.871 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 05b93ff5-c936-4fa6-adb6-db93f2642835, encrypted: true, database_read_version: 1, database_write_version: 1 }
19442 Sep 22 23:14:27.871 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 8e0311d9-6cee-4187-8ec5-c1067f73eb45, encrypted: true, database_read_version: 1, database_write_version: 1 }
19443 Sep 22 23:14:27.871 DEBG [A] ack job 1004:5, : downstairs
19444 Sep 22 23:14:27.871 INFO e94af85e-3796-4fab-91a1-f12add9c3020 WaitActive WaitActive WaitActive
19445 Sep 22 23:14:27.871 INFO 63fa212f-4948-4c7a-a14a-d7319688df16 WaitActive WaitActive WaitActive
19446 Sep 22 23:14:27.871 DEBG [rc] retire 1004 clears [JobId(1002), JobId(1003), JobId(1004)], : downstairs
19447 Sep 22 23:14:27.871 DEBG up_ds_listen checked 1 jobs, back to waiting
19448 Sep 22 23:14:27.871 INFO [1] downstairs client at 127.0.0.1:43716 has UUID ff3ebd3a-c67d-435f-8151-d080a804dbf5
19449 Sep 22 23:14:27.871 INFO [2] downstairs client at 127.0.0.1:46338 has UUID be9bde0c-bb2e-4b52-8e82-23af310505da
19450 Sep 22 23:14:27.871 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ff3ebd3a-c67d-435f-8151-d080a804dbf5, encrypted: true, database_read_version: 1, database_write_version: 1 }
19451 Sep 22 23:14:27.871 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: be9bde0c-bb2e-4b52-8e82-23af310505da, encrypted: true, database_read_version: 1, database_write_version: 1 }
19452 Sep 22 23:14:27.871 INFO 63fa212f-4948-4c7a-a14a-d7319688df16 WaitActive WaitActive WaitActive
19453 Sep 22 23:14:27.871 INFO e94af85e-3796-4fab-91a1-f12add9c3020 WaitActive WaitActive WaitActive
19454 Sep 22 23:14:27.871 INFO [2] downstairs client at 127.0.0.1:64857 has UUID 36e095b9-e22f-4b83-be94-6325fd4ef84f
19455 Sep 22 23:14:27.871 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 36e095b9-e22f-4b83-be94-6325fd4ef84f, encrypted: true, database_read_version: 1, database_write_version: 1 }
19456 Sep 22 23:14:27.871 INFO 63fa212f-4948-4c7a-a14a-d7319688df16 WaitActive WaitActive WaitActive
19457 Sep 22 23:14:27.871 INFO Current flush_numbers [0..12]: [0, 0]
19458 Sep 22 23:14:27.871 INFO Current flush_numbers [0..12]: [0, 0]
19459 Sep 22 23:14:27.871 INFO Downstairs has completed Negotiation, task: proc
19460 Sep 22 23:14:27.872 INFO Current flush_numbers [0..12]: [0, 0]
19461 Sep 22 23:14:27.872 INFO Downstairs has completed Negotiation, task: proc
19462 Sep 22 23:14:27.872 INFO Downstairs has completed Negotiation, task: proc
19463 Sep 22 23:14:27.872 INFO Current flush_numbers [0..12]: [0, 0]
19464 Sep 22 23:14:27.872 INFO Current flush_numbers [0..12]: [0, 0]
19465 Sep 22 23:14:27.872 INFO Downstairs has completed Negotiation, task: proc
19466 Sep 22 23:14:27.872 INFO Downstairs has completed Negotiation, task: proc
19467 Sep 22 23:14:27.872 INFO Current flush_numbers [0..12]: [0, 0]
19468 Sep 22 23:14:27.873 INFO [0] e94af85e-3796-4fab-91a1-f12add9c3020 (a86ae319-11ad-4b2d-a80b-330efc92ebef) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
19469 Sep 22 23:14:27.873 INFO [0] Transition from WaitActive to WaitQuorum
19470 Sep 22 23:14:27.873 WARN [0] new RM replaced this: None
19471 Sep 22 23:14:27.873 INFO Downstairs has completed Negotiation, task: proc
19472 Sep 22 23:14:27.873 INFO [0] Starts reconcile loop
19473 Sep 22 23:14:27.873 INFO [1] e94af85e-3796-4fab-91a1-f12add9c3020 (a86ae319-11ad-4b2d-a80b-330efc92ebef) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
19474 Sep 22 23:14:27.873 INFO [1] Transition from WaitActive to WaitQuorum
19475 Sep 22 23:14:27.873 WARN [1] new RM replaced this: None
19476 Sep 22 23:14:27.873 INFO [0] 63fa212f-4948-4c7a-a14a-d7319688df16 (67fdb17a-86e0-451b-a532-c1ff4695781c) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
19477 Sep 22 23:14:27.873 INFO [1] Starts reconcile loop
19478 Sep 22 23:14:27.873 INFO [0] Transition from WaitActive to WaitQuorum
19479 Sep 22 23:14:27.873 WARN [0] new RM replaced this: None
19480 Sep 22 23:14:27.873 INFO [0] Starts reconcile loop
19481 Sep 22 23:14:27.873 INFO [2] e94af85e-3796-4fab-91a1-f12add9c3020 (a86ae319-11ad-4b2d-a80b-330efc92ebef) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
19482 Sep 22 23:14:27.873 INFO [2] Transition from WaitActive to WaitQuorum
19483 Sep 22 23:14:27.873 WARN [2] new RM replaced this: None
19484 Sep 22 23:14:27.873 INFO current number of open files limit 65536 is already the maximum
19485 Sep 22 23:14:27.873 INFO [1] 63fa212f-4948-4c7a-a14a-d7319688df16 (67fdb17a-86e0-451b-a532-c1ff4695781c) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
19486 Sep 22 23:14:27.873 INFO Opened existing region file "/tmp/downstairs-5ryGVcVK/region.json"
19487 Sep 22 23:14:27.873 INFO [1] Transition from WaitActive to WaitQuorum
19488 Sep 22 23:14:27.873 INFO [2] Starts reconcile loop
19489 Sep 22 23:14:27.873 INFO Database read version 1
19490 Sep 22 23:14:27.873 WARN [1] new RM replaced this: None
19491 Sep 22 23:14:27.873 INFO Database write version 1
19492 Sep 22 23:14:27.873 INFO [1] Starts reconcile loop
19493 Sep 22 23:14:27.873 INFO [0] 127.0.0.1:52905 task reports connection:true
19494 Sep 22 23:14:27.873 INFO e94af85e-3796-4fab-91a1-f12add9c3020 WaitQuorum WaitQuorum WaitQuorum
19495 Sep 22 23:14:27.873 INFO [2] 63fa212f-4948-4c7a-a14a-d7319688df16 (67fdb17a-86e0-451b-a532-c1ff4695781c) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
19496 Sep 22 23:14:27.873 INFO [0]R flush_numbers: [0, 0]
19497 Sep 22 23:14:27.873 INFO [2] Transition from WaitActive to WaitQuorum
19498 Sep 22 23:14:27.873 INFO [0]R generation: [0, 0]
19499 Sep 22 23:14:27.873 WARN [2] new RM replaced this: None
19500 Sep 22 23:14:27.873 INFO [0]R dirty: [false, false]
19501 Sep 22 23:14:27.873 INFO [1]R flush_numbers: [0, 0]
19502 Sep 22 23:14:27.873 INFO [2] Starts reconcile loop
19503 Sep 22 23:14:27.873 INFO [1]R generation: [0, 0]
19504 Sep 22 23:14:27.873 INFO [1]R dirty: [false, false]
19505 Sep 22 23:14:27.873 INFO [2]R flush_numbers: [0, 0]
19506 Sep 22 23:14:27.873 INFO [2]R generation: [0, 0]
19507 Sep 22 23:14:27.873 INFO [0] 127.0.0.1:47986 task reports connection:true
19508 Sep 22 23:14:27.873 INFO [2]R dirty: [false, false]
19509 Sep 22 23:14:27.873 INFO Max found gen is 1
19510 Sep 22 23:14:27.873 INFO 63fa212f-4948-4c7a-a14a-d7319688df16 WaitQuorum WaitQuorum WaitQuorum
19511 Sep 22 23:14:27.873 INFO Generation requested: 1 >= found:1
19512 Sep 22 23:14:27.873 INFO Next flush: 1
19513 Sep 22 23:14:27.873 INFO [0]R flush_numbers: [0, 0]
19514 Sep 22 23:14:27.873 INFO [0]R generation: [0, 0]
19515 Sep 22 23:14:27.873 INFO All extents match
19516 Sep 22 23:14:27.873 INFO [0]R dirty: [false, false]
19517 Sep 22 23:14:27.873 INFO No downstairs repair required
19518 Sep 22 23:14:27.873 INFO [1]R flush_numbers: [0, 0]
19519 Sep 22 23:14:27.873 INFO No initial repair work was required
19520 Sep 22 23:14:27.873 INFO [1]R generation: [0, 0]
19521 Sep 22 23:14:27.873 INFO Set Downstairs and Upstairs active
19522 Sep 22 23:14:27.873 INFO [1]R dirty: [false, false]
19523 Sep 22 23:14:27.873 INFO [2]R flush_numbers: [0, 0]
19524 Sep 22 23:14:27.873 INFO [2]R generation: [0, 0]
19525 Sep 22 23:14:27.873 INFO e94af85e-3796-4fab-91a1-f12add9c3020 is now active with session: a86ae319-11ad-4b2d-a80b-330efc92ebef
19526 Sep 22 23:14:27.873 INFO [2]R dirty: [false, false]
19527 Sep 22 23:14:27.873 INFO Max found gen is 1
19528 Sep 22 23:14:27.873 INFO e94af85e-3796-4fab-91a1-f12add9c3020 Set Active after no repair
19529 Sep 22 23:14:27.873 INFO Generation requested: 1 >= found:1
19530 Sep 22 23:14:27.873 INFO Notify all downstairs, region set compare is done.
19531 Sep 22 23:14:27.873 INFO Next flush: 1
19532 Sep 22 23:14:27.874 INFO Set check for repair
19533 Sep 22 23:14:27.874 INFO All extents match
19534 Sep 22 23:14:27.874 INFO No downstairs repair required
19535 Sep 22 23:14:27.874 INFO No initial repair work was required
19536 Sep 22 23:14:27.874 INFO [1] 127.0.0.1:48071 task reports connection:true
19537 Sep 22 23:14:27.874 INFO Set Downstairs and Upstairs active
19538 Sep 22 23:14:27.874 INFO e94af85e-3796-4fab-91a1-f12add9c3020 Active Active Active
19539 Sep 22 23:14:27.874 INFO 63fa212f-4948-4c7a-a14a-d7319688df16 is now active with session: 67fdb17a-86e0-451b-a532-c1ff4695781c
19540 Sep 22 23:14:27.874 INFO Set check for repair
19541 Sep 22 23:14:27.874 INFO 63fa212f-4948-4c7a-a14a-d7319688df16 Set Active after no repair
19542 Sep 22 23:14:27.874 INFO Notify all downstairs, region set compare is done.
19543 Sep 22 23:14:27.874 INFO [2] 127.0.0.1:46338 task reports connection:true
19544 Sep 22 23:14:27.874 INFO Set check for repair
19545 Sep 22 23:14:27.874 INFO e94af85e-3796-4fab-91a1-f12add9c3020 Active Active Active
19546 Sep 22 23:14:27.874 INFO Set check for repair
19547 Sep 22 23:14:27.874 INFO [1] 127.0.0.1:43716 task reports connection:true
19548 Sep 22 23:14:27.874 INFO 63fa212f-4948-4c7a-a14a-d7319688df16 Active Active Active
19549 Sep 22 23:14:27.874 INFO Set check for repair
19550 Sep 22 23:14:27.874 INFO [0] received reconcile message
19551 Sep 22 23:14:27.874 INFO [0] All repairs completed, exit
19552 Sep 22 23:14:27.874 INFO [2] 127.0.0.1:64857 task reports connection:true
19553 Sep 22 23:14:27.874 INFO 63fa212f-4948-4c7a-a14a-d7319688df16 Active Active Active
19554 Sep 22 23:14:27.874 INFO [0] Starts cmd_loop
19555 Sep 22 23:14:27.874 INFO Set check for repair
19556 Sep 22 23:14:27.874 INFO [1] received reconcile message
19557 Sep 22 23:14:27.874 INFO [0] received reconcile message
19558 Sep 22 23:14:27.874 INFO [1] All repairs completed, exit
19559 Sep 22 23:14:27.874 INFO [0] All repairs completed, exit
19560 Sep 22 23:14:27.874 INFO [1] Starts cmd_loop
19561 Sep 22 23:14:27.874 INFO [0] Starts cmd_loop
19562 Sep 22 23:14:27.874 INFO [2] received reconcile message
19563 Sep 22 23:14:27.874 INFO [1] received reconcile message
19564 Sep 22 23:14:27.874 INFO [2] All repairs completed, exit
19565 Sep 22 23:14:27.874 INFO [1] All repairs completed, exit
19566 Sep 22 23:14:27.874 INFO [2] Starts cmd_loop
19567 Sep 22 23:14:27.874 INFO [1] Starts cmd_loop
19568 The guest has finished waiting for activation
19569 Sep 22 23:14:27.874 INFO [2] received reconcile message
19570 Sep 22 23:14:27.874 INFO [2] All repairs completed, exit
19571 Sep 22 23:14:27.874 INFO [2] Starts cmd_loop
19572 The guest has finished waiting for activation
19573 Sep 22 23:14:27.874 INFO current number of open files limit 65536 is already the maximum
19574 Sep 22 23:14:27.875 INFO Created new region file "/tmp/downstairs-uSBD3d00/region.json"
19575 Sep 22 23:14:27.875 INFO UUID: 555bafbf-d9bb-408a-aa71-328bc8a8cc92
19576 Sep 22 23:14:27.875 INFO Blocks per extent:5 Total Extents: 2
19577 Sep 22 23:14:27.875 INFO Crucible Version: Crucible Version: 0.0.1
19578 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19579 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19580 rustc: 1.70.0 stable x86_64-unknown-illumos
19581 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19582 Sep 22 23:14:27.875 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19583 Sep 22 23:14:27.875 INFO Using address: 127.0.0.1:38389, task: main
19584 Sep 22 23:14:27.875 INFO Repair listens on 127.0.0.1:0, task: repair
19585 Sep 22 23:14:27.876 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:46434, task: repair
19586 Sep 22 23:14:27.876 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:46434, task: repair
19587 Sep 22 23:14:27.876 INFO listening, local_addr: 127.0.0.1:46434, task: repair
19588 Sep 22 23:14:27.876 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:46434, task: repair
19589 Sep 22 23:14:27.876 INFO Using repair address: 127.0.0.1:46434, task: main
19590 Sep 22 23:14:27.876 INFO No SSL acceptor configured, task: main
19591 Sep 22 23:14:27.876 INFO current number of open files limit 65536 is already the maximum
19592 Sep 22 23:14:27.876 INFO Created new region file "/tmp/downstairs-51vT3sLU/region.json"
19593 Sep 22 23:14:27.877 DEBG IO Write 1000 has deps []
19594 Sep 22 23:14:27.877 DEBG up_ds_listen was notified
19595 Sep 22 23:14:27.877 DEBG up_ds_listen process 1000
19596 Sep 22 23:14:27.877 DEBG [A] ack job 1000:1, : downstairs
19597 Sep 22 23:14:27.877 DEBG up_ds_listen checked 1 jobs, back to waiting
19598 Sep 22 23:14:27.878 INFO current number of open files limit 65536 is already the maximum
19599 Sep 22 23:14:27.879 INFO Created new region file "/tmp/downstairs-08law0EE/region.json"
19600 Sep 22 23:14:27.880 INFO current number of open files limit 65536 is already the maximum
19601 Sep 22 23:14:27.880 INFO Opened existing region file "/tmp/downstairs-uSBD3d00/region.json"
19602 Sep 22 23:14:27.880 INFO Database read version 1
19603 Sep 22 23:14:27.880 INFO Database write version 1
19604 test test::integration_test_snapshot_backed_vol ... ok
19605 Sep 22 23:14:27.881 INFO current number of open files limit 65536 is already the maximum
19606 Sep 22 23:14:27.881 INFO Opened existing region file "/tmp/downstairs-n8rm3T5A/region.json"
19607 Sep 22 23:14:27.881 INFO Database read version 1
19608 Sep 22 23:14:27.881 INFO Database write version 1
19609 Sep 22 23:14:27.881 INFO current number of open files limit 65536 is already the maximum
19610 Sep 22 23:14:27.881 INFO Created new region file "/tmp/downstairs-C2d5BFdC/region.json"
19611 Sep 22 23:14:27.881 INFO UUID: 5dd2e15b-101c-44f7-b514-c1b4ac930978
19612 Sep 22 23:14:27.881 INFO Blocks per extent:5 Total Extents: 2
19613 Sep 22 23:14:27.881 INFO Crucible Version: Crucible Version: 0.0.1
19614 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19615 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19616 rustc: 1.70.0 stable x86_64-unknown-illumos
19617 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19618 Sep 22 23:14:27.882 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19619 Sep 22 23:14:27.882 INFO Using address: 127.0.0.1:47413, task: main
19620 Sep 22 23:14:27.882 INFO current number of open files limit 65536 is already the maximum
19621 Sep 22 23:14:27.882 INFO Opened existing region file "/tmp/downstairs-08law0EE/region.json"
19622 Sep 22 23:14:27.882 INFO Database read version 1
19623 Sep 22 23:14:27.882 INFO Database write version 1
19624 Sep 22 23:14:27.882 INFO Repair listens on 127.0.0.1:0, task: repair
19625 Sep 22 23:14:27.882 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:43170, task: repair
19626 Sep 22 23:14:27.882 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:43170, task: repair
19627 Sep 22 23:14:27.882 INFO listening, local_addr: 127.0.0.1:43170, task: repair
19628 Sep 22 23:14:27.882 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:43170, task: repair
19629 Sep 22 23:14:27.882 INFO current number of open files limit 65536 is already the maximum
19630 Sep 22 23:14:27.882 INFO Opened existing region file "/tmp/downstairs-51vT3sLU/region.json"
19631 Sep 22 23:14:27.882 INFO Using repair address: 127.0.0.1:43170, task: main
19632 Sep 22 23:14:27.882 INFO Database read version 1
19633 Sep 22 23:14:27.882 INFO No SSL acceptor configured, task: main
19634 Sep 22 23:14:27.882 INFO Database write version 1
19635 Sep 22 23:14:27.883 INFO current number of open files limit 65536 is already the maximum
19636 Sep 22 23:14:27.883 INFO Created new region file "/tmp/downstairs-3q9fZVF8/region.json"
19637 Sep 22 23:14:27.884 INFO UUID: b7c0d6d3-333f-4423-864b-ebd0befef183
19638 Sep 22 23:14:27.884 INFO Blocks per extent:5 Total Extents: 2
19639 Sep 22 23:14:27.884 INFO Crucible Version: Crucible Version: 0.0.1
19640 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19641 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19642 rustc: 1.70.0 stable x86_64-unknown-illumos
19643 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19644 Sep 22 23:14:27.884 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19645 Sep 22 23:14:27.884 INFO Using address: 127.0.0.1:56689, task: main
19646 Sep 22 23:14:27.884 INFO Repair listens on 127.0.0.1:0, task: repair
19647 Sep 22 23:14:27.884 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:47224, task: repair
19648 Sep 22 23:14:27.884 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:47224, task: repair
19649 Sep 22 23:14:27.884 INFO listening, local_addr: 127.0.0.1:47224, task: repair
19650 Sep 22 23:14:27.884 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:47224, task: repair
19651 Sep 22 23:14:27.884 INFO Using repair address: 127.0.0.1:47224, task: main
19652 Sep 22 23:14:27.884 INFO No SSL acceptor configured, task: main
19653 Sep 22 23:14:27.885 INFO UUID: 16b61f39-4c68-4f56-ae07-8508cf0adb07
19654 Sep 22 23:14:27.885 INFO Blocks per extent:5 Total Extents: 2
19655 Sep 22 23:14:27.885 INFO Crucible Version: Crucible Version: 0.0.1
19656 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19657 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19658 rustc: 1.70.0 stable x86_64-unknown-illumos
19659 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19660 Sep 22 23:14:27.885 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19661 Sep 22 23:14:27.885 INFO Using address: 127.0.0.1:58380, task: main
19662 Sep 22 23:14:27.885 INFO Repair listens on 127.0.0.1:0, task: repair
19663 Sep 22 23:14:27.885 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:62376, task: repair
19664 Sep 22 23:14:27.885 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:62376, task: repair
19665 Sep 22 23:14:27.885 INFO listening, local_addr: 127.0.0.1:62376, task: repair
19666 Sep 22 23:14:27.886 INFO listening on 127.0.0.1:0, task: main
19667 Sep 22 23:14:27.886 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:62376, task: repair
19668 Sep 22 23:14:27.886 WARN e94af85e-3796-4fab-91a1-f12add9c3020 request to replace downstairs 127.0.0.1:52905 with 127.0.0.1:56689
19669 Sep 22 23:14:27.886 INFO e94af85e-3796-4fab-91a1-f12add9c3020 found old target: 127.0.0.1:52905 at 0
19670 Sep 22 23:14:27.886 INFO e94af85e-3796-4fab-91a1-f12add9c3020 replacing old: 127.0.0.1:52905 at 0
19671 Sep 22 23:14:27.886 INFO Using repair address: 127.0.0.1:62376, task: main
19672 Sep 22 23:14:27.886 INFO [0] client skip 1 in process jobs because fault, : downstairs
19673 Sep 22 23:14:27.886 INFO No SSL acceptor configured, task: main
19674 Sep 22 23:14:27.886 INFO [0] changed 1 jobs to fault skipped, : downstairs
19675 Sep 22 23:14:27.886 INFO [0] e94af85e-3796-4fab-91a1-f12add9c3020 (a86ae319-11ad-4b2d-a80b-330efc92ebef) Active Active Active ds_transition to Replacing
19676 Sep 22 23:14:27.886 INFO [0] Transition from Active to Replacing
19677 Sep 22 23:14:27.886 INFO current number of open files limit 65536 is already the maximum
19678 Sep 22 23:14:27.886 INFO Opened existing region file "/tmp/downstairs-C2d5BFdC/region.json"
19679 Sep 22 23:14:27.886 INFO Database read version 1
19680 Sep 22 23:14:27.886 INFO Database write version 1
19681 Sep 22 23:14:27.886 INFO current number of open files limit 65536 is already the maximum
19682 Sep 22 23:14:27.886 DEBG Write :1000 deps:[] res:true
19683 Sep 22 23:14:27.886 INFO Created new region file "/tmp/downstairs-p0sBB51P/region.json"
19684 Sep 22 23:14:27.887 DEBG Write :1000 deps:[] res:true
19685 Sep 22 23:14:27.888 DEBG Write :1000 deps:[] res:true
19686 Sep 22 23:14:27.888 WARN [0] e94af85e-3796-4fab-91a1-f12add9c3020 WARNING finish job 1000 when downstairs state:Replacing
19687 Sep 22 23:14:27.888 WARN [0] Dropping already skipped job 1000, : downstairs
19688 Sep 22 23:14:27.888 WARN [0] will exit pm_task, this downstairs Replacing
19689 Sep 22 23:14:27.888 DEBG up_ds_listen was notified
19690 Sep 22 23:14:27.888 DEBG up_ds_listen checked 0 jobs, back to waiting
19691 Sep 22 23:14:27.888 ERRO 127.0.0.1:52905: proc: [0] client work task ended, Ok(Err([0] This downstairs now in Replacing)), so we end too, looper: 0
19692 Sep 22 23:14:27.888 INFO [0] e94af85e-3796-4fab-91a1-f12add9c3020 Gone missing, transition from Replacing to Replaced
19693 Sep 22 23:14:27.888 INFO [0] e94af85e-3796-4fab-91a1-f12add9c3020 connection to 127.0.0.1:52905 closed, looper: 0
19694 Sep 22 23:14:27.888 INFO [0] 127.0.0.1:52905 task reports connection:false
19695 Sep 22 23:14:27.888 INFO e94af85e-3796-4fab-91a1-f12add9c3020 Replaced Active Active
19696 Sep 22 23:14:27.888 INFO [0] 127.0.0.1:52905 task reports offline
19697 Sep 22 23:14:27.888 WARN upstairs UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } disconnected, 0 jobs left, task: main
19698 Sep 22 23:14:27.888 WARN upstairs UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } was previously active, clearing, task: main
19699 Sep 22 23:14:27.888 INFO connection (127.0.0.1:64564): all done
19700 Sep 22 23:14:27.889 INFO current number of open files limit 65536 is already the maximum
19701 Sep 22 23:14:27.889 INFO UUID: da4b10d8-d31d-4fd9-811a-957d3b103709
19702 Sep 22 23:14:27.889 INFO Blocks per extent:5 Total Extents: 2
19703 Sep 22 23:14:27.889 INFO Opened existing region file "/tmp/downstairs-3q9fZVF8/region.json"
19704 Sep 22 23:14:27.889 INFO Database read version 1
19705 Sep 22 23:14:27.889 INFO Database write version 1
19706 Sep 22 23:14:27.889 INFO Crucible Version: Crucible Version: 0.0.1
19707 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19708 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19709 rustc: 1.70.0 stable x86_64-unknown-illumos
19710 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19711 Sep 22 23:14:27.889 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19712 Sep 22 23:14:27.889 INFO Using address: 127.0.0.1:62077, task: main
19713 Sep 22 23:14:27.889 INFO Repair listens on 127.0.0.1:0, task: repair
19714 Sep 22 23:14:27.889 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:34610, task: repair
19715 Sep 22 23:14:27.889 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:34610, task: repair
19716 Sep 22 23:14:27.890 INFO listening, local_addr: 127.0.0.1:34610, task: repair
19717 Sep 22 23:14:27.890 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:34610, task: repair
19718 Sep 22 23:14:27.890 INFO Using repair address: 127.0.0.1:34610, task: main
19719 Sep 22 23:14:27.890 INFO No SSL acceptor configured, task: main
19720 Sep 22 23:14:27.890 INFO current number of open files limit 65536 is already the maximum
19721 Sep 22 23:14:27.890 INFO Created new region file "/tmp/downstairs-NcuCFDzp/region.json"
19722 Sep 22 23:14:27.891 INFO UUID: 8db2395a-ec39-4e72-9a2f-a6d3e8a113ec
19723 Sep 22 23:14:27.891 INFO Blocks per extent:5 Total Extents: 2
19724 Sep 22 23:14:27.891 INFO Crucible Version: Crucible Version: 0.0.1
19725 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19726 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19727 rustc: 1.70.0 stable x86_64-unknown-illumos
19728 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19729 Sep 22 23:14:27.891 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19730 Sep 22 23:14:27.891 INFO Using address: 127.0.0.1:38707, task: main
19731 Sep 22 23:14:27.892 INFO Repair listens on 127.0.0.1:0, task: repair
19732 Sep 22 23:14:27.892 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:65488, task: repair
19733 Sep 22 23:14:27.892 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:65488, task: repair
19734 Sep 22 23:14:27.892 INFO listening, local_addr: 127.0.0.1:65488, task: repair
19735 Sep 22 23:14:27.892 INFO current number of open files limit 65536 is already the maximum
19736 Sep 22 23:14:27.892 INFO Opened existing region file "/tmp/downstairs-p0sBB51P/region.json"
19737 Sep 22 23:14:27.892 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:65488, task: repair
19738 Sep 22 23:14:27.892 INFO Database read version 1
19739 Sep 22 23:14:27.892 INFO Database write version 1
19740 Sep 22 23:14:27.892 INFO Using repair address: 127.0.0.1:65488, task: main
19741 Sep 22 23:14:27.892 INFO No SSL acceptor configured, task: main
19742 Sep 22 23:14:27.893 INFO listening on 127.0.0.1:0, task: main
19743 Sep 22 23:14:27.893 INFO listening on 127.0.0.1:0, task: main
19744 Sep 22 23:14:27.893 WARN 63fa212f-4948-4c7a-a14a-d7319688df16 request to replace downstairs 127.0.0.1:47413 with 127.0.0.1:38707
19745 Sep 22 23:14:27.893 WARN 63fa212f-4948-4c7a-a14a-d7319688df16 downstairs 127.0.0.1:47413 not found
19746 Sep 22 23:14:27.895 INFO current number of open files limit 65536 is already the maximum
19747 Sep 22 23:14:27.895 INFO Opened existing region file "/tmp/downstairs-NcuCFDzp/region.json"
19748 Sep 22 23:14:27.895 INFO Database read version 1
19749 Sep 22 23:14:27.895 INFO Database write version 1
19750 Sep 22 23:14:27.895 INFO UUID: 0f3af6ad-c169-4a67-ae79-bc5859c15909
19751 Sep 22 23:14:27.895 INFO Blocks per extent:5 Total Extents: 2
19752 Sep 22 23:14:27.896 INFO Crucible Version: Crucible Version: 0.0.1
19753 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19754 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19755 rustc: 1.70.0 stable x86_64-unknown-illumos
19756 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19757 Sep 22 23:14:27.896 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19758 Sep 22 23:14:27.896 INFO Using address: 127.0.0.1:48636, task: main
19759 Sep 22 23:14:27.896 INFO Repair listens on 127.0.0.1:0, task: repair
19760 Sep 22 23:14:27.896 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:59973, task: repair
19761 Sep 22 23:14:27.896 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:59973, task: repair
19762 Sep 22 23:14:27.896 INFO listening, local_addr: 127.0.0.1:59973, task: repair
19763 Sep 22 23:14:27.896 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:59973, task: repair
19764 Sep 22 23:14:27.896 INFO Using repair address: 127.0.0.1:59973, task: main
19765 Sep 22 23:14:27.896 INFO No SSL acceptor configured, task: main
19766 Sep 22 23:14:27.897 INFO current number of open files limit 65536 is already the maximum
19767 Sep 22 23:14:27.897 INFO Created new region file "/tmp/downstairs-mBHM8lj3/region.json"
19768 Sep 22 23:14:27.898 INFO UUID: a11fec66-3ce4-4c3c-bad2-f8da3b3c9b3e
19769 Sep 22 23:14:27.898 INFO Blocks per extent:5 Total Extents: 2
19770 Sep 22 23:14:27.898 INFO Crucible Version: Crucible Version: 0.0.1
19771 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19772 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19773 rustc: 1.70.0 stable x86_64-unknown-illumos
19774 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19775 Sep 22 23:14:27.898 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19776 Sep 22 23:14:27.898 INFO Using address: 127.0.0.1:55380, task: main
19777 Sep 22 23:14:27.898 INFO Repair listens on 127.0.0.1:0, task: repair
19778 Sep 22 23:14:27.898 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:45964, task: repair
19779 Sep 22 23:14:27.898 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:45964, task: repair
19780 Sep 22 23:14:27.898 INFO listening, local_addr: 127.0.0.1:45964, task: repair
19781 Sep 22 23:14:27.899 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:45964, task: repair
19782 Sep 22 23:14:27.899 INFO Using repair address: 127.0.0.1:45964, task: main
19783 Sep 22 23:14:27.899 INFO No SSL acceptor configured, task: main
19784 Sep 22 23:14:27.899 INFO current number of open files limit 65536 is already the maximum
19785 Sep 22 23:14:27.899 INFO Created new region file "/tmp/downstairs-6VydVRSH/region.json"
19786 test test::integration_test_volume_replace_bad_downstairs ... ok
19787 Sep 22 23:14:27.899 INFO current number of open files limit 65536 is already the maximum
19788 Sep 22 23:14:27.900 INFO Created new region file "/tmp/downstairs-gsF9WdKL/region.json"
19789 Sep 22 23:14:27.901 INFO current number of open files limit 65536 is already the maximum
19790 Sep 22 23:14:27.901 INFO Opened existing region file "/tmp/downstairs-mBHM8lj3/region.json"
19791 Sep 22 23:14:27.901 INFO Database read version 1
19792 Sep 22 23:14:27.901 INFO Database write version 1
19793 Sep 22 23:14:27.903 INFO UUID: aef90a89-454d-4861-b15d-20c99780f2e8
19794 Sep 22 23:14:27.903 INFO Blocks per extent:5 Total Extents: 2
19795 Sep 22 23:14:27.903 INFO Crucible Version: Crucible Version: 0.0.1
19796 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19797 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19798 rustc: 1.70.0 stable x86_64-unknown-illumos
19799 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19800 Sep 22 23:14:27.903 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19801 Sep 22 23:14:27.903 INFO Using address: 127.0.0.1:59653, task: main
19802 Sep 22 23:14:27.903 INFO Repair listens on 127.0.0.1:0, task: repair
19803 Sep 22 23:14:27.903 INFO current number of open files limit 65536 is already the maximum
19804 Sep 22 23:14:27.903 INFO Opened existing region file "/tmp/downstairs-6VydVRSH/region.json"
19805 Sep 22 23:14:27.903 INFO Database read version 1
19806 Sep 22 23:14:27.903 INFO Database write version 1
19807 Sep 22 23:14:27.903 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51333, task: repair
19808 Sep 22 23:14:27.903 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51333, task: repair
19809 Sep 22 23:14:27.903 INFO listening, local_addr: 127.0.0.1:51333, task: repair
19810 Sep 22 23:14:27.903 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51333, task: repair
19811 Sep 22 23:14:27.903 INFO Using repair address: 127.0.0.1:51333, task: main
19812 Sep 22 23:14:27.904 INFO No SSL acceptor configured, task: main
19813 Sep 22 23:14:27.904 INFO current number of open files limit 65536 is already the maximum
19814 Sep 22 23:14:27.904 INFO current number of open files limit 65536 is already the maximum
19815 Sep 22 23:14:27.904 INFO Opened existing region file "/tmp/downstairs-gsF9WdKL/region.json"
19816 Sep 22 23:14:27.904 INFO Database read version 1
19817 Sep 22 23:14:27.904 INFO Database write version 1
19818 Sep 22 23:14:27.904 INFO Created new region file "/tmp/downstairs-nHU5oa1F/region.json"
19819 Sep 22 23:14:27.906 INFO UUID: 7d1add5d-5a13-414b-bc02-b8d357908557
19820 Sep 22 23:14:27.906 INFO Blocks per extent:5 Total Extents: 2
19821 Sep 22 23:14:27.907 INFO Crucible Version: Crucible Version: 0.0.1
19822 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19823 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19824 rustc: 1.70.0 stable x86_64-unknown-illumos
19825 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19826 Sep 22 23:14:27.907 INFO UUID: 9a08289f-c73f-4f3d-af8e-b9eb4fe46f56
19827 Sep 22 23:14:27.907 INFO Blocks per extent:5 Total Extents: 2
19828 Sep 22 23:14:27.907 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19829 Sep 22 23:14:27.907 INFO Using address: 127.0.0.1:41467, task: main
19830 Sep 22 23:14:27.907 INFO Crucible Version: Crucible Version: 0.0.1
19831 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19832 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19833 rustc: 1.70.0 stable x86_64-unknown-illumos
19834 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19835 Sep 22 23:14:27.907 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19836 Sep 22 23:14:27.907 INFO Using address: 127.0.0.1:63499, task: main
19837 Sep 22 23:14:27.907 INFO Repair listens on 127.0.0.1:0, task: repair
19838 Sep 22 23:14:27.907 INFO Repair listens on 127.0.0.1:0, task: repair
19839 Sep 22 23:14:27.907 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52641, task: repair
19840 Sep 22 23:14:27.907 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52641, task: repair
19841 Sep 22 23:14:27.907 INFO listening, local_addr: 127.0.0.1:52641, task: repair
19842 Sep 22 23:14:27.907 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:56010, task: repair
19843 Sep 22 23:14:27.907 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:56010, task: repair
19844 Sep 22 23:14:27.907 INFO listening, local_addr: 127.0.0.1:56010, task: repair
19845 Sep 22 23:14:27.907 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52641, task: repair
19846 Sep 22 23:14:27.907 INFO Using repair address: 127.0.0.1:52641, task: main
19847 Sep 22 23:14:27.907 INFO No SSL acceptor configured, task: main
19848 Sep 22 23:14:27.907 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:56010, task: repair
19849 Sep 22 23:14:27.907 INFO Using repair address: 127.0.0.1:56010, task: main
19850 Sep 22 23:14:27.907 INFO No SSL acceptor configured, task: main
19851 Sep 22 23:14:27.908 INFO current number of open files limit 65536 is already the maximum
19852 Sep 22 23:14:27.908 INFO current number of open files limit 65536 is already the maximum
19853 Sep 22 23:14:27.908 INFO Created new region file "/tmp/downstairs-NOXcduXR/region.json"
19854 Sep 22 23:14:27.908 INFO Created new region file "/tmp/downstairs-An1Xd1ec/region.json"
19855 Sep 22 23:14:27.908 INFO current number of open files limit 65536 is already the maximum
19856 Sep 22 23:14:27.908 INFO Opened existing region file "/tmp/downstairs-nHU5oa1F/region.json"
19857 Sep 22 23:14:27.908 INFO Database read version 1
19858 Sep 22 23:14:27.908 INFO Database write version 1
19859 Sep 22 23:14:27.911 INFO UUID: 6085db72-2a98-4898-ac66-9af2876a589f
19860 Sep 22 23:14:27.911 INFO Blocks per extent:5 Total Extents: 2
19861 Sep 22 23:14:27.911 INFO Crucible Version: Crucible Version: 0.0.1
19862 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19863 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19864 rustc: 1.70.0 stable x86_64-unknown-illumos
19865 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19866 Sep 22 23:14:27.911 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19867 Sep 22 23:14:27.911 INFO Using address: 127.0.0.1:56727, task: main
19868 Sep 22 23:14:27.912 INFO Repair listens on 127.0.0.1:0, task: repair
19869 Sep 22 23:14:27.912 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50342, task: repair
19870 Sep 22 23:14:27.912 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50342, task: repair
19871 Sep 22 23:14:27.912 INFO listening, local_addr: 127.0.0.1:50342, task: repair
19872 Sep 22 23:14:27.912 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50342, task: repair
19873 Sep 22 23:14:27.912 INFO Using repair address: 127.0.0.1:50342, task: main
19874 Sep 22 23:14:27.912 INFO No SSL acceptor configured, task: main
19875 Sep 22 23:14:27.912 INFO Upstairs starts
19876 Sep 22 23:14:27.912 INFO Crucible Version: BuildInfo {
19877 version: "0.0.1",
19878 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
19879 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
19880 git_branch: "main",
19881 rustc_semver: "1.70.0",
19882 rustc_channel: "stable",
19883 rustc_host_triple: "x86_64-unknown-illumos",
19884 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
19885 cargo_triple: "x86_64-unknown-illumos",
19886 debug: true,
19887 opt_level: 0,
19888 }
19889 Sep 22 23:14:27.912 INFO Upstairs <-> Downstairs Message Version: 4
19890 Sep 22 23:14:27.913 INFO Crucible stats registered with UUID: 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3
19891 Sep 22 23:14:27.913 INFO Crucible 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 has session id: 165cc207-0c17-4a5d-a35b-263abf9ec78c
19892 Sep 22 23:14:27.913 INFO listening on 127.0.0.1:0, task: main
19893 Sep 22 23:14:27.913 INFO listening on 127.0.0.1:0, task: main
19894 Sep 22 23:14:27.913 INFO current number of open files limit 65536 is already the maximum
19895 Sep 22 23:14:27.913 INFO listening on 127.0.0.1:0, task: main
19896 Sep 22 23:14:27.913 INFO Opened existing region file "/tmp/downstairs-An1Xd1ec/region.json"
19897 Sep 22 23:14:27.913 INFO Database read version 1
19898 Sep 22 23:14:27.913 INFO listening on 127.0.0.1:0, task: main
19899 Sep 22 23:14:27.913 INFO Database write version 1
19900 Sep 22 23:14:27.913 INFO listening on 127.0.0.1:0, task: main
19901 Sep 22 23:14:27.913 INFO listening on 127.0.0.1:0, task: main
19902 Sep 22 23:14:27.913 INFO [0] connecting to 127.0.0.1:53467, looper: 0
19903 Sep 22 23:14:27.913 INFO [1] connecting to 127.0.0.1:38389, looper: 1
19904 Sep 22 23:14:27.913 INFO current number of open files limit 65536 is already the maximum
19905 Sep 22 23:14:27.913 INFO Opened existing region file "/tmp/downstairs-NOXcduXR/region.json"
19906 Sep 22 23:14:27.913 INFO Database read version 1
19907 Sep 22 23:14:27.913 INFO [2] connecting to 127.0.0.1:58380, looper: 2
19908 Sep 22 23:14:27.913 INFO Database write version 1
19909 Sep 22 23:14:27.913 INFO up_listen starts, task: up_listen
19910 Sep 22 23:14:27.913 INFO Wait for all three downstairs to come online
19911 Sep 22 23:14:27.913 INFO Flush timeout: 0.5
19912 Sep 22 23:14:27.913 INFO accepted connection from 127.0.0.1:43005, task: main
19913 Sep 22 23:14:27.913 INFO accepted connection from 127.0.0.1:48596, task: main
19914 Sep 22 23:14:27.913 INFO accepted connection from 127.0.0.1:56983, task: main
19915 Sep 22 23:14:27.914 INFO [0] 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 looper connected, looper: 0
19916 Sep 22 23:14:27.914 INFO [0] Proc runs for 127.0.0.1:53467 in state New
19917 Sep 22 23:14:27.914 INFO [1] 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 looper connected, looper: 1
19918 Sep 22 23:14:27.914 INFO [1] Proc runs for 127.0.0.1:38389 in state New
19919 Sep 22 23:14:27.914 INFO [2] 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 looper connected, looper: 2
19920 Sep 22 23:14:27.914 INFO [2] Proc runs for 127.0.0.1:58380 in state New
19921 Sep 22 23:14:27.914 INFO Upstairs starts
19922 Sep 22 23:14:27.914 INFO Crucible Version: BuildInfo {
19923 version: "0.0.1",
19924 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
19925 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
19926 git_branch: "main",
19927 rustc_semver: "1.70.0",
19928 rustc_channel: "stable",
19929 rustc_host_triple: "x86_64-unknown-illumos",
19930 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
19931 cargo_triple: "x86_64-unknown-illumos",
19932 debug: true,
19933 opt_level: 0,
19934 }
19935 Sep 22 23:14:27.914 INFO Upstairs <-> Downstairs Message Version: 4
19936 Sep 22 23:14:27.914 INFO Crucible stats registered with UUID: 0b427989-df2d-43e0-a6c9-e66d936a0f2b
19937 Sep 22 23:14:27.914 INFO Crucible 0b427989-df2d-43e0-a6c9-e66d936a0f2b has session id: aad445e1-34d8-45f9-8dac-afbdaed159f9
19938 Sep 22 23:14:27.914 INFO Connection request from 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 with version 4, task: proc
19939 Sep 22 23:14:27.914 INFO upstairs UpstairsConnection { upstairs_id: 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3, session_id: 4916c75c-d050-4100-ba66-5ccbce289fef, gen: 1 } connected, version 4, task: proc
19940 Sep 22 23:14:27.914 INFO Connection request from 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 with version 4, task: proc
19941 Sep 22 23:14:27.915 INFO upstairs UpstairsConnection { upstairs_id: 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3, session_id: 4916c75c-d050-4100-ba66-5ccbce289fef, gen: 1 } connected, version 4, task: proc
19942 Sep 22 23:14:27.915 INFO Connection request from 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 with version 4, task: proc
19943 Sep 22 23:14:27.915 INFO upstairs UpstairsConnection { upstairs_id: 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3, session_id: 4916c75c-d050-4100-ba66-5ccbce289fef, gen: 1 } connected, version 4, task: proc
19944 Sep 22 23:14:27.915 INFO [0] connecting to 127.0.0.1:48636, looper: 0
19945 Sep 22 23:14:27.915 INFO [1] connecting to 127.0.0.1:59653, looper: 1
19946 Sep 22 23:14:27.915 INFO [2] connecting to 127.0.0.1:56727, looper: 2
19947 Sep 22 23:14:27.915 INFO up_listen starts, task: up_listen
19948 Sep 22 23:14:27.915 INFO Wait for all three downstairs to come online
19949 Sep 22 23:14:27.915 INFO Flush timeout: 0.5
19950 Sep 22 23:14:27.915 INFO accepted connection from 127.0.0.1:36075, task: main
19951 Sep 22 23:14:27.915 INFO accepted connection from 127.0.0.1:40019, task: main
19952 Sep 22 23:14:27.915 INFO accepted connection from 127.0.0.1:57920, task: main
19953 Sep 22 23:14:27.916 INFO [0] 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 (4916c75c-d050-4100-ba66-5ccbce289fef) New New New ds_transition to WaitActive
19954 Sep 22 23:14:27.916 INFO [0] Transition from New to WaitActive
19955 Sep 22 23:14:27.916 INFO [1] 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 (4916c75c-d050-4100-ba66-5ccbce289fef) WaitActive New New ds_transition to WaitActive
19956 Sep 22 23:14:27.916 INFO [1] Transition from New to WaitActive
19957 Sep 22 23:14:27.916 INFO [2] 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 (4916c75c-d050-4100-ba66-5ccbce289fef) WaitActive WaitActive New ds_transition to WaitActive
19958 Sep 22 23:14:27.916 INFO [2] Transition from New to WaitActive
19959 Sep 22 23:14:27.916 INFO [0] 0b427989-df2d-43e0-a6c9-e66d936a0f2b looper connected, looper: 0
19960 Sep 22 23:14:27.916 INFO [0] Proc runs for 127.0.0.1:48636 in state New
19961 Sep 22 23:14:27.916 INFO UUID: 27e21158-4dda-4f80-89f0-fd433654b40d
19962 Sep 22 23:14:27.916 INFO [1] 0b427989-df2d-43e0-a6c9-e66d936a0f2b looper connected, looper: 1
19963 Sep 22 23:14:27.916 INFO Blocks per extent:5 Total Extents: 2
19964 Sep 22 23:14:27.916 INFO [1] Proc runs for 127.0.0.1:59653 in state New
19965 Sep 22 23:14:27.916 INFO [2] 0b427989-df2d-43e0-a6c9-e66d936a0f2b looper connected, looper: 2
19966 Sep 22 23:14:27.916 INFO Crucible Version: Crucible Version: 0.0.1
19967 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19968 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19969 rustc: 1.70.0 stable x86_64-unknown-illumos
19970 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19971 Sep 22 23:14:27.916 INFO [2] Proc runs for 127.0.0.1:56727 in state New
19972 Sep 22 23:14:27.916 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19973 Sep 22 23:14:27.916 INFO Using address: 127.0.0.1:35764, task: main
19974 Sep 22 23:14:27.916 INFO UUID: 2f70c471-1dde-474e-a4ed-18909de1eaf6
19975 Sep 22 23:14:27.916 INFO Blocks per extent:5 Total Extents: 2
19976 Sep 22 23:14:27.916 INFO Crucible Version: Crucible Version: 0.0.1
19977 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
19978 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
19979 rustc: 1.70.0 stable x86_64-unknown-illumos
19980 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
19981 Sep 22 23:14:27.916 INFO Upstairs <-> Downstairs Message Version: 4, task: main
19982 Sep 22 23:14:27.916 INFO Using address: 127.0.0.1:35263, task: main
19983 Sep 22 23:14:27.916 INFO Connection request from 0b427989-df2d-43e0-a6c9-e66d936a0f2b with version 4, task: proc
19984 Sep 22 23:14:27.916 INFO upstairs UpstairsConnection { upstairs_id: 0b427989-df2d-43e0-a6c9-e66d936a0f2b, session_id: b04125f1-41ca-469f-96f2-8c8ce78343c3, gen: 1 } connected, version 4, task: proc
19985 Sep 22 23:14:27.916 INFO Connection request from 0b427989-df2d-43e0-a6c9-e66d936a0f2b with version 4, task: proc
19986 Sep 22 23:14:27.916 INFO upstairs UpstairsConnection { upstairs_id: 0b427989-df2d-43e0-a6c9-e66d936a0f2b, session_id: b04125f1-41ca-469f-96f2-8c8ce78343c3, gen: 1 } connected, version 4, task: proc
19987 Sep 22 23:14:27.916 INFO Repair listens on 127.0.0.1:0, task: repair
19988 Sep 22 23:14:27.916 INFO Connection request from 0b427989-df2d-43e0-a6c9-e66d936a0f2b with version 4, task: proc
19989 Sep 22 23:14:27.916 INFO upstairs UpstairsConnection { upstairs_id: 0b427989-df2d-43e0-a6c9-e66d936a0f2b, session_id: b04125f1-41ca-469f-96f2-8c8ce78343c3, gen: 1 } connected, version 4, task: proc
19990 Sep 22 23:14:27.917 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33592, task: repair
19991 Sep 22 23:14:27.917 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33592, task: repair
19992 Sep 22 23:14:27.917 INFO Repair listens on 127.0.0.1:0, task: repair
19993 Sep 22 23:14:27.917 INFO listening, local_addr: 127.0.0.1:33592, task: repair
19994 Sep 22 23:14:27.917 INFO [0] 0b427989-df2d-43e0-a6c9-e66d936a0f2b (b04125f1-41ca-469f-96f2-8c8ce78343c3) New New New ds_transition to WaitActive
19995 Sep 22 23:14:27.917 INFO [0] Transition from New to WaitActive
19996 Sep 22 23:14:27.917 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:47379, task: repair
19997 Sep 22 23:14:27.917 INFO [1] 0b427989-df2d-43e0-a6c9-e66d936a0f2b (b04125f1-41ca-469f-96f2-8c8ce78343c3) WaitActive New New ds_transition to WaitActive
19998 Sep 22 23:14:27.917 INFO [1] Transition from New to WaitActive
19999 Sep 22 23:14:27.917 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:47379, task: repair
20000 Sep 22 23:14:27.917 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33592, task: repair
20001 Sep 22 23:14:27.917 INFO [2] 0b427989-df2d-43e0-a6c9-e66d936a0f2b (b04125f1-41ca-469f-96f2-8c8ce78343c3) WaitActive WaitActive New ds_transition to WaitActive
20002 Sep 22 23:14:27.917 INFO [2] Transition from New to WaitActive
20003 Sep 22 23:14:27.917 INFO Using repair address: 127.0.0.1:33592, task: main
20004 Sep 22 23:14:27.917 INFO No SSL acceptor configured, task: main
20005 Sep 22 23:14:27.917 INFO listening, local_addr: 127.0.0.1:47379, task: repair
20006 The guest has requested activation
20007 Sep 22 23:14:27.917 INFO 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 active request set
20008 Sep 22 23:14:27.917 INFO [0] received activate with gen 1
20009 Sep 22 23:14:27.917 INFO [0] client got ds_active_rx, promote! session 4916c75c-d050-4100-ba66-5ccbce289fef
20010 Sep 22 23:14:27.917 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:47379, task: repair
20011 Sep 22 23:14:27.917 INFO current number of open files limit 65536 is already the maximum
20012 Sep 22 23:14:27.917 INFO [1] received activate with gen 1
20013 Sep 22 23:14:27.917 INFO [1] client got ds_active_rx, promote! session 4916c75c-d050-4100-ba66-5ccbce289fef
20014 Sep 22 23:14:27.917 INFO [2] received activate with gen 1
20015 Sep 22 23:14:27.917 INFO [2] client got ds_active_rx, promote! session 4916c75c-d050-4100-ba66-5ccbce289fef
20016 Sep 22 23:14:27.917 INFO Using repair address: 127.0.0.1:47379, task: main
20017 Sep 22 23:14:27.917 INFO Created new region file "/tmp/downstairs-xae6Zhcj/region.json"
20018 Sep 22 23:14:27.917 INFO No SSL acceptor configured, task: main
20019 Sep 22 23:14:27.917 INFO UpstairsConnection { upstairs_id: 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3, session_id: 4916c75c-d050-4100-ba66-5ccbce289fef, gen: 1 } is now active (read-write)
20020 Sep 22 23:14:27.917 INFO UpstairsConnection { upstairs_id: 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3, session_id: 4916c75c-d050-4100-ba66-5ccbce289fef, gen: 1 } is now active (read-write)
20021 Sep 22 23:14:27.917 INFO current number of open files limit 65536 is already the maximum
20022 Sep 22 23:14:27.917 INFO UpstairsConnection { upstairs_id: 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3, session_id: 4916c75c-d050-4100-ba66-5ccbce289fef, gen: 1 } is now active (read-write)
20023 Sep 22 23:14:27.918 INFO Created new region file "/tmp/downstairs-U79LijME/region.json"
20024 Sep 22 23:14:27.918 INFO [0] downstairs client at 127.0.0.1:53467 has UUID b78cfd9a-1100-4037-aec9-2c2cabfc0ce6
20025 Sep 22 23:14:27.918 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b78cfd9a-1100-4037-aec9-2c2cabfc0ce6, encrypted: true, database_read_version: 1, database_write_version: 1 }
20026 Sep 22 23:14:27.918 INFO 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 WaitActive WaitActive WaitActive
20027 Sep 22 23:14:27.918 INFO [1] downstairs client at 127.0.0.1:38389 has UUID 555bafbf-d9bb-408a-aa71-328bc8a8cc92
20028 Sep 22 23:14:27.918 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 555bafbf-d9bb-408a-aa71-328bc8a8cc92, encrypted: true, database_read_version: 1, database_write_version: 1 }
20029 Sep 22 23:14:27.918 INFO 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 WaitActive WaitActive WaitActive
20030 Sep 22 23:14:27.918 INFO [2] downstairs client at 127.0.0.1:58380 has UUID 16b61f39-4c68-4f56-ae07-8508cf0adb07
20031 Sep 22 23:14:27.918 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 16b61f39-4c68-4f56-ae07-8508cf0adb07, encrypted: true, database_read_version: 1, database_write_version: 1 }
20032 Sep 22 23:14:27.918 INFO 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 WaitActive WaitActive WaitActive
20033 Sep 22 23:14:27.918 INFO Current flush_numbers [0..12]: [0, 0]
20034 Sep 22 23:14:27.918 INFO Downstairs has completed Negotiation, task: proc
20035 Sep 22 23:14:27.919 INFO Current flush_numbers [0..12]: [0, 0]
20036 Sep 22 23:14:27.919 INFO Downstairs has completed Negotiation, task: proc
20037 Sep 22 23:14:27.919 INFO Current flush_numbers [0..12]: [0, 0]
20038 Sep 22 23:14:27.919 INFO Downstairs has completed Negotiation, task: proc
20039 Sep 22 23:14:27.919 INFO [0] 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 (4916c75c-d050-4100-ba66-5ccbce289fef) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
20040 Sep 22 23:14:27.919 INFO [0] Transition from WaitActive to WaitQuorum
20041 Sep 22 23:14:27.919 WARN [0] new RM replaced this: None
20042 Sep 22 23:14:27.919 INFO [0] Starts reconcile loop
20043 Sep 22 23:14:27.919 INFO [1] 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 (4916c75c-d050-4100-ba66-5ccbce289fef) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
20044 Sep 22 23:14:27.919 INFO [1] Transition from WaitActive to WaitQuorum
20045 Sep 22 23:14:27.919 WARN [1] new RM replaced this: None
20046 Sep 22 23:14:27.919 INFO [1] Starts reconcile loop
20047 Sep 22 23:14:27.919 INFO [2] 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 (4916c75c-d050-4100-ba66-5ccbce289fef) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
20048 Sep 22 23:14:27.919 INFO [2] Transition from WaitActive to WaitQuorum
20049 Sep 22 23:14:27.919 WARN [2] new RM replaced this: None
20050 Sep 22 23:14:27.919 INFO [2] Starts reconcile loop
20051 Sep 22 23:14:27.920 INFO [0] 127.0.0.1:53467 task reports connection:true
20052 Sep 22 23:14:27.920 INFO 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 WaitQuorum WaitQuorum WaitQuorum
20053 Sep 22 23:14:27.920 INFO [0]R flush_numbers: [0, 0]
20054 Sep 22 23:14:27.920 INFO [0]R generation: [0, 0]
20055 Sep 22 23:14:27.920 INFO [0]R dirty: [false, false]
20056 Sep 22 23:14:27.920 INFO [1]R flush_numbers: [0, 0]
20057 Sep 22 23:14:27.920 INFO [1]R generation: [0, 0]
20058 Sep 22 23:14:27.920 INFO [1]R dirty: [false, false]
20059 Sep 22 23:14:27.920 INFO [2]R flush_numbers: [0, 0]
20060 Sep 22 23:14:27.920 INFO [2]R generation: [0, 0]
20061 Sep 22 23:14:27.920 INFO [2]R dirty: [false, false]
20062 Sep 22 23:14:27.920 INFO Max found gen is 1
20063 Sep 22 23:14:27.920 INFO Generation requested: 1 >= found:1
20064 Sep 22 23:14:27.920 INFO Next flush: 1
20065 Sep 22 23:14:27.920 INFO All extents match
20066 Sep 22 23:14:27.920 INFO No downstairs repair required
20067 Sep 22 23:14:27.920 INFO No initial repair work was required
20068 Sep 22 23:14:27.920 INFO Set Downstairs and Upstairs active
20069 Sep 22 23:14:27.920 INFO 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 is now active with session: 4916c75c-d050-4100-ba66-5ccbce289fef
20070 Sep 22 23:14:27.920 INFO 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 Set Active after no repair
20071 Sep 22 23:14:27.920 INFO Notify all downstairs, region set compare is done.
20072 Sep 22 23:14:27.920 INFO Set check for repair
20073 Sep 22 23:14:27.920 INFO [1] 127.0.0.1:38389 task reports connection:true
20074 Sep 22 23:14:27.920 INFO 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 Active Active Active
20075 Sep 22 23:14:27.920 INFO Set check for repair
20076 Sep 22 23:14:27.920 INFO [2] 127.0.0.1:58380 task reports connection:true
20077 Sep 22 23:14:27.920 INFO 93c2fcb7-2c23-478d-80cc-ee6a7af6fab3 Active Active Active
20078 Sep 22 23:14:27.920 INFO Set check for repair
20079 Sep 22 23:14:27.920 INFO [0] received reconcile message
20080 Sep 22 23:14:27.920 INFO [0] All repairs completed, exit
20081 Sep 22 23:14:27.920 INFO [0] Starts cmd_loop
20082 Sep 22 23:14:27.920 INFO [1] received reconcile message
20083 Sep 22 23:14:27.920 INFO [1] All repairs completed, exit
20084 Sep 22 23:14:27.920 INFO [1] Starts cmd_loop
20085 Sep 22 23:14:27.920 INFO [2] received reconcile message
20086 Sep 22 23:14:27.920 INFO [2] All repairs completed, exit
20087 Sep 22 23:14:27.920 INFO [2] Starts cmd_loop
20088 The guest has finished waiting for activation
20089 The guest has requested activation
20090 Sep 22 23:14:27.921 INFO 0b427989-df2d-43e0-a6c9-e66d936a0f2b active request set
20091 Sep 22 23:14:27.921 INFO [0] received activate with gen 1
20092 Sep 22 23:14:27.921 INFO [0] client got ds_active_rx, promote! session b04125f1-41ca-469f-96f2-8c8ce78343c3
20093 Sep 22 23:14:27.921 INFO [1] received activate with gen 1
20094 Sep 22 23:14:27.921 INFO [1] client got ds_active_rx, promote! session b04125f1-41ca-469f-96f2-8c8ce78343c3
20095 Sep 22 23:14:27.921 INFO [2] received activate with gen 1
20096 Sep 22 23:14:27.921 INFO [2] client got ds_active_rx, promote! session b04125f1-41ca-469f-96f2-8c8ce78343c3
20097 Sep 22 23:14:27.921 INFO UpstairsConnection { upstairs_id: 0b427989-df2d-43e0-a6c9-e66d936a0f2b, session_id: b04125f1-41ca-469f-96f2-8c8ce78343c3, gen: 1 } is now active (read-write)
20098 Sep 22 23:14:27.921 INFO current number of open files limit 65536 is already the maximum
20099 Sep 22 23:14:27.921 INFO Opened existing region file "/tmp/downstairs-xae6Zhcj/region.json"
20100 Sep 22 23:14:27.921 INFO Database read version 1
20101 Sep 22 23:14:27.921 INFO UpstairsConnection { upstairs_id: 0b427989-df2d-43e0-a6c9-e66d936a0f2b, session_id: b04125f1-41ca-469f-96f2-8c8ce78343c3, gen: 1 } is now active (read-write)
20102 Sep 22 23:14:27.921 INFO Database write version 1
20103 Sep 22 23:14:27.921 INFO UpstairsConnection { upstairs_id: 0b427989-df2d-43e0-a6c9-e66d936a0f2b, session_id: b04125f1-41ca-469f-96f2-8c8ce78343c3, gen: 1 } is now active (read-write)
20104 Sep 22 23:14:27.921 INFO current number of open files limit 65536 is already the maximum
20105 Sep 22 23:14:27.921 INFO Opened existing region file "/tmp/downstairs-U79LijME/region.json"
20106 Sep 22 23:14:27.921 INFO Database read version 1
20107 Sep 22 23:14:27.921 INFO Database write version 1
20108 Sep 22 23:14:27.921 INFO [0] downstairs client at 127.0.0.1:48636 has UUID 0f3af6ad-c169-4a67-ae79-bc5859c15909
20109 Sep 22 23:14:27.921 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 0f3af6ad-c169-4a67-ae79-bc5859c15909, encrypted: true, database_read_version: 1, database_write_version: 1 }
20110 Sep 22 23:14:27.921 INFO 0b427989-df2d-43e0-a6c9-e66d936a0f2b WaitActive WaitActive WaitActive
20111 Sep 22 23:14:27.922 INFO [1] downstairs client at 127.0.0.1:59653 has UUID aef90a89-454d-4861-b15d-20c99780f2e8
20112 Sep 22 23:14:27.922 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: aef90a89-454d-4861-b15d-20c99780f2e8, encrypted: true, database_read_version: 1, database_write_version: 1 }
20113 Sep 22 23:14:27.922 INFO 0b427989-df2d-43e0-a6c9-e66d936a0f2b WaitActive WaitActive WaitActive
20114 Sep 22 23:14:27.922 INFO [2] downstairs client at 127.0.0.1:56727 has UUID 6085db72-2a98-4898-ac66-9af2876a589f
20115 Sep 22 23:14:27.922 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6085db72-2a98-4898-ac66-9af2876a589f, encrypted: true, database_read_version: 1, database_write_version: 1 }
20116 Sep 22 23:14:27.922 INFO 0b427989-df2d-43e0-a6c9-e66d936a0f2b WaitActive WaitActive WaitActive
20117 Sep 22 23:14:27.922 INFO Current flush_numbers [0..12]: [0, 0]
20118 Sep 22 23:14:27.922 INFO Downstairs has completed Negotiation, task: proc
20119 Sep 22 23:14:27.922 INFO Current flush_numbers [0..12]: [0, 0]
20120 Sep 22 23:14:27.922 INFO Downstairs has completed Negotiation, task: proc
20121 Sep 22 23:14:27.922 INFO Current flush_numbers [0..12]: [0, 0]
20122 Sep 22 23:14:27.923 INFO Downstairs has completed Negotiation, task: proc
20123 Sep 22 23:14:27.923 INFO [0] 0b427989-df2d-43e0-a6c9-e66d936a0f2b (b04125f1-41ca-469f-96f2-8c8ce78343c3) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
20124 Sep 22 23:14:27.923 INFO [0] Transition from WaitActive to WaitQuorum
20125 Sep 22 23:14:27.923 WARN [0] new RM replaced this: None
20126 Sep 22 23:14:27.923 INFO [0] Starts reconcile loop
20127 Sep 22 23:14:27.923 INFO [1] 0b427989-df2d-43e0-a6c9-e66d936a0f2b (b04125f1-41ca-469f-96f2-8c8ce78343c3) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
20128 Sep 22 23:14:27.923 INFO [1] Transition from WaitActive to WaitQuorum
20129 Sep 22 23:14:27.923 WARN [1] new RM replaced this: None
20130 Sep 22 23:14:27.923 INFO [1] Starts reconcile loop
20131 Sep 22 23:14:27.923 INFO [2] 0b427989-df2d-43e0-a6c9-e66d936a0f2b (b04125f1-41ca-469f-96f2-8c8ce78343c3) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
20132 Sep 22 23:14:27.923 INFO [2] Transition from WaitActive to WaitQuorum
20133 Sep 22 23:14:27.923 WARN [2] new RM replaced this: None
20134 Sep 22 23:14:27.923 INFO [2] Starts reconcile loop
20135 Sep 22 23:14:27.923 INFO [0] 127.0.0.1:48636 task reports connection:true
20136 Sep 22 23:14:27.923 INFO 0b427989-df2d-43e0-a6c9-e66d936a0f2b WaitQuorum WaitQuorum WaitQuorum
20137 Sep 22 23:14:27.923 INFO [0]R flush_numbers: [0, 0]
20138 Sep 22 23:14:27.923 INFO [0]R generation: [0, 0]
20139 Sep 22 23:14:27.923 INFO [0]R dirty: [false, false]
20140 Sep 22 23:14:27.923 INFO [1]R flush_numbers: [0, 0]
20141 Sep 22 23:14:27.923 INFO [1]R generation: [0, 0]
20142 Sep 22 23:14:27.923 INFO [1]R dirty: [false, false]
20143 Sep 22 23:14:27.923 INFO [2]R flush_numbers: [0, 0]
20144 Sep 22 23:14:27.923 INFO [2]R generation: [0, 0]
20145 Sep 22 23:14:27.923 INFO [2]R dirty: [false, false]
20146 Sep 22 23:14:27.923 INFO Max found gen is 1
20147 Sep 22 23:14:27.923 INFO Generation requested: 1 >= found:1
20148 Sep 22 23:14:27.923 INFO Next flush: 1
20149 Sep 22 23:14:27.923 INFO All extents match
20150 Sep 22 23:14:27.923 INFO No downstairs repair required
20151 Sep 22 23:14:27.923 INFO No initial repair work was required
20152 Sep 22 23:14:27.923 INFO Set Downstairs and Upstairs active
20153 Sep 22 23:14:27.923 INFO 0b427989-df2d-43e0-a6c9-e66d936a0f2b is now active with session: b04125f1-41ca-469f-96f2-8c8ce78343c3
20154 Sep 22 23:14:27.923 INFO 0b427989-df2d-43e0-a6c9-e66d936a0f2b Set Active after no repair
20155 Sep 22 23:14:27.923 INFO Notify all downstairs, region set compare is done.
20156 Sep 22 23:14:27.923 INFO Set check for repair
20157 Sep 22 23:14:27.923 INFO [1] 127.0.0.1:59653 task reports connection:true
20158 Sep 22 23:14:27.923 INFO 0b427989-df2d-43e0-a6c9-e66d936a0f2b Active Active Active
20159 Sep 22 23:14:27.923 INFO Set check for repair
20160 Sep 22 23:14:27.923 INFO [2] 127.0.0.1:56727 task reports connection:true
20161 Sep 22 23:14:27.923 INFO 0b427989-df2d-43e0-a6c9-e66d936a0f2b Active Active Active
20162 Sep 22 23:14:27.923 INFO Set check for repair
20163 Sep 22 23:14:27.923 INFO [0] received reconcile message
20164 Sep 22 23:14:27.923 INFO [0] All repairs completed, exit
20165 Sep 22 23:14:27.924 INFO [0] Starts cmd_loop
20166 Sep 22 23:14:27.924 INFO [1] received reconcile message
20167 Sep 22 23:14:27.924 INFO [1] All repairs completed, exit
20168 Sep 22 23:14:27.924 INFO [1] Starts cmd_loop
20169 Sep 22 23:14:27.924 INFO [2] received reconcile message
20170 Sep 22 23:14:27.924 INFO [2] All repairs completed, exit
20171 Sep 22 23:14:27.924 INFO [2] Starts cmd_loop
20172 The guest has finished waiting for activation
20173 Sep 22 23:14:27.924 INFO UUID: 52a150fb-f4af-4b8f-9c89-001d2d0a9421
20174 Sep 22 23:14:27.924 INFO Blocks per extent:5 Total Extents: 2
20175 Sep 22 23:14:27.924 INFO UUID: c1d22958-e424-406b-a939-8b7b5f9f6177
20176 Sep 22 23:14:27.924 INFO Blocks per extent:5 Total Extents: 2
20177 Sep 22 23:14:27.924 INFO Crucible Version: Crucible Version: 0.0.1
20178 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20179 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20180 rustc: 1.70.0 stable x86_64-unknown-illumos
20181 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20182 Sep 22 23:14:27.924 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20183 Sep 22 23:14:27.924 INFO Crucible Version: Crucible Version: 0.0.1
20184 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20185 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20186 rustc: 1.70.0 stable x86_64-unknown-illumos
20187 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20188 Sep 22 23:14:27.924 INFO Using address: 127.0.0.1:61372, task: main
20189 Sep 22 23:14:27.924 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20190 Sep 22 23:14:27.924 INFO Using address: 127.0.0.1:43569, task: main
20191 Sep 22 23:14:27.924 DEBG IO Write 1000 has deps []
20192 Sep 22 23:14:27.925 DEBG up_ds_listen was notified
20193 Sep 22 23:14:27.925 DEBG up_ds_listen process 1000
20194 Sep 22 23:14:27.925 INFO Repair listens on 127.0.0.1:0, task: repair
20195 Sep 22 23:14:27.925 DEBG [A] ack job 1000:1, : downstairs
20196 Sep 22 23:14:27.925 DEBG up_ds_listen checked 1 jobs, back to waiting
20197 Sep 22 23:14:27.925 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33546, task: repair
20198 Sep 22 23:14:27.925 INFO Repair listens on 127.0.0.1:0, task: repair
20199 Sep 22 23:14:27.925 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33546, task: repair
20200 Sep 22 23:14:27.925 INFO listening, local_addr: 127.0.0.1:33546, task: repair
20201 Sep 22 23:14:27.925 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:41883, task: repair
20202 Sep 22 23:14:27.925 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:41883, task: repair
20203 Sep 22 23:14:27.925 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33546, task: repair
20204 Sep 22 23:14:27.925 INFO listening, local_addr: 127.0.0.1:41883, task: repair
20205 Sep 22 23:14:27.925 INFO Using repair address: 127.0.0.1:33546, task: main
20206 Sep 22 23:14:27.925 INFO No SSL acceptor configured, task: main
20207 Sep 22 23:14:27.925 INFO current number of open files limit 65536 is already the maximum
20208 Sep 22 23:14:27.925 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:41883, task: repair
20209 Sep 22 23:14:27.925 INFO Created new region file "/tmp/downstairs-Cj3zW210/region.json"
20210 Sep 22 23:14:27.925 INFO Using repair address: 127.0.0.1:41883, task: main
20211 Sep 22 23:14:27.925 INFO No SSL acceptor configured, task: main
20212 Sep 22 23:14:27.926 DEBG Write :1000 deps:[] res:true
20213 Sep 22 23:14:27.926 INFO Upstairs starts
20214 Sep 22 23:14:27.926 INFO Crucible Version: BuildInfo {
20215 version: "0.0.1",
20216 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
20217 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
20218 git_branch: "main",
20219 rustc_semver: "1.70.0",
20220 rustc_channel: "stable",
20221 rustc_host_triple: "x86_64-unknown-illumos",
20222 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
20223 cargo_triple: "x86_64-unknown-illumos",
20224 debug: true,
20225 opt_level: 0,
20226 }
20227 Sep 22 23:14:27.926 INFO Upstairs <-> Downstairs Message Version: 4
20228 Sep 22 23:14:27.926 INFO Crucible stats registered with UUID: c29f404c-9fcb-4abe-97d9-77d5448a344e
20229 Sep 22 23:14:27.926 INFO Crucible c29f404c-9fcb-4abe-97d9-77d5448a344e has session id: 224aedb5-8127-4463-8d4f-ad8a5dc531ba
20230 Sep 22 23:14:27.926 DEBG Write :1000 deps:[] res:true
20231 Sep 22 23:14:27.926 INFO listening on 127.0.0.1:0, task: main
20232 Sep 22 23:14:27.926 INFO listening on 127.0.0.1:0, task: main
20233 Sep 22 23:14:27.927 INFO listening on 127.0.0.1:0, task: main
20234 Sep 22 23:14:27.927 INFO [0] connecting to 127.0.0.1:63499, looper: 0
20235 Sep 22 23:14:27.927 INFO [1] connecting to 127.0.0.1:35263, looper: 1
20236 Sep 22 23:14:27.927 DEBG Write :1000 deps:[] res:true
20237 Sep 22 23:14:27.927 INFO [2] connecting to 127.0.0.1:61372, looper: 2
20238 Sep 22 23:14:27.927 INFO up_listen starts, task: up_listen
20239 Sep 22 23:14:27.927 INFO Wait for all three downstairs to come online
20240 Sep 22 23:14:27.927 INFO Flush timeout: 0.5
20241 Sep 22 23:14:27.927 INFO accepted connection from 127.0.0.1:36199, task: main
20242 Sep 22 23:14:27.927 DEBG IO Write 1000 has deps []
20243 Sep 22 23:14:27.927 INFO accepted connection from 127.0.0.1:46428, task: main
20244 Sep 22 23:14:27.928 INFO accepted connection from 127.0.0.1:42343, task: main
20245 Sep 22 23:14:27.928 DEBG up_ds_listen was notified
20246 Sep 22 23:14:27.928 DEBG up_ds_listen process 1000
20247 Sep 22 23:14:27.928 INFO [0] c29f404c-9fcb-4abe-97d9-77d5448a344e looper connected, looper: 0
20248 Sep 22 23:14:27.928 DEBG [A] ack job 1000:1, : downstairs
20249 Sep 22 23:14:27.928 INFO [0] Proc runs for 127.0.0.1:63499 in state New
20250 Sep 22 23:14:27.928 DEBG up_ds_listen checked 1 jobs, back to waiting
20251 Sep 22 23:14:27.928 INFO [1] c29f404c-9fcb-4abe-97d9-77d5448a344e looper connected, looper: 1
20252 Sep 22 23:14:27.928 INFO [1] Proc runs for 127.0.0.1:35263 in state New
20253 Sep 22 23:14:27.928 INFO [2] c29f404c-9fcb-4abe-97d9-77d5448a344e looper connected, looper: 2
20254 Sep 22 23:14:27.928 INFO [2] Proc runs for 127.0.0.1:61372 in state New
20255 Sep 22 23:14:27.928 INFO Connection request from c29f404c-9fcb-4abe-97d9-77d5448a344e with version 4, task: proc
20256 Sep 22 23:14:27.928 INFO upstairs UpstairsConnection { upstairs_id: c29f404c-9fcb-4abe-97d9-77d5448a344e, session_id: d5786959-64ba-4a45-85e5-b539511f4f59, gen: 1 } connected, version 4, task: proc
20257 Sep 22 23:14:27.928 INFO Connection request from c29f404c-9fcb-4abe-97d9-77d5448a344e with version 4, task: proc
20258 Sep 22 23:14:27.928 INFO upstairs UpstairsConnection { upstairs_id: c29f404c-9fcb-4abe-97d9-77d5448a344e, session_id: d5786959-64ba-4a45-85e5-b539511f4f59, gen: 1 } connected, version 4, task: proc
20259 Sep 22 23:14:27.928 INFO Connection request from c29f404c-9fcb-4abe-97d9-77d5448a344e with version 4, task: proc
20260 Sep 22 23:14:27.928 INFO upstairs UpstairsConnection { upstairs_id: c29f404c-9fcb-4abe-97d9-77d5448a344e, session_id: d5786959-64ba-4a45-85e5-b539511f4f59, gen: 1 } connected, version 4, task: proc
20261 The guest has requested activation
20262 Sep 22 23:14:27.929 INFO [0] c29f404c-9fcb-4abe-97d9-77d5448a344e (d5786959-64ba-4a45-85e5-b539511f4f59) New New New ds_transition to WaitActive
20263 Sep 22 23:14:27.929 INFO [0] Transition from New to WaitActive
20264 Sep 22 23:14:27.929 INFO [1] c29f404c-9fcb-4abe-97d9-77d5448a344e (d5786959-64ba-4a45-85e5-b539511f4f59) WaitActive New New ds_transition to WaitActive
20265 Sep 22 23:14:27.929 INFO [1] Transition from New to WaitActive
20266 Sep 22 23:14:27.929 INFO [2] c29f404c-9fcb-4abe-97d9-77d5448a344e (d5786959-64ba-4a45-85e5-b539511f4f59) WaitActive WaitActive New ds_transition to WaitActive
20267 Sep 22 23:14:27.929 INFO [2] Transition from New to WaitActive
20268 Sep 22 23:14:27.929 DEBG Write :1000 deps:[] res:true
20269 Sep 22 23:14:27.929 INFO c29f404c-9fcb-4abe-97d9-77d5448a344e active request set
20270 Sep 22 23:14:27.929 INFO current number of open files limit 65536 is already the maximum
20271 Sep 22 23:14:27.929 INFO Opened existing region file "/tmp/downstairs-Cj3zW210/region.json"
20272 Sep 22 23:14:27.929 INFO [0] received activate with gen 1
20273 Sep 22 23:14:27.929 INFO Database read version 1
20274 Sep 22 23:14:27.929 INFO [0] client got ds_active_rx, promote! session d5786959-64ba-4a45-85e5-b539511f4f59
20275 Sep 22 23:14:27.929 INFO Database write version 1
20276 Sep 22 23:14:27.929 INFO [1] received activate with gen 1
20277 Sep 22 23:14:27.929 INFO [1] client got ds_active_rx, promote! session d5786959-64ba-4a45-85e5-b539511f4f59
20278 Sep 22 23:14:27.929 INFO [2] received activate with gen 1
20279 Sep 22 23:14:27.929 INFO [2] client got ds_active_rx, promote! session d5786959-64ba-4a45-85e5-b539511f4f59
20280 Sep 22 23:14:27.929 DEBG Write :1000 deps:[] res:true
20281 Sep 22 23:14:27.929 INFO UpstairsConnection { upstairs_id: c29f404c-9fcb-4abe-97d9-77d5448a344e, session_id: d5786959-64ba-4a45-85e5-b539511f4f59, gen: 1 } is now active (read-write)
20282 Sep 22 23:14:27.930 INFO UpstairsConnection { upstairs_id: c29f404c-9fcb-4abe-97d9-77d5448a344e, session_id: d5786959-64ba-4a45-85e5-b539511f4f59, gen: 1 } is now active (read-write)
20283 Sep 22 23:14:27.930 INFO UpstairsConnection { upstairs_id: c29f404c-9fcb-4abe-97d9-77d5448a344e, session_id: d5786959-64ba-4a45-85e5-b539511f4f59, gen: 1 } is now active (read-write)
20284 Sep 22 23:14:27.930 DEBG Write :1000 deps:[] res:true
20285 Sep 22 23:14:27.930 DEBG IO Read 1001 has deps [JobId(1000)]
20286 Sep 22 23:14:27.930 INFO [0] downstairs client at 127.0.0.1:63499 has UUID 9a08289f-c73f-4f3d-af8e-b9eb4fe46f56
20287 Sep 22 23:14:27.930 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9a08289f-c73f-4f3d-af8e-b9eb4fe46f56, encrypted: true, database_read_version: 1, database_write_version: 1 }
20288 Sep 22 23:14:27.930 INFO c29f404c-9fcb-4abe-97d9-77d5448a344e WaitActive WaitActive WaitActive
20289 Sep 22 23:14:27.930 INFO [1] downstairs client at 127.0.0.1:35263 has UUID 2f70c471-1dde-474e-a4ed-18909de1eaf6
20290 Sep 22 23:14:27.930 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 2f70c471-1dde-474e-a4ed-18909de1eaf6, encrypted: true, database_read_version: 1, database_write_version: 1 }
20291 Sep 22 23:14:27.930 INFO c29f404c-9fcb-4abe-97d9-77d5448a344e WaitActive WaitActive WaitActive
20292 Sep 22 23:14:27.930 INFO [2] downstairs client at 127.0.0.1:61372 has UUID 52a150fb-f4af-4b8f-9c89-001d2d0a9421
20293 Sep 22 23:14:27.930 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 52a150fb-f4af-4b8f-9c89-001d2d0a9421, encrypted: true, database_read_version: 1, database_write_version: 1 }
20294 Sep 22 23:14:27.930 INFO c29f404c-9fcb-4abe-97d9-77d5448a344e WaitActive WaitActive WaitActive
20295 Sep 22 23:14:27.931 INFO Current flush_numbers [0..12]: [0, 0]
20296 Sep 22 23:14:27.931 INFO Downstairs has completed Negotiation, task: proc
20297 Sep 22 23:14:27.931 DEBG Read :1001 deps:[JobId(1000)] res:true
20298 Sep 22 23:14:27.931 INFO Current flush_numbers [0..12]: [0, 0]
20299 Sep 22 23:14:27.931 DEBG Read :1001 deps:[JobId(1000)] res:true
20300 Sep 22 23:14:27.931 INFO Downstairs has completed Negotiation, task: proc
20301 Sep 22 23:14:27.931 DEBG Read :1001 deps:[JobId(1000)] res:true
20302 Sep 22 23:14:27.931 INFO UUID: 484c6303-bd26-472d-a8db-93e975b34d58
20303 Sep 22 23:14:27.931 INFO Blocks per extent:5 Total Extents: 2
20304 Sep 22 23:14:27.931 INFO Current flush_numbers [0..12]: [0, 0]
20305 Sep 22 23:14:27.932 INFO Crucible Version: Crucible Version: 0.0.1
20306 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20307 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20308 rustc: 1.70.0 stable x86_64-unknown-illumos
20309 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20310 Sep 22 23:14:27.932 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20311 Sep 22 23:14:27.932 INFO Using address: 127.0.0.1:41379, task: main
20312 Sep 22 23:14:27.932 INFO Downstairs has completed Negotiation, task: proc
20313 Sep 22 23:14:27.932 INFO [0] c29f404c-9fcb-4abe-97d9-77d5448a344e (d5786959-64ba-4a45-85e5-b539511f4f59) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
20314 Sep 22 23:14:27.932 INFO [0] Transition from WaitActive to WaitQuorum
20315 Sep 22 23:14:27.932 WARN [0] new RM replaced this: None
20316 Sep 22 23:14:27.932 INFO Repair listens on 127.0.0.1:0, task: repair
20317 Sep 22 23:14:27.932 INFO [0] Starts reconcile loop
20318 Sep 22 23:14:27.932 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52744, task: repair
20319 Sep 22 23:14:27.932 INFO [1] c29f404c-9fcb-4abe-97d9-77d5448a344e (d5786959-64ba-4a45-85e5-b539511f4f59) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
20320 Sep 22 23:14:27.932 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52744, task: repair
20321 Sep 22 23:14:27.932 INFO [1] Transition from WaitActive to WaitQuorum
20322 Sep 22 23:14:27.932 WARN [1] new RM replaced this: None
20323 Sep 22 23:14:27.932 INFO listening, local_addr: 127.0.0.1:52744, task: repair
20324 Sep 22 23:14:27.932 INFO [1] Starts reconcile loop
20325 Sep 22 23:14:27.932 DEBG [0] Read AckReady 1001, : downstairs
20326 Sep 22 23:14:27.932 INFO [2] c29f404c-9fcb-4abe-97d9-77d5448a344e (d5786959-64ba-4a45-85e5-b539511f4f59) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
20327 Sep 22 23:14:27.932 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52744, task: repair
20328 Sep 22 23:14:27.932 INFO [2] Transition from WaitActive to WaitQuorum
20329 Sep 22 23:14:27.932 WARN [2] new RM replaced this: None
20330 Sep 22 23:14:27.932 INFO Using repair address: 127.0.0.1:52744, task: main
20331 Sep 22 23:14:27.932 INFO [2] Starts reconcile loop
20332 Sep 22 23:14:27.932 INFO No SSL acceptor configured, task: main
20333 Sep 22 23:14:27.932 INFO [0] 127.0.0.1:63499 task reports connection:true
20334 Sep 22 23:14:27.932 INFO c29f404c-9fcb-4abe-97d9-77d5448a344e WaitQuorum WaitQuorum WaitQuorum
20335 Sep 22 23:14:27.932 INFO [0]R flush_numbers: [0, 0]
20336 Sep 22 23:14:27.932 INFO [0]R generation: [0, 0]
20337 Sep 22 23:14:27.932 INFO [0]R dirty: [false, false]
20338 Sep 22 23:14:27.932 INFO [1]R flush_numbers: [0, 0]
20339 Sep 22 23:14:27.932 INFO [1]R generation: [0, 0]
20340 Sep 22 23:14:27.932 INFO [1]R dirty: [false, false]
20341 Sep 22 23:14:27.932 INFO [2]R flush_numbers: [0, 0]
20342 Sep 22 23:14:27.932 INFO [2]R generation: [0, 0]
20343 Sep 22 23:14:27.932 DEBG [1] Read already AckReady 1001, : downstairs
20344 Sep 22 23:14:27.932 INFO [2]R dirty: [false, false]
20345 Sep 22 23:14:27.933 INFO Max found gen is 1
20346 Sep 22 23:14:27.933 INFO Generation requested: 1 >= found:1
20347 Sep 22 23:14:27.933 INFO Next flush: 1
20348 Sep 22 23:14:27.933 INFO Upstairs starts
20349 Sep 22 23:14:27.933 INFO All extents match
20350 Sep 22 23:14:27.933 INFO Crucible Version: BuildInfo {
20351 version: "0.0.1",
20352 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
20353 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
20354 git_branch: "main",
20355 rustc_semver: "1.70.0",
20356 rustc_channel: "stable",
20357 rustc_host_triple: "x86_64-unknown-illumos",
20358 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
20359 cargo_triple: "x86_64-unknown-illumos",
20360 debug: true,
20361 opt_level: 0,
20362 }
20363 Sep 22 23:14:27.933 INFO No downstairs repair required
20364 Sep 22 23:14:27.933 INFO Upstairs <-> Downstairs Message Version: 4
20365 Sep 22 23:14:27.933 INFO No initial repair work was required
20366 Sep 22 23:14:27.933 INFO Set Downstairs and Upstairs active
20367 Sep 22 23:14:27.933 INFO Crucible stats registered with UUID: f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab
20368 Sep 22 23:14:27.933 INFO Crucible f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab has session id: 86fb1b98-1b29-4954-8159-960709b7882f
20369 Sep 22 23:14:27.933 INFO c29f404c-9fcb-4abe-97d9-77d5448a344e is now active with session: d5786959-64ba-4a45-85e5-b539511f4f59
20370 Sep 22 23:14:27.933 INFO c29f404c-9fcb-4abe-97d9-77d5448a344e Set Active after no repair
20371 Sep 22 23:14:27.933 INFO Notify all downstairs, region set compare is done.
20372 Sep 22 23:14:27.933 INFO Set check for repair
20373 Sep 22 23:14:27.933 INFO [1] 127.0.0.1:35263 task reports connection:true
20374 Sep 22 23:14:27.933 INFO listening on 127.0.0.1:0, task: main
20375 Sep 22 23:14:27.933 INFO c29f404c-9fcb-4abe-97d9-77d5448a344e Active Active Active
20376 Sep 22 23:14:27.933 INFO Set check for repair
20377 Sep 22 23:14:27.933 INFO listening on 127.0.0.1:0, task: main
20378 Sep 22 23:14:27.933 DEBG [2] Read already AckReady 1001, : downstairs
20379 Sep 22 23:14:27.933 INFO listening on 127.0.0.1:0, task: main
20380 Sep 22 23:14:27.933 INFO [2] 127.0.0.1:61372 task reports connection:true
20381 Sep 22 23:14:27.933 INFO listening on 127.0.0.1:0, task: main
20382 Sep 22 23:14:27.933 DEBG up_ds_listen was notified
20383 Sep 22 23:14:27.933 INFO c29f404c-9fcb-4abe-97d9-77d5448a344e Active Active Active
20384 Sep 22 23:14:27.933 INFO listening on 127.0.0.1:0, task: main
20385 Sep 22 23:14:27.933 INFO Set check for repair
20386 Sep 22 23:14:27.933 DEBG up_ds_listen process 1001
20387 Sep 22 23:14:27.933 INFO listening on 127.0.0.1:0, task: main
20388 Sep 22 23:14:27.933 DEBG [A] ack job 1001:2, : downstairs
20389 Sep 22 23:14:27.933 INFO [0] connecting to 127.0.0.1:62077, looper: 0
20390 Sep 22 23:14:27.933 INFO [0] received reconcile message
20391 Sep 22 23:14:27.933 INFO [0] All repairs completed, exit
20392 Sep 22 23:14:27.933 INFO [0] Starts cmd_loop
20393 Sep 22 23:14:27.933 INFO [1] connecting to 127.0.0.1:55380, looper: 1
20394 Sep 22 23:14:27.933 INFO [1] received reconcile message
20395 Sep 22 23:14:27.933 INFO [2] connecting to 127.0.0.1:41467, looper: 2
20396 Sep 22 23:14:27.933 DEBG up_ds_listen checked 1 jobs, back to waiting
20397 Sep 22 23:14:27.933 INFO [1] All repairs completed, exit
20398 Sep 22 23:14:27.933 INFO up_listen starts, task: up_listen
20399 Sep 22 23:14:27.933 INFO Wait for all three downstairs to come online
20400 Sep 22 23:14:27.933 INFO Flush timeout: 0.5
20401 Sep 22 23:14:27.933 INFO [1] Starts cmd_loop
20402 Sep 22 23:14:27.933 INFO [2] received reconcile message
20403 Sep 22 23:14:27.933 INFO [2] All repairs completed, exit
20404 Sep 22 23:14:27.933 INFO [2] Starts cmd_loop
20405 Sep 22 23:14:27.933 INFO accepted connection from 127.0.0.1:33010, task: main
20406 The guest has finished waiting for activation
20407 Sep 22 23:14:27.934 INFO accepted connection from 127.0.0.1:48696, task: main
20408 Sep 22 23:14:27.934 INFO accepted connection from 127.0.0.1:40107, task: main
20409 Sep 22 23:14:27.934 DEBG IO Read 1001 has deps [JobId(1000)]
20410 Sep 22 23:14:27.934 INFO [0] f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab looper connected, looper: 0
20411 Sep 22 23:14:27.934 INFO [0] Proc runs for 127.0.0.1:62077 in state New
20412 Sep 22 23:14:27.934 INFO current number of open files limit 65536 is already the maximum
20413 Sep 22 23:14:27.934 INFO [1] f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab looper connected, looper: 1
20414 Sep 22 23:14:27.934 INFO [1] Proc runs for 127.0.0.1:55380 in state New
20415 Sep 22 23:14:27.934 INFO Created new region file "/tmp/downstairs-aQ0wdqbq/region.json"
20416 Sep 22 23:14:27.934 INFO [2] f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab looper connected, looper: 2
20417 Sep 22 23:14:27.934 INFO [2] Proc runs for 127.0.0.1:41467 in state New
20418 Sep 22 23:14:27.934 INFO Upstairs starts
20419 Sep 22 23:14:27.934 INFO Crucible Version: BuildInfo {
20420 version: "0.0.1",
20421 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
20422 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
20423 git_branch: "main",
20424 rustc_semver: "1.70.0",
20425 rustc_channel: "stable",
20426 rustc_host_triple: "x86_64-unknown-illumos",
20427 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
20428 cargo_triple: "x86_64-unknown-illumos",
20429 debug: true,
20430 opt_level: 0,
20431 }
20432 Sep 22 23:14:27.934 INFO Upstairs <-> Downstairs Message Version: 4
20433 Sep 22 23:14:27.934 INFO Crucible stats registered with UUID: e61fa5a6-7bbf-4a27-8d67-7620a71d6c63
20434 Sep 22 23:14:27.934 INFO Crucible e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 has session id: af64e58b-86df-4d1e-a59b-7e30583b77a9
20435 Sep 22 23:14:27.934 DEBG Read :1001 deps:[JobId(1000)] res:true
20436 Sep 22 23:14:27.934 INFO Connection request from f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab with version 4, task: proc
20437 Sep 22 23:14:27.934 INFO upstairs UpstairsConnection { upstairs_id: f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab, session_id: 3b6d6787-7c4f-4800-a80e-7e17426e2512, gen: 1 } connected, version 4, task: proc
20438 Sep 22 23:14:27.934 INFO Connection request from f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab with version 4, task: proc
20439 Sep 22 23:14:27.935 INFO upstairs UpstairsConnection { upstairs_id: f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab, session_id: 3b6d6787-7c4f-4800-a80e-7e17426e2512, gen: 1 } connected, version 4, task: proc
20440 Sep 22 23:14:27.935 INFO Connection request from f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab with version 4, task: proc
20441 Sep 22 23:14:27.935 INFO upstairs UpstairsConnection { upstairs_id: f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab, session_id: 3b6d6787-7c4f-4800-a80e-7e17426e2512, gen: 1 } connected, version 4, task: proc
20442 Sep 22 23:14:27.935 INFO [0] connecting to 127.0.0.1:35764, looper: 0
20443 Sep 22 23:14:27.935 DEBG Read :1001 deps:[JobId(1000)] res:true
20444 Sep 22 23:14:27.935 INFO [1] connecting to 127.0.0.1:43569, looper: 1
20445 Sep 22 23:14:27.935 INFO [2] connecting to 127.0.0.1:41379, looper: 2
20446 Sep 22 23:14:27.935 DEBG Read :1001 deps:[JobId(1000)] res:true
20447 Sep 22 23:14:27.935 INFO up_listen starts, task: up_listen
20448 Sep 22 23:14:27.935 INFO Wait for all three downstairs to come online
20449 Sep 22 23:14:27.935 INFO Flush timeout: 0.5
20450 Sep 22 23:14:27.935 INFO accepted connection from 127.0.0.1:59309, task: main
20451 Sep 22 23:14:27.935 INFO accepted connection from 127.0.0.1:47302, task: main
20452 Sep 22 23:14:27.935 INFO accepted connection from 127.0.0.1:42539, task: main
20453 Sep 22 23:14:27.936 INFO [0] f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab (3b6d6787-7c4f-4800-a80e-7e17426e2512) New New New ds_transition to WaitActive
20454 Sep 22 23:14:27.936 INFO [0] Transition from New to WaitActive
20455 Sep 22 23:14:27.936 INFO [1] f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab (3b6d6787-7c4f-4800-a80e-7e17426e2512) WaitActive New New ds_transition to WaitActive
20456 Sep 22 23:14:27.936 DEBG [0] Read AckReady 1001, : downstairs
20457 Sep 22 23:14:27.936 INFO [1] Transition from New to WaitActive
20458 Sep 22 23:14:27.936 INFO [2] f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab (3b6d6787-7c4f-4800-a80e-7e17426e2512) WaitActive WaitActive New ds_transition to WaitActive
20459 Sep 22 23:14:27.936 INFO [2] Transition from New to WaitActive
20460 Sep 22 23:14:27.936 INFO [0] e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 looper connected, looper: 0
20461 Sep 22 23:14:27.936 INFO [0] Proc runs for 127.0.0.1:35764 in state New
20462 Sep 22 23:14:27.936 INFO [1] e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 looper connected, looper: 1
20463 Sep 22 23:14:27.936 INFO [1] Proc runs for 127.0.0.1:43569 in state New
20464 Sep 22 23:14:27.936 DEBG [1] Read already AckReady 1001, : downstairs
20465 Sep 22 23:14:27.936 INFO [2] e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 looper connected, looper: 2
20466 Sep 22 23:14:27.936 INFO [2] Proc runs for 127.0.0.1:41379 in state New
20467 Sep 22 23:14:27.936 DEBG [2] Read already AckReady 1001, : downstairs
20468 Sep 22 23:14:27.936 DEBG up_ds_listen was notified
20469 Sep 22 23:14:27.936 DEBG up_ds_listen process 1001
20470 Sep 22 23:14:27.937 DEBG [A] ack job 1001:2, : downstairs
20471 Sep 22 23:14:27.937 DEBG up_ds_listen checked 1 jobs, back to waiting
20472 Sep 22 23:14:27.937 INFO Connection request from e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 with version 4, task: proc
20473 Sep 22 23:14:27.937 INFO upstairs UpstairsConnection { upstairs_id: e61fa5a6-7bbf-4a27-8d67-7620a71d6c63, session_id: 2024cbe9-f560-42ab-89c7-6b05d6eff703, gen: 1 } connected, version 4, task: proc
20474 Sep 22 23:14:27.937 INFO Connection request from e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 with version 4, task: proc
20475 Sep 22 23:14:27.937 INFO upstairs UpstairsConnection { upstairs_id: e61fa5a6-7bbf-4a27-8d67-7620a71d6c63, session_id: 2024cbe9-f560-42ab-89c7-6b05d6eff703, gen: 1 } connected, version 4, task: proc
20476 Sep 22 23:14:27.937 INFO Connection request from e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 with version 4, task: proc
20477 Sep 22 23:14:27.937 INFO upstairs UpstairsConnection { upstairs_id: e61fa5a6-7bbf-4a27-8d67-7620a71d6c63, session_id: 2024cbe9-f560-42ab-89c7-6b05d6eff703, gen: 1 } connected, version 4, task: proc
20478 Sep 22 23:14:27.937 INFO current number of open files limit 65536 is already the maximum
20479 Sep 22 23:14:27.937 INFO Opened existing region file "/tmp/downstairs-aQ0wdqbq/region.json"
20480 Sep 22 23:14:27.937 INFO Database read version 1
20481 Sep 22 23:14:27.937 INFO Database write version 1
20482 Sep 22 23:14:27.937 INFO [0] e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 (2024cbe9-f560-42ab-89c7-6b05d6eff703) New New New ds_transition to WaitActive
20483 Sep 22 23:14:27.937 INFO [0] Transition from New to WaitActive
20484 Sep 22 23:14:27.937 INFO [1] e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 (2024cbe9-f560-42ab-89c7-6b05d6eff703) WaitActive New New ds_transition to WaitActive
20485 Sep 22 23:14:27.937 INFO [1] Transition from New to WaitActive
20486 Sep 22 23:14:27.937 INFO [2] e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 (2024cbe9-f560-42ab-89c7-6b05d6eff703) WaitActive WaitActive New ds_transition to WaitActive
20487 Sep 22 23:14:27.937 INFO [2] Transition from New to WaitActive
20488 Sep 22 23:14:27.937 DEBG IO Write 1002 has deps []
20489 The guest has requested activation
20490 Sep 22 23:14:27.938 DEBG up_ds_listen was notified
20491 Sep 22 23:14:27.938 DEBG up_ds_listen process 1002
20492 Sep 22 23:14:27.938 INFO f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab active request set
20493 Sep 22 23:14:27.938 DEBG [A] ack job 1002:3, : downstairs
20494 Sep 22 23:14:27.938 INFO [0] received activate with gen 1
20495 Sep 22 23:14:27.938 DEBG up_ds_listen checked 1 jobs, back to waiting
20496 Sep 22 23:14:27.938 INFO [0] client got ds_active_rx, promote! session 3b6d6787-7c4f-4800-a80e-7e17426e2512
20497 Sep 22 23:14:27.938 INFO [1] received activate with gen 1
20498 Sep 22 23:14:27.938 INFO [1] client got ds_active_rx, promote! session 3b6d6787-7c4f-4800-a80e-7e17426e2512
20499 Sep 22 23:14:27.938 INFO [2] received activate with gen 1
20500 Sep 22 23:14:27.938 INFO [2] client got ds_active_rx, promote! session 3b6d6787-7c4f-4800-a80e-7e17426e2512
20501 Sep 22 23:14:27.938 INFO UpstairsConnection { upstairs_id: f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab, session_id: 3b6d6787-7c4f-4800-a80e-7e17426e2512, gen: 1 } is now active (read-write)
20502 Sep 22 23:14:27.938 INFO Scrub check for bcb46126-f008-485e-8ce7-38ca9e1e6dc2
20503 Sep 22 23:14:27.938 INFO Scrub for bcb46126-f008-485e-8ce7-38ca9e1e6dc2 begins
20504 Sep 22 23:14:27.938 INFO UpstairsConnection { upstairs_id: f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab, session_id: 3b6d6787-7c4f-4800-a80e-7e17426e2512, gen: 1 } is now active (read-write)
20505 Sep 22 23:14:27.938 INFO Scrub with total_size:5120 block_size:512
20506 Sep 22 23:14:27.938 INFO Scrubs from block 0 to 10 in (256) 131072 size IOs pm:0
20507 Sep 22 23:14:27.938 INFO Adjust block_count to 10 at offset 0
20508 Sep 22 23:14:27.938 INFO UpstairsConnection { upstairs_id: f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab, session_id: 3b6d6787-7c4f-4800-a80e-7e17426e2512, gen: 1 } is now active (read-write)
20509 Sep 22 23:14:27.939 INFO [0] downstairs client at 127.0.0.1:62077 has UUID da4b10d8-d31d-4fd9-811a-957d3b103709
20510 Sep 22 23:14:27.939 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: da4b10d8-d31d-4fd9-811a-957d3b103709, encrypted: true, database_read_version: 1, database_write_version: 1 }
20511 Sep 22 23:14:27.939 INFO f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab WaitActive WaitActive WaitActive
20512 Sep 22 23:14:27.939 INFO [1] downstairs client at 127.0.0.1:55380 has UUID a11fec66-3ce4-4c3c-bad2-f8da3b3c9b3e
20513 Sep 22 23:14:27.939 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a11fec66-3ce4-4c3c-bad2-f8da3b3c9b3e, encrypted: true, database_read_version: 1, database_write_version: 1 }
20514 Sep 22 23:14:27.939 INFO f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab WaitActive WaitActive WaitActive
20515 Sep 22 23:14:27.939 INFO [2] downstairs client at 127.0.0.1:41467 has UUID 7d1add5d-5a13-414b-bc02-b8d357908557
20516 Sep 22 23:14:27.939 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 7d1add5d-5a13-414b-bc02-b8d357908557, encrypted: true, database_read_version: 1, database_write_version: 1 }
20517 Sep 22 23:14:27.939 INFO f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab WaitActive WaitActive WaitActive
20518 Sep 22 23:14:27.939 INFO Current flush_numbers [0..12]: [0, 0]
20519 Sep 22 23:14:27.939 INFO UUID: a9656490-6146-4f17-b472-6f478edd5fc4
20520 Sep 22 23:14:27.939 INFO Blocks per extent:5 Total Extents: 2
20521 Sep 22 23:14:27.939 INFO Crucible Version: Crucible Version: 0.0.1
20522 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20523 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20524 rustc: 1.70.0 stable x86_64-unknown-illumos
20525 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20526 Sep 22 23:14:27.939 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20527 Sep 22 23:14:27.939 INFO Using address: 127.0.0.1:58389, task: main
20528 Sep 22 23:14:27.939 INFO Downstairs has completed Negotiation, task: proc
20529 Sep 22 23:14:27.940 INFO Repair listens on 127.0.0.1:0, task: repair
20530 Sep 22 23:14:27.940 INFO Current flush_numbers [0..12]: [0, 0]
20531 Sep 22 23:14:27.940 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57311, task: repair
20532 Sep 22 23:14:27.940 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57311, task: repair
20533 Sep 22 23:14:27.940 INFO Downstairs has completed Negotiation, task: proc
20534 Sep 22 23:14:27.940 DEBG Write :1002 deps:[] res:true
20535 Sep 22 23:14:27.940 INFO listening, local_addr: 127.0.0.1:57311, task: repair
20536 Sep 22 23:14:27.940 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57311, task: repair
20537 Sep 22 23:14:27.940 INFO Using repair address: 127.0.0.1:57311, task: main
20538 Sep 22 23:14:27.940 INFO No SSL acceptor configured, task: main
20539 Sep 22 23:14:27.940 INFO Current flush_numbers [0..12]: [0, 0]
20540 Sep 22 23:14:27.940 DEBG Write :1002 deps:[] res:true
20541 Sep 22 23:14:27.941 INFO Downstairs has completed Negotiation, task: proc
20542 Sep 22 23:14:27.941 DEBG Write :1002 deps:[] res:true
20543 Sep 22 23:14:27.941 INFO listening on 127.0.0.1:0, task: main
20544 Sep 22 23:14:27.941 INFO [0] f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab (3b6d6787-7c4f-4800-a80e-7e17426e2512) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
20545 Sep 22 23:14:27.941 INFO [0] Transition from WaitActive to WaitQuorum
20546 Sep 22 23:14:27.941 WARN c29f404c-9fcb-4abe-97d9-77d5448a344e request to replace downstairs 127.0.0.1:63499 with 127.0.0.1:58389
20547 Sep 22 23:14:27.941 WARN [0] new RM replaced this: None
20548 Sep 22 23:14:27.941 INFO c29f404c-9fcb-4abe-97d9-77d5448a344e found old target: 127.0.0.1:63499 at 0
20549 Sep 22 23:14:27.941 INFO c29f404c-9fcb-4abe-97d9-77d5448a344e replacing old: 127.0.0.1:63499 at 0
20550 Sep 22 23:14:27.941 INFO [0] Starts reconcile loop
20551 Sep 22 23:14:27.941 INFO [0] client skip 0 in process jobs because fault, : downstairs
20552 Sep 22 23:14:27.941 INFO [0] changed 0 jobs to fault skipped, : downstairs
20553 Sep 22 23:14:27.941 INFO [0] c29f404c-9fcb-4abe-97d9-77d5448a344e (d5786959-64ba-4a45-85e5-b539511f4f59) Active Active Active ds_transition to Replacing
20554 Sep 22 23:14:27.941 INFO [0] Transition from Active to Replacing
20555 Sep 22 23:14:27.941 INFO [1] f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab (3b6d6787-7c4f-4800-a80e-7e17426e2512) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
20556 Sep 22 23:14:27.941 INFO [1] Transition from WaitActive to WaitQuorum
20557 Sep 22 23:14:27.941 WARN [1] new RM replaced this: None
20558 Sep 22 23:14:27.941 WARN c29f404c-9fcb-4abe-97d9-77d5448a344e request to replace downstairs 127.0.0.1:63499 with 127.0.0.1:58389
20559 Sep 22 23:14:27.941 INFO [1] Starts reconcile loop
20560 Sep 22 23:14:27.941 INFO c29f404c-9fcb-4abe-97d9-77d5448a344e found new target: 127.0.0.1:58389 at 0
20561 Sep 22 23:14:27.941 INFO [2] f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab (3b6d6787-7c4f-4800-a80e-7e17426e2512) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
20562 Sep 22 23:14:27.941 INFO [2] Transition from WaitActive to WaitQuorum
20563 Sep 22 23:14:27.941 WARN [2] new RM replaced this: None
20564 Sep 22 23:14:27.941 INFO [2] Starts reconcile loop
20565 Sep 22 23:14:27.941 INFO [0] 127.0.0.1:62077 task reports connection:true
20566 Sep 22 23:14:27.941 INFO f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab WaitQuorum WaitQuorum WaitQuorum
20567 Sep 22 23:14:27.941 INFO [0]R flush_numbers: [0, 0]
20568 Sep 22 23:14:27.941 INFO [0]R generation: [0, 0]
20569 Sep 22 23:14:27.941 INFO [0]R dirty: [false, false]
20570 Sep 22 23:14:27.941 INFO [1]R flush_numbers: [0, 0]
20571 Sep 22 23:14:27.941 INFO [1]R generation: [0, 0]
20572 Sep 22 23:14:27.941 INFO [1]R dirty: [false, false]
20573 Sep 22 23:14:27.941 INFO [2]R flush_numbers: [0, 0]
20574 Sep 22 23:14:27.941 INFO [2]R generation: [0, 0]
20575 Sep 22 23:14:27.941 INFO [2]R dirty: [false, false]
20576 Sep 22 23:14:27.941 INFO Max found gen is 1
20577 Sep 22 23:14:27.941 INFO Generation requested: 1 >= found:1
20578 Sep 22 23:14:27.941 INFO Next flush: 1
20579 Sep 22 23:14:27.942 INFO All extents match
20580 Sep 22 23:14:27.942 INFO No downstairs repair required
20581 Sep 22 23:14:27.942 INFO No initial repair work was required
20582 Sep 22 23:14:27.942 INFO Set Downstairs and Upstairs active
20583 Sep 22 23:14:27.942 INFO f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab is now active with session: 3b6d6787-7c4f-4800-a80e-7e17426e2512
20584 Sep 22 23:14:27.942 INFO f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab Set Active after no repair
20585 Sep 22 23:14:27.942 INFO Notify all downstairs, region set compare is done.
20586 Sep 22 23:14:27.942 INFO Set check for repair
20587 Sep 22 23:14:27.942 INFO [1] 127.0.0.1:55380 task reports connection:true
20588 Sep 22 23:14:27.942 INFO f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab Active Active Active
20589 Sep 22 23:14:27.942 INFO Set check for repair
20590 Sep 22 23:14:27.942 INFO [2] 127.0.0.1:41467 task reports connection:true
20591 Sep 22 23:14:27.942 INFO f94aa62c-b9ca-4a22-b94a-357bd8d6b5ab Active Active Active
20592 Sep 22 23:14:27.942 INFO Set check for repair
20593 Sep 22 23:14:27.942 INFO [0] received reconcile message
20594 Sep 22 23:14:27.942 INFO [0] All repairs completed, exit
20595 Sep 22 23:14:27.942 INFO [0] Starts cmd_loop
20596 Sep 22 23:14:27.942 INFO [1] received reconcile message
20597 Sep 22 23:14:27.942 INFO [1] All repairs completed, exit
20598 Sep 22 23:14:27.942 INFO [1] Starts cmd_loop
20599 Sep 22 23:14:27.942 INFO [2] received reconcile message
20600 Sep 22 23:14:27.942 INFO [2] All repairs completed, exit
20601 Sep 22 23:14:27.942 INFO [2] Starts cmd_loop
20602 The guest has finished waiting for activation
20603 Sep 22 23:14:27.943 DEBG IO Write 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
20604 The guest has requested activation
20605 Sep 22 23:14:27.943 INFO e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 active request set
20606 Sep 22 23:14:27.943 INFO [0] received activate with gen 1
20607 Sep 22 23:14:27.943 INFO [0] client got ds_active_rx, promote! session 2024cbe9-f560-42ab-89c7-6b05d6eff703
20608 Sep 22 23:14:27.943 INFO [1] received activate with gen 1
20609 Sep 22 23:14:27.943 INFO [1] client got ds_active_rx, promote! session 2024cbe9-f560-42ab-89c7-6b05d6eff703
20610 Sep 22 23:14:27.943 INFO [2] received activate with gen 1
20611 Sep 22 23:14:27.943 INFO [2] client got ds_active_rx, promote! session 2024cbe9-f560-42ab-89c7-6b05d6eff703
20612 Sep 22 23:14:27.943 INFO UpstairsConnection { upstairs_id: e61fa5a6-7bbf-4a27-8d67-7620a71d6c63, session_id: 2024cbe9-f560-42ab-89c7-6b05d6eff703, gen: 1 } is now active (read-write)
20613 Sep 22 23:14:27.943 INFO UpstairsConnection { upstairs_id: e61fa5a6-7bbf-4a27-8d67-7620a71d6c63, session_id: 2024cbe9-f560-42ab-89c7-6b05d6eff703, gen: 1 } is now active (read-write)
20614 Sep 22 23:14:27.943 INFO UpstairsConnection { upstairs_id: e61fa5a6-7bbf-4a27-8d67-7620a71d6c63, session_id: 2024cbe9-f560-42ab-89c7-6b05d6eff703, gen: 1 } is now active (read-write)
20615 Sep 22 23:14:27.944 INFO [0] downstairs client at 127.0.0.1:35764 has UUID 27e21158-4dda-4f80-89f0-fd433654b40d
20616 Sep 22 23:14:27.944 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 27e21158-4dda-4f80-89f0-fd433654b40d, encrypted: true, database_read_version: 1, database_write_version: 1 }
20617 Sep 22 23:14:27.944 INFO e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 WaitActive WaitActive WaitActive
20618 Sep 22 23:14:27.944 INFO [1] downstairs client at 127.0.0.1:43569 has UUID c1d22958-e424-406b-a939-8b7b5f9f6177
20619 Sep 22 23:14:27.944 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c1d22958-e424-406b-a939-8b7b5f9f6177, encrypted: true, database_read_version: 1, database_write_version: 1 }
20620 Sep 22 23:14:27.944 INFO e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 WaitActive WaitActive WaitActive
20621 Sep 22 23:14:27.944 INFO [2] downstairs client at 127.0.0.1:41379 has UUID 484c6303-bd26-472d-a8db-93e975b34d58
20622 Sep 22 23:14:27.944 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 484c6303-bd26-472d-a8db-93e975b34d58, encrypted: true, database_read_version: 1, database_write_version: 1 }
20623 Sep 22 23:14:27.944 INFO e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 WaitActive WaitActive WaitActive
20624 Sep 22 23:14:27.944 INFO Current flush_numbers [0..12]: [0, 0]
20625 Sep 22 23:14:27.944 INFO Downstairs has completed Negotiation, task: proc
20626 Sep 22 23:14:27.945 INFO Current flush_numbers [0..12]: [0, 0]
20627 test test::integration_test_volume_twice_replace_downstairs ... ok
20628 Sep 22 23:14:27.945 INFO Downstairs has completed Negotiation, task: proc
20629 Sep 22 23:14:27.945 INFO current number of open files limit 65536 is already the maximum
20630 Sep 22 23:14:27.945 INFO Current flush_numbers [0..12]: [0, 0]
20631 Sep 22 23:14:27.945 INFO Created new region file "/tmp/downstairs-wIUBV7jK/region.json"
20632 Sep 22 23:14:27.945 INFO Downstairs has completed Negotiation, task: proc
20633 Sep 22 23:14:27.945 INFO [0] e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 (2024cbe9-f560-42ab-89c7-6b05d6eff703) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
20634 Sep 22 23:14:27.945 INFO [0] Transition from WaitActive to WaitQuorum
20635 Sep 22 23:14:27.945 WARN [0] new RM replaced this: None
20636 Sep 22 23:14:27.945 INFO [0] Starts reconcile loop
20637 Sep 22 23:14:27.945 INFO [1] e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 (2024cbe9-f560-42ab-89c7-6b05d6eff703) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
20638 Sep 22 23:14:27.945 INFO [1] Transition from WaitActive to WaitQuorum
20639 Sep 22 23:14:27.945 WARN [1] new RM replaced this: None
20640 Sep 22 23:14:27.945 INFO [1] Starts reconcile loop
20641 Sep 22 23:14:27.945 INFO [2] e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 (2024cbe9-f560-42ab-89c7-6b05d6eff703) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
20642 Sep 22 23:14:27.945 INFO [2] Transition from WaitActive to WaitQuorum
20643 Sep 22 23:14:27.945 WARN [2] new RM replaced this: None
20644 Sep 22 23:14:27.945 INFO [2] Starts reconcile loop
20645 Sep 22 23:14:27.945 INFO [0] 127.0.0.1:35764 task reports connection:true
20646 Sep 22 23:14:27.945 INFO e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 WaitQuorum WaitQuorum WaitQuorum
20647 Sep 22 23:14:27.946 INFO [0]R flush_numbers: [0, 0]
20648 Sep 22 23:14:27.946 INFO [0]R generation: [0, 0]
20649 Sep 22 23:14:27.946 INFO [0]R dirty: [false, false]
20650 Sep 22 23:14:27.946 INFO [1]R flush_numbers: [0, 0]
20651 Sep 22 23:14:27.946 INFO [1]R generation: [0, 0]
20652 Sep 22 23:14:27.946 INFO [1]R dirty: [false, false]
20653 Sep 22 23:14:27.946 INFO [2]R flush_numbers: [0, 0]
20654 Sep 22 23:14:27.946 INFO [2]R generation: [0, 0]
20655 Sep 22 23:14:27.946 INFO [2]R dirty: [false, false]
20656 Sep 22 23:14:27.946 INFO Max found gen is 1
20657 Sep 22 23:14:27.946 INFO Generation requested: 1 >= found:1
20658 Sep 22 23:14:27.946 INFO Next flush: 1
20659 Sep 22 23:14:27.946 INFO All extents match
20660 Sep 22 23:14:27.946 INFO No downstairs repair required
20661 Sep 22 23:14:27.946 INFO No initial repair work was required
20662 Sep 22 23:14:27.946 INFO Set Downstairs and Upstairs active
20663 Sep 22 23:14:27.946 INFO e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 is now active with session: 2024cbe9-f560-42ab-89c7-6b05d6eff703
20664 Sep 22 23:14:27.946 INFO e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 Set Active after no repair
20665 Sep 22 23:14:27.946 INFO Notify all downstairs, region set compare is done.
20666 Sep 22 23:14:27.946 INFO Set check for repair
20667 Sep 22 23:14:27.946 INFO [1] 127.0.0.1:43569 task reports connection:true
20668 Sep 22 23:14:27.946 INFO e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 Active Active Active
20669 Sep 22 23:14:27.946 INFO Set check for repair
20670 Sep 22 23:14:27.946 INFO [2] 127.0.0.1:41379 task reports connection:true
20671 Sep 22 23:14:27.946 INFO e61fa5a6-7bbf-4a27-8d67-7620a71d6c63 Active Active Active
20672 Sep 22 23:14:27.946 INFO Set check for repair
20673 Sep 22 23:14:27.946 INFO [0] received reconcile message
20674 Sep 22 23:14:27.946 INFO [0] All repairs completed, exit
20675 Sep 22 23:14:27.946 INFO [0] Starts cmd_loop
20676 Sep 22 23:14:27.946 INFO [1] received reconcile message
20677 Sep 22 23:14:27.946 INFO [1] All repairs completed, exit
20678 Sep 22 23:14:27.946 INFO [1] Starts cmd_loop
20679 Sep 22 23:14:27.946 INFO [2] received reconcile message
20680 Sep 22 23:14:27.946 INFO [2] All repairs completed, exit
20681 Sep 22 23:14:27.946 INFO [2] Starts cmd_loop
20682 The guest has finished waiting for activation
20683 Sep 22 23:14:27.946 DEBG up_ds_listen was notified
20684 Sep 22 23:14:27.946 DEBG up_ds_listen process 1003
20685 Sep 22 23:14:27.946 DEBG [A] ack job 1003:4, : downstairs
20686 Sep 22 23:14:27.946 DEBG up_ds_listen checked 1 jobs, back to waiting
20687 Sep 22 23:14:27.946 INFO Scrub at offset 10/10 sp:10
20688 Sep 22 23:14:27.947 DEBG IO Write 1000 has deps []
20689 Sep 22 23:14:27.947 DEBG up_ds_listen was notified
20690 Sep 22 23:14:27.947 DEBG up_ds_listen process 1000
20691 Sep 22 23:14:27.947 DEBG [A] ack job 1000:1, : downstairs
20692 Sep 22 23:14:27.947 DEBG up_ds_listen checked 1 jobs, back to waiting
20693 Sep 22 23:14:27.948 INFO Scrub bcb46126-f008-485e-8ce7-38ca9e1e6dc2 done in 0 seconds. Retries:0 scrub_size:5120 size:10 pause_milli:0
20694 Sep 22 23:14:27.948 DEBG Write :1000 deps:[] res:true
20695 Sep 22 23:14:27.948 DEBG IO Flush 1004 has deps [JobId(1003), JobId(1002), JobId(1001), JobId(1000)]
20696 Sep 22 23:14:27.948 INFO current number of open files limit 65536 is already the maximum
20697 Sep 22 23:14:27.948 INFO Opened existing region file "/tmp/downstairs-wIUBV7jK/region.json"
20698 Sep 22 23:14:27.948 INFO Database read version 1
20699 Sep 22 23:14:27.948 INFO Database write version 1
20700 Sep 22 23:14:27.948 DEBG Write :1000 deps:[] res:true
20701 Sep 22 23:14:27.949 DEBG Write :1000 deps:[] res:true
20702 Sep 22 23:14:27.949 DEBG IO Write 1000 has deps []
20703 Sep 22 23:14:27.949 DEBG up_ds_listen was notified
20704 Sep 22 23:14:27.949 DEBG up_ds_listen process 1000
20705 Sep 22 23:14:27.949 DEBG [A] ack job 1000:1, : downstairs
20706 Sep 22 23:14:27.949 DEBG up_ds_listen checked 1 jobs, back to waiting
20707 Sep 22 23:14:27.950 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
20708 Sep 22 23:14:27.950 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
20709 Sep 22 23:14:27.950 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
20710 Sep 22 23:14:27.950 INFO UUID: 3b4bb3c0-2112-401e-871c-3ba820d1a25e
20711 Sep 22 23:14:27.950 INFO Blocks per extent:5 Total Extents: 2
20712 Sep 22 23:14:27.950 INFO Crucible Version: Crucible Version: 0.0.1
20713 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20714 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20715 rustc: 1.70.0 stable x86_64-unknown-illumos
20716 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20717 Sep 22 23:14:27.950 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20718 Sep 22 23:14:27.950 INFO Using address: 127.0.0.1:56875, task: main
20719 Sep 22 23:14:27.950 DEBG up_ds_listen was notified
20720 Sep 22 23:14:27.950 DEBG Write :1000 deps:[] res:true
20721 Sep 22 23:14:27.950 DEBG up_ds_listen process 1004
20722 Sep 22 23:14:27.950 DEBG [A] ack job 1004:5, : downstairs
20723 Sep 22 23:14:27.950 INFO Repair listens on 127.0.0.1:0, task: repair
20724 Sep 22 23:14:27.950 DEBG [rc] retire 1004 clears [JobId(1000), JobId(1001), JobId(1002), JobId(1003), JobId(1004)], : downstairs
20725 Sep 22 23:14:27.950 DEBG up_ds_listen checked 1 jobs, back to waiting
20726 Sep 22 23:14:27.950 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:46993, task: repair
20727 Sep 22 23:14:27.950 DEBG Write :1000 deps:[] res:true
20728 Sep 22 23:14:27.950 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:46993, task: repair
20729 Sep 22 23:14:27.950 DEBG IO Flush 1002 has deps [JobId(1001), JobId(1000)]
20730 Sep 22 23:14:27.950 INFO listening, local_addr: 127.0.0.1:46993, task: repair
20731 Sep 22 23:14:27.951 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:46993, task: repair
20732 Sep 22 23:14:27.951 INFO Using repair address: 127.0.0.1:46993, task: main
20733 Sep 22 23:14:27.951 INFO No SSL acceptor configured, task: main
20734 Sep 22 23:14:27.951 INFO current number of open files limit 65536 is already the maximum
20735 Sep 22 23:14:27.951 INFO Created new region file "/tmp/downstairs-Qg6sZndH/region.json"
20736 Sep 22 23:14:27.951 DEBG Write :1000 deps:[] res:true
20737 Sep 22 23:14:27.951 DEBG IO Read 1001 has deps [JobId(1000)]
20738 Sep 22 23:14:27.951 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
20739 Sep 22 23:14:27.952 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
20740 Sep 22 23:14:27.952 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
20741 Sep 22 23:14:27.952 DEBG up_ds_listen was notified
20742 Sep 22 23:14:27.952 DEBG up_ds_listen process 1002
20743 Sep 22 23:14:27.952 DEBG [A] ack job 1002:3, : downstairs
20744 Sep 22 23:14:27.952 DEBG [rc] retire 1002 clears [JobId(1000), JobId(1001), JobId(1002)], : downstairs
20745 Sep 22 23:14:27.952 DEBG up_ds_listen checked 1 jobs, back to waiting
20746 Sep 22 23:14:27.952 DEBG Read :1001 deps:[JobId(1000)] res:true
20747 Sep 22 23:14:27.952 DEBG IO Read 1005 has deps []
20748 Sep 22 23:14:27.952 DEBG Read :1001 deps:[JobId(1000)] res:true
20749 Sep 22 23:14:27.952 DEBG Read :1001 deps:[JobId(1000)] res:true
20750 Sep 22 23:14:27.953 DEBG Read :1005 deps:[] res:true
20751 Sep 22 23:14:27.953 DEBG Read :1005 deps:[] res:true
20752 Sep 22 23:14:27.953 DEBG [0] Read AckReady 1001, : downstairs
20753 Sep 22 23:14:27.953 DEBG Read :1005 deps:[] res:true
20754 Sep 22 23:14:27.954 DEBG [1] Read already AckReady 1001, : downstairs
20755 Sep 22 23:14:27.954 INFO current number of open files limit 65536 is already the maximum
20756 Sep 22 23:14:27.954 INFO Opened existing region file "/tmp/downstairs-Qg6sZndH/region.json"
20757 Sep 22 23:14:27.954 INFO Database read version 1
20758 Sep 22 23:14:27.954 INFO Database write version 1
20759 Sep 22 23:14:27.954 DEBG [2] Read already AckReady 1001, : downstairs
20760 Sep 22 23:14:27.954 DEBG up_ds_listen was notified
20761 Sep 22 23:14:27.954 DEBG up_ds_listen process 1001
20762 Sep 22 23:14:27.954 DEBG [A] ack job 1001:2, : downstairs
20763 Sep 22 23:14:27.954 DEBG up_ds_listen checked 1 jobs, back to waiting
20764 Sep 22 23:14:27.955 DEBG IO Read 1001 has deps [JobId(1000)]
20765 Sep 22 23:14:27.955 DEBG Read :1001 deps:[JobId(1000)] res:true
20766 Sep 22 23:14:27.956 DEBG Read :1001 deps:[JobId(1000)] res:true
20767 Sep 22 23:14:27.956 INFO UUID: e2279262-aa71-4426-b934-e18abcaf8d84
20768 Sep 22 23:14:27.956 INFO Blocks per extent:5 Total Extents: 2
20769 Sep 22 23:14:27.956 INFO Crucible Version: Crucible Version: 0.0.1
20770 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20771 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20772 rustc: 1.70.0 stable x86_64-unknown-illumos
20773 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20774 Sep 22 23:14:27.956 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20775 Sep 22 23:14:27.956 INFO Using address: 127.0.0.1:59924, task: main
20776 Sep 22 23:14:27.956 DEBG Read :1001 deps:[JobId(1000)] res:true
20777 Sep 22 23:14:27.956 INFO Repair listens on 127.0.0.1:0, task: repair
20778 Sep 22 23:14:27.956 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58902, task: repair
20779 Sep 22 23:14:27.956 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58902, task: repair
20780 Sep 22 23:14:27.956 INFO listening, local_addr: 127.0.0.1:58902, task: repair
20781 Sep 22 23:14:27.956 DEBG [0] Read AckReady 1005, : downstairs
20782 Sep 22 23:14:27.957 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58902, task: repair
20783 Sep 22 23:14:27.957 INFO Using repair address: 127.0.0.1:58902, task: main
20784 Sep 22 23:14:27.957 INFO No SSL acceptor configured, task: main
20785 Sep 22 23:14:27.957 INFO current number of open files limit 65536 is already the maximum
20786 Sep 22 23:14:27.957 DEBG [0] Read AckReady 1001, : downstairs
20787 Sep 22 23:14:27.957 INFO Created new region file "/tmp/downstairs-gHHLudLJ/region.json"
20788 Sep 22 23:14:27.957 DEBG [1] Read already AckReady 1001, : downstairs
20789 Sep 22 23:14:27.957 DEBG [2] Read already AckReady 1001, : downstairs
20790 Sep 22 23:14:27.957 DEBG up_ds_listen was notified
20791 Sep 22 23:14:27.958 DEBG up_ds_listen process 1001
20792 Sep 22 23:14:27.958 DEBG [A] ack job 1001:2, : downstairs
20793 Sep 22 23:14:27.958 DEBG up_ds_listen checked 1 jobs, back to waiting
20794 Sep 22 23:14:27.958 DEBG [1] Read already AckReady 1005, : downstairs
20795 Sep 22 23:14:27.959 DEBG IO Write 1002 has deps []
20796 Sep 22 23:14:27.959 DEBG up_ds_listen was notified
20797 Sep 22 23:14:27.959 DEBG up_ds_listen process 1002
20798 Sep 22 23:14:27.959 DEBG [A] ack job 1002:3, : downstairs
20799 Sep 22 23:14:27.959 DEBG up_ds_listen checked 1 jobs, back to waiting
20800 Sep 22 23:14:27.959 INFO Connection request from da9f63ff-0d34-4427-ba79-865cdebab321 with version 4, task: proc
20801 Sep 22 23:14:27.959 INFO upstairs UpstairsConnection { upstairs_id: da9f63ff-0d34-4427-ba79-865cdebab321, session_id: 5502d8a4-3689-41e6-8dea-ec66f52f7613, gen: 1 } connected, version 4, task: proc
20802 Sep 22 23:14:27.959 INFO Connection request from da9f63ff-0d34-4427-ba79-865cdebab321 with version 4, task: proc
20803 Sep 22 23:14:27.959 INFO upstairs UpstairsConnection { upstairs_id: da9f63ff-0d34-4427-ba79-865cdebab321, session_id: 5502d8a4-3689-41e6-8dea-ec66f52f7613, gen: 1 } connected, version 4, task: proc
20804 Sep 22 23:14:27.959 INFO Connection request from da9f63ff-0d34-4427-ba79-865cdebab321 with version 4, task: proc
20805 Sep 22 23:14:27.959 INFO upstairs UpstairsConnection { upstairs_id: da9f63ff-0d34-4427-ba79-865cdebab321, session_id: 5502d8a4-3689-41e6-8dea-ec66f52f7613, gen: 1 } connected, version 4, task: proc
20806 Sep 22 23:14:27.960 INFO [1] da9f63ff-0d34-4427-ba79-865cdebab321 (5502d8a4-3689-41e6-8dea-ec66f52f7613) New New New ds_transition to WaitActive
20807 Sep 22 23:14:27.960 INFO [1] Transition from New to WaitActive
20808 Sep 22 23:14:27.960 DEBG [2] Read already AckReady 1005, : downstairs
20809 Sep 22 23:14:27.960 INFO [0] da9f63ff-0d34-4427-ba79-865cdebab321 (5502d8a4-3689-41e6-8dea-ec66f52f7613) New WaitActive New ds_transition to WaitActive
20810 Sep 22 23:14:27.960 DEBG up_ds_listen was notified
20811 Sep 22 23:14:27.960 INFO [0] Transition from New to WaitActive
20812 Sep 22 23:14:27.960 DEBG up_ds_listen process 1005
20813 Sep 22 23:14:27.960 DEBG [A] ack job 1005:6, : downstairs
20814 Sep 22 23:14:27.960 INFO [2] da9f63ff-0d34-4427-ba79-865cdebab321 (5502d8a4-3689-41e6-8dea-ec66f52f7613) WaitActive WaitActive New ds_transition to WaitActive
20815 Sep 22 23:14:27.960 INFO [2] Transition from New to WaitActive
20816 Sep 22 23:14:27.960 DEBG Write :1002 deps:[] res:true
20817 Sep 22 23:14:27.960 DEBG up_ds_listen checked 1 jobs, back to waiting
20818 Sep 22 23:14:27.960 INFO current number of open files limit 65536 is already the maximum
20819 Sep 22 23:14:27.960 INFO Opened existing region file "/tmp/downstairs-gHHLudLJ/region.json"
20820 Sep 22 23:14:27.960 INFO Database read version 1
20821 Sep 22 23:14:27.960 INFO Database write version 1
20822 Sep 22 23:14:27.961 DEBG IO Read 1003 has deps []
20823 Sep 22 23:14:27.961 DEBG Write :1002 deps:[] res:true
20824 Sep 22 23:14:27.961 DEBG Read :1003 deps:[] res:true
20825 Sep 22 23:14:27.961 DEBG Write :1002 deps:[] res:true
20826 The guest has requested activation
20827 Sep 22 23:14:27.962 INFO da9f63ff-0d34-4427-ba79-865cdebab321 active request set
20828 Sep 22 23:14:27.962 DEBG IO Write 1002 has deps []
20829 Sep 22 23:14:27.962 INFO [0] received activate with gen 1
20830 Sep 22 23:14:27.962 DEBG Read :1003 deps:[] res:true
20831 Sep 22 23:14:27.962 INFO [0] client got ds_active_rx, promote! session 5502d8a4-3689-41e6-8dea-ec66f52f7613
20832 Sep 22 23:14:27.962 DEBG up_ds_listen was notified
20833 Sep 22 23:14:27.962 INFO [1] received activate with gen 1
20834 Sep 22 23:14:27.962 INFO [1] client got ds_active_rx, promote! session 5502d8a4-3689-41e6-8dea-ec66f52f7613
20835 Sep 22 23:14:27.962 DEBG up_ds_listen process 1002
20836 Sep 22 23:14:27.962 DEBG [A] ack job 1002:3, : downstairs
20837 Sep 22 23:14:27.962 DEBG up_ds_listen checked 1 jobs, back to waiting
20838 Sep 22 23:14:27.962 INFO [2] received activate with gen 1
20839 Sep 22 23:14:27.962 INFO [2] client got ds_active_rx, promote! session 5502d8a4-3689-41e6-8dea-ec66f52f7613
20840 Sep 22 23:14:27.962 DEBG Read :1003 deps:[] res:true
20841 Sep 22 23:14:27.962 INFO UpstairsConnection { upstairs_id: da9f63ff-0d34-4427-ba79-865cdebab321, session_id: 5502d8a4-3689-41e6-8dea-ec66f52f7613, gen: 1 } is now active (read-write)
20842 Sep 22 23:14:27.962 INFO Scrub check for 22457d3a-4c86-4e03-b495-3e59aee64613
20843 Sep 22 23:14:27.962 INFO UpstairsConnection { upstairs_id: da9f63ff-0d34-4427-ba79-865cdebab321, session_id: 5502d8a4-3689-41e6-8dea-ec66f52f7613, gen: 1 } is now active (read-write)
20844 Sep 22 23:14:27.962 INFO Scrub for 22457d3a-4c86-4e03-b495-3e59aee64613 begins
20845 Sep 22 23:14:27.962 INFO Scrub with total_size:7680 block_size:512
20846 Sep 22 23:14:27.962 INFO Scrubs from block 0 to 15 in (256) 131072 size IOs pm:0
20847 Sep 22 23:14:27.962 INFO Adjust block_count to 15 at offset 0
20848 Sep 22 23:14:27.962 INFO UpstairsConnection { upstairs_id: da9f63ff-0d34-4427-ba79-865cdebab321, session_id: 5502d8a4-3689-41e6-8dea-ec66f52f7613, gen: 1 } is now active (read-write)
20849 Sep 22 23:14:27.963 INFO [1] downstairs client at 127.0.0.1:46036 has UUID 5d23d6e0-25c6-4cbe-ad9c-d04399dce2ff
20850 Sep 22 23:14:27.963 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 5d23d6e0-25c6-4cbe-ad9c-d04399dce2ff, encrypted: true, database_read_version: 1, database_write_version: 1 }
20851 Sep 22 23:14:27.963 INFO da9f63ff-0d34-4427-ba79-865cdebab321 WaitActive WaitActive WaitActive
20852 Sep 22 23:14:27.963 INFO [0] downstairs client at 127.0.0.1:39253 has UUID 231089c9-45a4-4798-b0b3-3fef94d32b3b
20853 Sep 22 23:14:27.963 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 231089c9-45a4-4798-b0b3-3fef94d32b3b, encrypted: true, database_read_version: 1, database_write_version: 1 }
20854 Sep 22 23:14:27.963 DEBG [0] Read AckReady 1003, : downstairs
20855 Sep 22 23:14:27.963 INFO da9f63ff-0d34-4427-ba79-865cdebab321 WaitActive WaitActive WaitActive
20856 Sep 22 23:14:27.963 INFO [2] downstairs client at 127.0.0.1:37718 has UUID e165f739-3242-442c-822a-5ed698df44b4
20857 Sep 22 23:14:27.963 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e165f739-3242-442c-822a-5ed698df44b4, encrypted: true, database_read_version: 1, database_write_version: 1 }
20858 Sep 22 23:14:27.963 INFO da9f63ff-0d34-4427-ba79-865cdebab321 WaitActive WaitActive WaitActive
20859 Sep 22 23:14:27.963 INFO UUID: a97b4e0c-86a6-4238-838d-e198926e4567
20860 Sep 22 23:14:27.963 INFO Blocks per extent:5 Total Extents: 2
20861 Sep 22 23:14:27.963 DEBG [1] Read already AckReady 1003, : downstairs
20862 Sep 22 23:14:27.963 INFO Current flush_numbers [0..12]: [0, 0]
20863 Sep 22 23:14:27.963 INFO Crucible Version: Crucible Version: 0.0.1
20864 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
20865 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
20866 rustc: 1.70.0 stable x86_64-unknown-illumos
20867 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
20868 Sep 22 23:14:27.964 INFO Upstairs <-> Downstairs Message Version: 4, task: main
20869 Sep 22 23:14:27.964 INFO Using address: 127.0.0.1:44361, task: main
20870 Sep 22 23:14:27.964 DEBG [2] Read already AckReady 1003, : downstairs
20871 Sep 22 23:14:27.964 INFO Downstairs has completed Negotiation, task: proc
20872 Sep 22 23:14:27.964 DEBG up_ds_listen was notified
20873 Sep 22 23:14:27.964 DEBG up_ds_listen process 1003
20874 Sep 22 23:14:27.964 DEBG [A] ack job 1003:4, : downstairs
20875 Sep 22 23:14:27.964 DEBG Write :1002 deps:[] res:true
20876 Sep 22 23:14:27.964 INFO Repair listens on 127.0.0.1:0, task: repair
20877 Sep 22 23:14:27.964 INFO Current flush_numbers [0..12]: [0, 0]
20878 Sep 22 23:14:27.964 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:40957, task: repair
20879 Sep 22 23:14:27.964 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:40957, task: repair
20880 Sep 22 23:14:27.964 INFO listening, local_addr: 127.0.0.1:40957, task: repair
20881 Sep 22 23:14:27.964 DEBG up_ds_listen checked 1 jobs, back to waiting
20882 Sep 22 23:14:27.964 INFO Downstairs has completed Negotiation, task: proc
20883 Sep 22 23:14:27.964 DEBG Write :1002 deps:[] res:true
20884 Sep 22 23:14:27.964 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:40957, task: repair
20885 Sep 22 23:14:27.964 INFO Using repair address: 127.0.0.1:40957, task: main
20886 Sep 22 23:14:27.964 INFO No SSL acceptor configured, task: main
20887 Sep 22 23:14:27.964 INFO Current flush_numbers [0..12]: [0, 0]
20888 Sep 22 23:14:27.965 INFO Downstairs has completed Negotiation, task: proc
20889 Sep 22 23:14:27.965 DEBG Write :1002 deps:[] res:true
20890 Sep 22 23:14:27.965 INFO [1] da9f63ff-0d34-4427-ba79-865cdebab321 (5502d8a4-3689-41e6-8dea-ec66f52f7613) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
20891 Sep 22 23:14:27.965 INFO [1] Transition from WaitActive to WaitQuorum
20892 Sep 22 23:14:27.965 WARN [1] new RM replaced this: None
20893 Sep 22 23:14:27.965 INFO [1] Starts reconcile loop
20894 Sep 22 23:14:27.965 INFO Upstairs starts
20895 Sep 22 23:14:27.965 INFO Crucible Version: BuildInfo {
20896 version: "0.0.1",
20897 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
20898 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
20899 git_branch: "main",
20900 rustc_semver: "1.70.0",
20901 rustc_channel: "stable",
20902 rustc_host_triple: "x86_64-unknown-illumos",
20903 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
20904 cargo_triple: "x86_64-unknown-illumos",
20905 debug: true,
20906 opt_level: 0,
20907 }
20908 Sep 22 23:14:27.965 INFO [0] da9f63ff-0d34-4427-ba79-865cdebab321 (5502d8a4-3689-41e6-8dea-ec66f52f7613) WaitActive WaitQuorum WaitActive ds_transition to WaitQuorum
20909 Sep 22 23:14:27.965 INFO Upstairs <-> Downstairs Message Version: 4
20910 Sep 22 23:14:27.965 INFO [0] Transition from WaitActive to WaitQuorum
20911 Sep 22 23:14:27.965 WARN [0] new RM replaced this: None
20912 Sep 22 23:14:27.965 INFO Crucible stats registered with UUID: 6759ea0c-86e6-48d3-9651-28d14390b7c1
20913 Sep 22 23:14:27.965 INFO [0] Starts reconcile loop
20914 Sep 22 23:14:27.965 INFO Crucible 6759ea0c-86e6-48d3-9651-28d14390b7c1 has session id: 35b1cde4-1665-4791-808b-231481a45998
20915 Sep 22 23:14:27.965 INFO [2] da9f63ff-0d34-4427-ba79-865cdebab321 (5502d8a4-3689-41e6-8dea-ec66f52f7613) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
20916 Sep 22 23:14:27.965 INFO [2] Transition from WaitActive to WaitQuorum
20917 Sep 22 23:14:27.965 WARN [2] new RM replaced this: None
20918 Sep 22 23:14:27.965 INFO [2] Starts reconcile loop
20919 Sep 22 23:14:27.965 INFO listening on 127.0.0.1:0, task: main
20920 Sep 22 23:14:27.965 INFO [1] 127.0.0.1:46036 task reports connection:true
20921 Sep 22 23:14:27.965 INFO da9f63ff-0d34-4427-ba79-865cdebab321 WaitQuorum WaitQuorum WaitQuorum
20922 Sep 22 23:14:27.965 INFO listening on 127.0.0.1:0, task: main
20923 Sep 22 23:14:27.965 INFO [0]R flush_numbers: [0, 0]
20924 Sep 22 23:14:27.965 INFO listening on 127.0.0.1:0, task: main
20925 Sep 22 23:14:27.965 INFO [0]R generation: [0, 0]
20926 Sep 22 23:14:27.965 INFO [0]R dirty: [false, false]
20927 Sep 22 23:14:27.965 INFO [1]R flush_numbers: [0, 0]
20928 Sep 22 23:14:27.965 INFO [1]R generation: [0, 0]
20929 Sep 22 23:14:27.965 INFO [0] connecting to 127.0.0.1:56875, looper: 0
20930 Sep 22 23:14:27.965 INFO [1]R dirty: [false, false]
20931 Sep 22 23:14:27.965 INFO [2]R flush_numbers: [0, 0]
20932 Sep 22 23:14:27.965 INFO [2]R generation: [0, 0]
20933 Sep 22 23:14:27.965 INFO [2]R dirty: [false, false]
20934 Sep 22 23:14:27.966 INFO Max found gen is 1
20935 Sep 22 23:14:27.966 INFO Generation requested: 1 >= found:1
20936 Sep 22 23:14:27.966 INFO Next flush: 1
20937 Sep 22 23:14:27.966 INFO All extents match
20938 Sep 22 23:14:27.966 INFO No downstairs repair required
20939 Sep 22 23:14:27.966 INFO No initial repair work was required
20940 Sep 22 23:14:27.966 INFO Set Downstairs and Upstairs active
20941 Sep 22 23:14:27.966 INFO [1] connecting to 127.0.0.1:59924, looper: 1
20942 Sep 22 23:14:27.966 INFO da9f63ff-0d34-4427-ba79-865cdebab321 is now active with session: 5502d8a4-3689-41e6-8dea-ec66f52f7613
20943 Sep 22 23:14:27.966 INFO da9f63ff-0d34-4427-ba79-865cdebab321 Set Active after no repair
20944 Sep 22 23:14:27.966 INFO Notify all downstairs, region set compare is done.
20945 Sep 22 23:14:27.966 INFO Set check for repair
20946 Sep 22 23:14:27.966 INFO [0] 127.0.0.1:39253 task reports connection:true
20947 Sep 22 23:14:27.966 INFO da9f63ff-0d34-4427-ba79-865cdebab321 Active Active Active
20948 Sep 22 23:14:27.966 INFO Set check for repair
20949 Sep 22 23:14:27.966 INFO [2] connecting to 127.0.0.1:44361, looper: 2
20950 Sep 22 23:14:27.966 INFO [2] 127.0.0.1:37718 task reports connection:true
20951 Sep 22 23:14:27.966 INFO da9f63ff-0d34-4427-ba79-865cdebab321 Active Active Active
20952 Sep 22 23:14:27.966 INFO Set check for repair
20953 Sep 22 23:14:27.966 INFO up_listen starts, task: up_listen
20954 Sep 22 23:14:27.966 INFO [0] received reconcile message
20955 Sep 22 23:14:27.966 INFO Wait for all three downstairs to come online
20956 Sep 22 23:14:27.966 INFO [0] All repairs completed, exit
20957 Sep 22 23:14:27.966 INFO Flush timeout: 0.5
20958 Sep 22 23:14:27.966 INFO [0] Starts cmd_loop
20959 Sep 22 23:14:27.966 INFO [1] received reconcile message
20960 Sep 22 23:14:27.966 INFO [1] All repairs completed, exit
20961 Sep 22 23:14:27.966 INFO [1] Starts cmd_loop
20962 Sep 22 23:14:27.966 INFO [0] 6759ea0c-86e6-48d3-9651-28d14390b7c1 looper connected, looper: 0
20963 Sep 22 23:14:27.966 INFO [2] received reconcile message
20964 Sep 22 23:14:27.966 INFO [2] All repairs completed, exit
20965 Sep 22 23:14:27.966 INFO [0] Proc runs for 127.0.0.1:56875 in state New
20966 Sep 22 23:14:27.966 INFO [2] Starts cmd_loop
20967 The guest has finished waiting for activation
20968 Sep 22 23:14:27.966 INFO [1] 6759ea0c-86e6-48d3-9651-28d14390b7c1 looper connected, looper: 1
20969 Sep 22 23:14:27.966 INFO [1] Proc runs for 127.0.0.1:59924 in state New
20970 Sep 22 23:14:27.966 INFO [2] 6759ea0c-86e6-48d3-9651-28d14390b7c1 looper connected, looper: 2
20971 Sep 22 23:14:27.966 INFO [2] Proc runs for 127.0.0.1:44361 in state New
20972 Sep 22 23:14:27.967 INFO accepted connection from 127.0.0.1:58618, task: main
20973 Sep 22 23:14:27.967 DEBG IO Read 1000 has deps []
20974 Sep 22 23:14:27.967 INFO accepted connection from 127.0.0.1:46006, task: main
20975 Sep 22 23:14:27.967 INFO accepted connection from 127.0.0.1:42838, task: main
20976 Sep 22 23:14:27.967 DEBG IO Write 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
20977 Sep 22 23:14:27.967 INFO Connection request from 6759ea0c-86e6-48d3-9651-28d14390b7c1 with version 4, task: proc
20978 Sep 22 23:14:27.967 INFO upstairs UpstairsConnection { upstairs_id: 6759ea0c-86e6-48d3-9651-28d14390b7c1, session_id: 64036d42-d0be-4120-9d55-87badff38ec8, gen: 1 } connected, version 4, task: proc
20979 Sep 22 23:14:27.967 INFO Connection request from 6759ea0c-86e6-48d3-9651-28d14390b7c1 with version 4, task: proc
20980 Sep 22 23:14:27.967 INFO upstairs UpstairsConnection { upstairs_id: 6759ea0c-86e6-48d3-9651-28d14390b7c1, session_id: 64036d42-d0be-4120-9d55-87badff38ec8, gen: 1 } connected, version 4, task: proc
20981 Sep 22 23:14:27.967 INFO Connection request from 6759ea0c-86e6-48d3-9651-28d14390b7c1 with version 4, task: proc
20982 Sep 22 23:14:27.967 INFO upstairs UpstairsConnection { upstairs_id: 6759ea0c-86e6-48d3-9651-28d14390b7c1, session_id: 64036d42-d0be-4120-9d55-87badff38ec8, gen: 1 } connected, version 4, task: proc
20983 Sep 22 23:14:27.967 DEBG Read :1000 deps:[] res:true
20984 Sep 22 23:14:27.968 INFO [0] 6759ea0c-86e6-48d3-9651-28d14390b7c1 (64036d42-d0be-4120-9d55-87badff38ec8) New New New ds_transition to WaitActive
20985 Sep 22 23:14:27.968 INFO [0] Transition from New to WaitActive
20986 Sep 22 23:14:27.968 DEBG Read :1000 deps:[] res:true
20987 Sep 22 23:14:27.968 INFO [1] 6759ea0c-86e6-48d3-9651-28d14390b7c1 (64036d42-d0be-4120-9d55-87badff38ec8) WaitActive New New ds_transition to WaitActive
20988 Sep 22 23:14:27.968 INFO [1] Transition from New to WaitActive
20989 Sep 22 23:14:27.968 INFO [2] 6759ea0c-86e6-48d3-9651-28d14390b7c1 (64036d42-d0be-4120-9d55-87badff38ec8) WaitActive WaitActive New ds_transition to WaitActive
20990 Sep 22 23:14:27.968 INFO [2] Transition from New to WaitActive
20991 The guest has requested activation
20992 Sep 22 23:14:27.968 DEBG Read :1000 deps:[] res:true
20993 Sep 22 23:14:27.968 INFO 6759ea0c-86e6-48d3-9651-28d14390b7c1 active request set
20994 Sep 22 23:14:27.968 INFO [0] received activate with gen 1
20995 Sep 22 23:14:27.968 INFO [0] client got ds_active_rx, promote! session 64036d42-d0be-4120-9d55-87badff38ec8
20996 Sep 22 23:14:27.968 INFO [1] received activate with gen 1
20997 Sep 22 23:14:27.968 INFO [1] client got ds_active_rx, promote! session 64036d42-d0be-4120-9d55-87badff38ec8
20998 Sep 22 23:14:27.968 INFO [2] received activate with gen 1
20999 Sep 22 23:14:27.968 INFO [2] client got ds_active_rx, promote! session 64036d42-d0be-4120-9d55-87badff38ec8
21000 Sep 22 23:14:27.968 INFO UpstairsConnection { upstairs_id: 6759ea0c-86e6-48d3-9651-28d14390b7c1, session_id: 64036d42-d0be-4120-9d55-87badff38ec8, gen: 1 } is now active (read-write)
21001 Sep 22 23:14:27.969 INFO UpstairsConnection { upstairs_id: 6759ea0c-86e6-48d3-9651-28d14390b7c1, session_id: 64036d42-d0be-4120-9d55-87badff38ec8, gen: 1 } is now active (read-write)
21002 Sep 22 23:14:27.969 INFO UpstairsConnection { upstairs_id: 6759ea0c-86e6-48d3-9651-28d14390b7c1, session_id: 64036d42-d0be-4120-9d55-87badff38ec8, gen: 1 } is now active (read-write)
21003 Sep 22 23:14:27.969 DEBG [1] Read AckReady 1000, : downstairs
21004 Sep 22 23:14:27.969 DEBG [0] Read already AckReady 1000, : downstairs
21005 Sep 22 23:14:27.969 DEBG [2] Read already AckReady 1000, : downstairs
21006 Sep 22 23:14:27.969 DEBG up_ds_listen was notified
21007 Sep 22 23:14:27.969 DEBG up_ds_listen process 1000
21008 Sep 22 23:14:27.969 DEBG [A] ack job 1000:1, : downstairs
21009 Sep 22 23:14:27.969 INFO [0] downstairs client at 127.0.0.1:56875 has UUID 3b4bb3c0-2112-401e-871c-3ba820d1a25e
21010 Sep 22 23:14:27.969 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 3b4bb3c0-2112-401e-871c-3ba820d1a25e, encrypted: true, database_read_version: 1, database_write_version: 1 }
21011 Sep 22 23:14:27.969 INFO 6759ea0c-86e6-48d3-9651-28d14390b7c1 WaitActive WaitActive WaitActive
21012 Sep 22 23:14:27.969 DEBG up_ds_listen checked 1 jobs, back to waiting
21013 Sep 22 23:14:27.969 INFO [1] downstairs client at 127.0.0.1:59924 has UUID e2279262-aa71-4426-b934-e18abcaf8d84
21014 Sep 22 23:14:27.969 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e2279262-aa71-4426-b934-e18abcaf8d84, encrypted: true, database_read_version: 1, database_write_version: 1 }
21015 Sep 22 23:14:27.969 INFO 6759ea0c-86e6-48d3-9651-28d14390b7c1 WaitActive WaitActive WaitActive
21016 Sep 22 23:14:27.969 INFO [2] downstairs client at 127.0.0.1:44361 has UUID a97b4e0c-86a6-4238-838d-e198926e4567
21017 Sep 22 23:14:27.969 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a97b4e0c-86a6-4238-838d-e198926e4567, encrypted: true, database_read_version: 1, database_write_version: 1 }
21018 Sep 22 23:14:27.969 INFO 6759ea0c-86e6-48d3-9651-28d14390b7c1 WaitActive WaitActive WaitActive
21019 Sep 22 23:14:27.970 INFO Current flush_numbers [0..12]: [0, 0]
21020 Sep 22 23:14:27.970 INFO Downstairs has completed Negotiation, task: proc
21021 Sep 22 23:14:27.971 DEBG IO Write 1001 has deps [JobId(1000)]
21022 Sep 22 23:14:27.971 DEBG up_ds_listen was notified
21023 Sep 22 23:14:27.971 DEBG up_ds_listen process 1001
21024 Sep 22 23:14:27.971 DEBG [A] ack job 1001:2, : downstairs
21025 Sep 22 23:14:27.971 INFO Current flush_numbers [0..12]: [0, 0]
21026 Sep 22 23:14:27.971 DEBG up_ds_listen checked 1 jobs, back to waiting
21027 Sep 22 23:14:27.971 INFO Downstairs has completed Negotiation, task: proc
21028 Sep 22 23:14:27.971 INFO Current flush_numbers [0..12]: [0, 0]
21029 test test::integration_test_volume_subvols_parent_scrub_sparse ... ok
21030 Sep 22 23:14:27.972 INFO Downstairs has completed Negotiation, task: proc
21031 Sep 22 23:14:27.972 INFO current number of open files limit 65536 is already the maximum
21032 Sep 22 23:14:27.972 INFO [0] 6759ea0c-86e6-48d3-9651-28d14390b7c1 (64036d42-d0be-4120-9d55-87badff38ec8) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
21033 Sep 22 23:14:27.972 INFO [0] Transition from WaitActive to WaitQuorum
21034 Sep 22 23:14:27.972 WARN [0] new RM replaced this: None
21035 Sep 22 23:14:27.972 INFO [0] Starts reconcile loop
21036 Sep 22 23:14:27.972 INFO Created new region file "/tmp/downstairs-CjWA24Kr/region.json"
21037 Sep 22 23:14:27.972 INFO [1] 6759ea0c-86e6-48d3-9651-28d14390b7c1 (64036d42-d0be-4120-9d55-87badff38ec8) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
21038 Sep 22 23:14:27.972 INFO [1] Transition from WaitActive to WaitQuorum
21039 Sep 22 23:14:27.972 WARN [1] new RM replaced this: None
21040 Sep 22 23:14:27.972 INFO [1] Starts reconcile loop
21041 Sep 22 23:14:27.972 DEBG Write :1001 deps:[JobId(1000)] res:true
21042 Sep 22 23:14:27.972 INFO [2] 6759ea0c-86e6-48d3-9651-28d14390b7c1 (64036d42-d0be-4120-9d55-87badff38ec8) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
21043 Sep 22 23:14:27.972 INFO [2] Transition from WaitActive to WaitQuorum
21044 Sep 22 23:14:27.972 WARN [2] new RM replaced this: None
21045 Sep 22 23:14:27.972 INFO [2] Starts reconcile loop
21046 Sep 22 23:14:27.972 DEBG up_ds_listen was notified
21047 Sep 22 23:14:27.972 INFO [0] 127.0.0.1:56875 task reports connection:true
21048 Sep 22 23:14:27.972 INFO 6759ea0c-86e6-48d3-9651-28d14390b7c1 WaitQuorum WaitQuorum WaitQuorum
21049 Sep 22 23:14:27.972 DEBG up_ds_listen process 1003
21050 Sep 22 23:14:27.972 INFO [0]R flush_numbers: [0, 0]
21051 Sep 22 23:14:27.972 INFO [0]R generation: [0, 0]
21052 Sep 22 23:14:27.972 DEBG [A] ack job 1003:4, : downstairs
21053 Sep 22 23:14:27.972 INFO [0]R dirty: [false, false]
21054 Sep 22 23:14:27.972 INFO [1]R flush_numbers: [0, 0]
21055 Sep 22 23:14:27.972 INFO [1]R generation: [0, 0]
21056 Sep 22 23:14:27.972 INFO [1]R dirty: [false, false]
21057 Sep 22 23:14:27.972 INFO [2]R flush_numbers: [0, 0]
21058 Sep 22 23:14:27.972 INFO [2]R generation: [0, 0]
21059 Sep 22 23:14:27.972 INFO [2]R dirty: [false, false]
21060 Sep 22 23:14:27.972 DEBG up_ds_listen checked 1 jobs, back to waiting
21061 Sep 22 23:14:27.972 INFO Max found gen is 1
21062 Sep 22 23:14:27.972 INFO Generation requested: 1 >= found:1
21063 Sep 22 23:14:27.972 INFO Next flush: 1
21064 Sep 22 23:14:27.972 INFO All extents match
21065 Sep 22 23:14:27.972 INFO No downstairs repair required
21066 Sep 22 23:14:27.972 INFO No initial repair work was required
21067 Sep 22 23:14:27.972 INFO Set Downstairs and Upstairs active
21068 Sep 22 23:14:27.972 INFO 6759ea0c-86e6-48d3-9651-28d14390b7c1 is now active with session: 64036d42-d0be-4120-9d55-87badff38ec8
21069 Sep 22 23:14:27.972 DEBG Write :1001 deps:[JobId(1000)] res:true
21070 Sep 22 23:14:27.972 INFO 6759ea0c-86e6-48d3-9651-28d14390b7c1 Set Active after no repair
21071 Sep 22 23:14:27.972 INFO Notify all downstairs, region set compare is done.
21072 Sep 22 23:14:27.972 INFO Set check for repair
21073 Sep 22 23:14:27.972 INFO [1] 127.0.0.1:59924 task reports connection:true
21074 Sep 22 23:14:27.972 INFO 6759ea0c-86e6-48d3-9651-28d14390b7c1 Active Active Active
21075 Sep 22 23:14:27.972 INFO Set check for repair
21076 Sep 22 23:14:27.973 INFO [2] 127.0.0.1:44361 task reports connection:true
21077 Sep 22 23:14:27.973 INFO 6759ea0c-86e6-48d3-9651-28d14390b7c1 Active Active Active
21078 Sep 22 23:14:27.973 INFO Set check for repair
21079 Sep 22 23:14:27.973 INFO [0] received reconcile message
21080 Sep 22 23:14:27.973 INFO [0] All repairs completed, exit
21081 Sep 22 23:14:27.973 INFO [0] Starts cmd_loop
21082 Sep 22 23:14:27.973 INFO [1] received reconcile message
21083 Sep 22 23:14:27.973 INFO [1] All repairs completed, exit
21084 Sep 22 23:14:27.973 INFO [1] Starts cmd_loop
21085 Sep 22 23:14:27.973 INFO [2] received reconcile message
21086 Sep 22 23:14:27.973 DEBG Write :1001 deps:[JobId(1000)] res:true
21087 Sep 22 23:14:27.973 INFO [2] All repairs completed, exit
21088 Sep 22 23:14:27.973 INFO [2] Starts cmd_loop
21089 The guest has finished waiting for activation
21090 Sep 22 23:14:27.973 DEBG IO Read 1002 has deps [JobId(1001)]
21091 Sep 22 23:14:27.974 DEBG IO Write 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
21092 Sep 22 23:14:27.974 DEBG Read :1002 deps:[JobId(1001)] res:true
21093 Sep 22 23:14:27.974 DEBG Read :1002 deps:[JobId(1001)] res:true
21094 Sep 22 23:14:27.974 DEBG Read :1002 deps:[JobId(1001)] res:true
21095 Sep 22 23:14:27.975 INFO current number of open files limit 65536 is already the maximum
21096 Sep 22 23:14:27.975 INFO Opened existing region file "/tmp/downstairs-CjWA24Kr/region.json"
21097 Sep 22 23:14:27.975 INFO Database read version 1
21098 Sep 22 23:14:27.975 DEBG IO Write 1000 has deps []
21099 Sep 22 23:14:27.975 INFO Database write version 1
21100 Sep 22 23:14:27.975 DEBG up_ds_listen was notified
21101 Sep 22 23:14:27.975 DEBG up_ds_listen process 1000
21102 Sep 22 23:14:27.975 DEBG [A] ack job 1000:1, : downstairs
21103 Sep 22 23:14:27.975 DEBG [1] Read AckReady 1002, : downstairs
21104 Sep 22 23:14:27.975 DEBG up_ds_listen checked 1 jobs, back to waiting
21105 Sep 22 23:14:27.975 DEBG [0] Read already AckReady 1002, : downstairs
21106 Sep 22 23:14:27.976 DEBG [2] Read already AckReady 1002, : downstairs
21107 Sep 22 23:14:27.976 DEBG up_ds_listen was notified
21108 Sep 22 23:14:27.976 DEBG up_ds_listen process 1002
21109 Sep 22 23:14:27.976 DEBG [A] ack job 1002:3, : downstairs
21110 Sep 22 23:14:27.976 DEBG up_ds_listen checked 1 jobs, back to waiting
21111 Sep 22 23:14:27.976 DEBG up_ds_listen was notified
21112 Sep 22 23:14:27.976 DEBG up_ds_listen process 1003
21113 Sep 22 23:14:27.976 DEBG [A] ack job 1003:4, : downstairs
21114 Sep 22 23:14:27.976 DEBG up_ds_listen checked 1 jobs, back to waiting
21115 Sep 22 23:14:27.977 INFO Scrub at offset 15/15 sp:15
21116 Sep 22 23:14:27.977 INFO UUID: 989c51b5-eaa4-4274-9bd1-0900d0b855e1
21117 Sep 22 23:14:27.977 INFO Blocks per extent:5 Total Extents: 2
21118 Sep 22 23:14:27.977 INFO Crucible Version: Crucible Version: 0.0.1
21119 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21120 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21121 rustc: 1.70.0 stable x86_64-unknown-illumos
21122 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21123 Sep 22 23:14:27.977 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21124 Sep 22 23:14:27.977 INFO Using address: 127.0.0.1:62045, task: main
21125 Sep 22 23:14:27.977 INFO Repair listens on 127.0.0.1:0, task: repair
21126 Sep 22 23:14:27.977 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:44279, task: repair
21127 Sep 22 23:14:27.977 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:44279, task: repair
21128 Sep 22 23:14:27.977 INFO listening, local_addr: 127.0.0.1:44279, task: repair
21129 Sep 22 23:14:27.977 DEBG Write :1000 deps:[] res:true
21130 Sep 22 23:14:27.977 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:44279, task: repair
21131 Sep 22 23:14:27.978 INFO Using repair address: 127.0.0.1:44279, task: main
21132 Sep 22 23:14:27.978 INFO No SSL acceptor configured, task: main
21133 Sep 22 23:14:27.978 INFO Scrub 22457d3a-4c86-4e03-b495-3e59aee64613 done in 0 seconds. Retries:0 scrub_size:7680 size:15 pause_milli:0
21134 Sep 22 23:14:27.978 DEBG IO Flush 1004 has deps [JobId(1003), JobId(1002), JobId(1001), JobId(1000)]
21135 Sep 22 23:14:27.978 INFO current number of open files limit 65536 is already the maximum
21136 Sep 22 23:14:27.978 INFO Created new region file "/tmp/downstairs-rUNKYH5o/region.json"
21137 Sep 22 23:14:27.978 DEBG Write :1000 deps:[] res:true
21138 Sep 22 23:14:27.979 DEBG Write :1000 deps:[] res:true
21139 Sep 22 23:14:27.979 DEBG IO Read 1001 has deps [JobId(1000)]
21140 Sep 22 23:14:27.979 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
21141 Sep 22 23:14:27.979 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
21142 Sep 22 23:14:27.979 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
21143 Sep 22 23:14:27.980 DEBG up_ds_listen was notified
21144 Sep 22 23:14:27.980 DEBG up_ds_listen process 1004
21145 Sep 22 23:14:27.980 DEBG [A] ack job 1004:5, : downstairs
21146 Sep 22 23:14:27.980 DEBG [rc] retire 1004 clears [JobId(1000), JobId(1001), JobId(1002), JobId(1003), JobId(1004)], : downstairs
21147 Sep 22 23:14:27.980 DEBG up_ds_listen checked 1 jobs, back to waiting
21148 Sep 22 23:14:27.980 DEBG IO Flush 1004 has deps [JobId(1003), JobId(1002), JobId(1001), JobId(1000)]
21149 Sep 22 23:14:27.980 DEBG Read :1001 deps:[JobId(1000)] res:true
21150 Sep 22 23:14:27.981 DEBG Read :1001 deps:[JobId(1000)] res:true
21151 Sep 22 23:14:27.982 DEBG Read :1001 deps:[JobId(1000)] res:true
21152 Sep 22 23:14:27.982 INFO current number of open files limit 65536 is already the maximum
21153 Sep 22 23:14:27.982 INFO Opened existing region file "/tmp/downstairs-rUNKYH5o/region.json"
21154 Sep 22 23:14:27.982 INFO Database read version 1
21155 Sep 22 23:14:27.982 INFO Database write version 1
21156 Sep 22 23:14:27.982 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
21157 Sep 22 23:14:27.982 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
21158 Sep 22 23:14:27.982 DEBG Flush :1004 extent_limit None deps:[JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
21159 Sep 22 23:14:27.983 DEBG up_ds_listen was notified
21160 Sep 22 23:14:27.983 DEBG up_ds_listen process 1004
21161 Sep 22 23:14:27.983 DEBG [A] ack job 1004:5, : downstairs
21162 Sep 22 23:14:27.983 DEBG [rc] retire 1004 clears [JobId(1000), JobId(1001), JobId(1002), JobId(1003), JobId(1004)], : downstairs
21163 Sep 22 23:14:27.983 DEBG up_ds_listen checked 1 jobs, back to waiting
21164 Sep 22 23:14:27.983 DEBG IO Read 1005 has deps []
21165 Sep 22 23:14:27.983 INFO UUID: 0dbe2b78-0264-4d8b-ad44-c519f3295e35
21166 Sep 22 23:14:27.983 INFO Blocks per extent:5 Total Extents: 2
21167 Sep 22 23:14:27.983 INFO Crucible Version: Crucible Version: 0.0.1
21168 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21169 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21170 rustc: 1.70.0 stable x86_64-unknown-illumos
21171 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21172 test test::integration_test_url ... ok
21173 Sep 22 23:14:27.984 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21174 Sep 22 23:14:27.984 INFO Using address: 127.0.0.1:37528, task: main
21175 Sep 22 23:14:27.984 DEBG Read :1005 deps:[] res:true
21176 Sep 22 23:14:27.984 INFO Repair listens on 127.0.0.1:0, task: repair
21177 Sep 22 23:14:27.984 INFO current number of open files limit 65536 is already the maximum
21178 Sep 22 23:14:27.984 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57545, task: repair
21179 Sep 22 23:14:27.984 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57545, task: repair
21180 Sep 22 23:14:27.984 INFO Created new region file "/tmp/downstairs-ILagCh4E/region.json"
21181 Sep 22 23:14:27.984 INFO listening, local_addr: 127.0.0.1:57545, task: repair
21182 Sep 22 23:14:27.984 DEBG Read :1005 deps:[] res:true
21183 Sep 22 23:14:27.984 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57545, task: repair
21184 Sep 22 23:14:27.984 INFO Using repair address: 127.0.0.1:57545, task: main
21185 Sep 22 23:14:27.984 INFO No SSL acceptor configured, task: main
21186 Sep 22 23:14:27.985 INFO current number of open files limit 65536 is already the maximum
21187 Sep 22 23:14:27.985 DEBG [0] Read AckReady 1001, : downstairs
21188 Sep 22 23:14:27.985 DEBG Read :1005 deps:[] res:true
21189 Sep 22 23:14:27.985 INFO Created new region file "/tmp/downstairs-G1TA2uOj/region.json"
21190 Sep 22 23:14:27.986 DEBG [1] Read already AckReady 1001, : downstairs
21191 Sep 22 23:14:27.988 DEBG [0] Read AckReady 1005, : downstairs
21192 Sep 22 23:14:27.988 INFO current number of open files limit 65536 is already the maximum
21193 Sep 22 23:14:27.988 INFO Opened existing region file "/tmp/downstairs-ILagCh4E/region.json"
21194 Sep 22 23:14:27.988 INFO Database read version 1
21195 Sep 22 23:14:27.988 INFO Database write version 1
21196 Sep 22 23:14:27.988 DEBG [2] Read already AckReady 1001, : downstairs
21197 Sep 22 23:14:27.988 DEBG up_ds_listen was notified
21198 Sep 22 23:14:27.988 DEBG up_ds_listen process 1001
21199 Sep 22 23:14:27.988 DEBG [A] ack job 1001:2, : downstairs
21200 Sep 22 23:14:27.988 DEBG up_ds_listen checked 1 jobs, back to waiting
21201 Sep 22 23:14:27.989 INFO current number of open files limit 65536 is already the maximum
21202 Sep 22 23:14:27.989 INFO Opened existing region file "/tmp/downstairs-G1TA2uOj/region.json"
21203 Sep 22 23:14:27.989 INFO Database read version 1
21204 Sep 22 23:14:27.989 INFO Database write version 1
21205 Sep 22 23:14:27.990 DEBG [1] Read already AckReady 1005, : downstairs
21206 Sep 22 23:14:27.990 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
21207 Sep 22 23:14:27.991 INFO UUID: 177b9038-d4da-4321-bb6f-83528c5df2bb
21208 Sep 22 23:14:27.991 INFO Blocks per extent:5 Total Extents: 2
21209 Sep 22 23:14:27.991 INFO Crucible Version: Crucible Version: 0.0.1
21210 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21211 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21212 rustc: 1.70.0 stable x86_64-unknown-illumos
21213 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21214 Sep 22 23:14:27.991 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21215 Sep 22 23:14:27.991 INFO Using address: 127.0.0.1:53498, task: main
21216 Sep 22 23:14:27.991 INFO UUID: 8d12fb73-3125-4add-9524-088d1d513bcf
21217 Sep 22 23:14:27.991 INFO Blocks per extent:5 Total Extents: 2
21218 Sep 22 23:14:27.991 INFO Crucible Version: Crucible Version: 0.0.1
21219 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21220 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21221 rustc: 1.70.0 stable x86_64-unknown-illumos
21222 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21223 Sep 22 23:14:27.991 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21224 Sep 22 23:14:27.991 INFO Using address: 127.0.0.1:59233, task: main
21225 Sep 22 23:14:27.991 INFO Repair listens on 127.0.0.1:0, task: repair
21226 Sep 22 23:14:27.991 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33162, task: repair
21227 Sep 22 23:14:27.991 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33162, task: repair
21228 Sep 22 23:14:27.991 INFO listening, local_addr: 127.0.0.1:33162, task: repair
21229 Sep 22 23:14:27.991 INFO Repair listens on 127.0.0.1:0, task: repair
21230 Sep 22 23:14:27.991 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60052, task: repair
21231 Sep 22 23:14:27.991 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60052, task: repair
21232 Sep 22 23:14:27.991 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33162, task: repair
21233 Sep 22 23:14:27.991 INFO listening, local_addr: 127.0.0.1:60052, task: repair
21234 Sep 22 23:14:27.991 INFO Using repair address: 127.0.0.1:33162, task: main
21235 Sep 22 23:14:27.991 INFO No SSL acceptor configured, task: main
21236 Sep 22 23:14:27.992 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60052, task: repair
21237 Sep 22 23:14:27.992 INFO Using repair address: 127.0.0.1:60052, task: main
21238 Sep 22 23:14:27.992 INFO No SSL acceptor configured, task: main
21239 Sep 22 23:14:27.992 INFO current number of open files limit 65536 is already the maximum
21240 Sep 22 23:14:27.992 INFO Created new region file "/tmp/downstairs-8mPvIugn/region.json"
21241 Sep 22 23:14:27.992 DEBG [2] Read already AckReady 1005, : downstairs
21242 Sep 22 23:14:27.992 INFO Upstairs starts
21243 Sep 22 23:14:27.992 INFO Crucible Version: BuildInfo {
21244 version: "0.0.1",
21245 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
21246 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
21247 git_branch: "main",
21248 rustc_semver: "1.70.0",
21249 rustc_channel: "stable",
21250 rustc_host_triple: "x86_64-unknown-illumos",
21251 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
21252 cargo_triple: "x86_64-unknown-illumos",
21253 debug: true,
21254 opt_level: 0,
21255 }
21256 Sep 22 23:14:27.992 DEBG up_ds_listen was notified
21257 Sep 22 23:14:27.992 INFO Upstairs <-> Downstairs Message Version: 4
21258 Sep 22 23:14:27.992 DEBG up_ds_listen process 1005
21259 Sep 22 23:14:27.992 INFO Crucible stats registered with UUID: d294d936-1c70-474e-8080-a4dab42d6778
21260 Sep 22 23:14:27.992 DEBG [A] ack job 1005:6, : downstairs
21261 Sep 22 23:14:27.992 INFO Crucible d294d936-1c70-474e-8080-a4dab42d6778 has session id: 16d0bb5f-a970-4c72-8e04-70e4a93316bd
21262 Sep 22 23:14:27.992 INFO listening on 127.0.0.1:0, task: main
21263 Sep 22 23:14:27.992 INFO listening on 127.0.0.1:0, task: main
21264 Sep 22 23:14:27.992 INFO listening on 127.0.0.1:0, task: main
21265 Sep 22 23:14:27.992 INFO [0] connecting to 127.0.0.1:62045, looper: 0
21266 Sep 22 23:14:27.992 DEBG up_ds_listen checked 1 jobs, back to waiting
21267 Sep 22 23:14:27.992 INFO [1] connecting to 127.0.0.1:37528, looper: 1
21268 Sep 22 23:14:27.992 INFO [2] connecting to 127.0.0.1:59233, looper: 2
21269 Sep 22 23:14:27.993 INFO up_listen starts, task: up_listen
21270 Sep 22 23:14:27.993 INFO Wait for all three downstairs to come online
21271 Sep 22 23:14:27.993 DEBG IO Read 1005 has deps []
21272 Sep 22 23:14:27.993 INFO Flush timeout: 0.5
21273 Sep 22 23:14:27.993 INFO accepted connection from 127.0.0.1:41831, task: main
21274 Sep 22 23:14:27.993 INFO accepted connection from 127.0.0.1:54062, task: main
21275 Sep 22 23:14:27.993 INFO [0] d294d936-1c70-474e-8080-a4dab42d6778 looper connected, looper: 0
21276 Sep 22 23:14:27.993 INFO [0] Proc runs for 127.0.0.1:62045 in state New
21277 Sep 22 23:14:27.993 INFO accepted connection from 127.0.0.1:57271, task: main
21278 Sep 22 23:14:27.993 INFO [1] d294d936-1c70-474e-8080-a4dab42d6778 looper connected, looper: 1
21279 Sep 22 23:14:27.993 INFO [1] Proc runs for 127.0.0.1:37528 in state New
21280 Sep 22 23:14:27.993 INFO [2] d294d936-1c70-474e-8080-a4dab42d6778 looper connected, looper: 2
21281 Sep 22 23:14:27.993 INFO [2] Proc runs for 127.0.0.1:59233 in state New
21282 Sep 22 23:14:27.993 INFO Connection request from d294d936-1c70-474e-8080-a4dab42d6778 with version 4, task: proc
21283 Sep 22 23:14:27.993 INFO upstairs UpstairsConnection { upstairs_id: d294d936-1c70-474e-8080-a4dab42d6778, session_id: eff1e9a9-bbb5-477e-acde-f6036995e381, gen: 1 } connected, version 4, task: proc
21284 Sep 22 23:14:27.993 INFO Connection request from d294d936-1c70-474e-8080-a4dab42d6778 with version 4, task: proc
21285 Sep 22 23:14:27.993 INFO upstairs UpstairsConnection { upstairs_id: d294d936-1c70-474e-8080-a4dab42d6778, session_id: eff1e9a9-bbb5-477e-acde-f6036995e381, gen: 1 } connected, version 4, task: proc
21286 Sep 22 23:14:27.994 INFO Connection request from d294d936-1c70-474e-8080-a4dab42d6778 with version 4, task: proc
21287 Sep 22 23:14:27.994 DEBG Read :1005 deps:[] res:true
21288 Sep 22 23:14:27.994 INFO upstairs UpstairsConnection { upstairs_id: d294d936-1c70-474e-8080-a4dab42d6778, session_id: eff1e9a9-bbb5-477e-acde-f6036995e381, gen: 1 } connected, version 4, task: proc
21289 Sep 22 23:14:27.994 DEBG up_ds_listen was notified
21290 Sep 22 23:14:27.994 DEBG up_ds_listen process 1002
21291 Sep 22 23:14:27.994 DEBG [A] ack job 1002:3, : downstairs
21292 Sep 22 23:14:27.994 DEBG up_ds_listen checked 1 jobs, back to waiting
21293 Sep 22 23:14:27.994 INFO [0] d294d936-1c70-474e-8080-a4dab42d6778 (eff1e9a9-bbb5-477e-acde-f6036995e381) New New New ds_transition to WaitActive
21294 Sep 22 23:14:27.994 INFO [0] Transition from New to WaitActive
21295 Sep 22 23:14:27.994 INFO [1] d294d936-1c70-474e-8080-a4dab42d6778 (eff1e9a9-bbb5-477e-acde-f6036995e381) WaitActive New New ds_transition to WaitActive
21296 Sep 22 23:14:27.994 INFO [1] Transition from New to WaitActive
21297 Sep 22 23:14:27.994 INFO [2] d294d936-1c70-474e-8080-a4dab42d6778 (eff1e9a9-bbb5-477e-acde-f6036995e381) WaitActive WaitActive New ds_transition to WaitActive
21298 Sep 22 23:14:27.994 INFO [2] Transition from New to WaitActive
21299 Sep 22 23:14:27.994 DEBG Read :1005 deps:[] res:true
21300 Sep 22 23:14:27.994 DEBG IO Read 1003 has deps [JobId(1002), JobId(1000)]
21301 The guest has requested activation
21302 Sep 22 23:14:27.994 INFO d294d936-1c70-474e-8080-a4dab42d6778 active request set
21303 Sep 22 23:14:27.994 INFO [0] received activate with gen 1
21304 Sep 22 23:14:27.994 INFO [0] client got ds_active_rx, promote! session eff1e9a9-bbb5-477e-acde-f6036995e381
21305 Sep 22 23:14:27.995 INFO [1] received activate with gen 1
21306 Sep 22 23:14:27.995 INFO [1] client got ds_active_rx, promote! session eff1e9a9-bbb5-477e-acde-f6036995e381
21307 Sep 22 23:14:27.995 DEBG Read :1005 deps:[] res:true
21308 Sep 22 23:14:27.995 INFO [2] received activate with gen 1
21309 Sep 22 23:14:27.995 INFO [2] client got ds_active_rx, promote! session eff1e9a9-bbb5-477e-acde-f6036995e381
21310 Sep 22 23:14:27.995 INFO UpstairsConnection { upstairs_id: d294d936-1c70-474e-8080-a4dab42d6778, session_id: eff1e9a9-bbb5-477e-acde-f6036995e381, gen: 1 } is now active (read-write)
21311 Sep 22 23:14:27.995 INFO UpstairsConnection { upstairs_id: d294d936-1c70-474e-8080-a4dab42d6778, session_id: eff1e9a9-bbb5-477e-acde-f6036995e381, gen: 1 } is now active (read-write)
21312 Sep 22 23:14:27.995 INFO UpstairsConnection { upstairs_id: d294d936-1c70-474e-8080-a4dab42d6778, session_id: eff1e9a9-bbb5-477e-acde-f6036995e381, gen: 1 } is now active (read-write)
21313 Sep 22 23:14:27.995 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
21314 Sep 22 23:14:27.995 INFO current number of open files limit 65536 is already the maximum
21315 Sep 22 23:14:27.995 INFO Opened existing region file "/tmp/downstairs-8mPvIugn/region.json"
21316 Sep 22 23:14:27.995 INFO Database read version 1
21317 Sep 22 23:14:27.995 INFO Database write version 1
21318 Sep 22 23:14:27.995 INFO [0] downstairs client at 127.0.0.1:62045 has UUID 989c51b5-eaa4-4274-9bd1-0900d0b855e1
21319 Sep 22 23:14:27.995 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
21320 Sep 22 23:14:27.996 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 989c51b5-eaa4-4274-9bd1-0900d0b855e1, encrypted: true, database_read_version: 1, database_write_version: 1 }
21321 Sep 22 23:14:27.996 INFO d294d936-1c70-474e-8080-a4dab42d6778 WaitActive WaitActive WaitActive
21322 Sep 22 23:14:27.996 INFO [1] downstairs client at 127.0.0.1:37528 has UUID 0dbe2b78-0264-4d8b-ad44-c519f3295e35
21323 Sep 22 23:14:27.996 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 0dbe2b78-0264-4d8b-ad44-c519f3295e35, encrypted: true, database_read_version: 1, database_write_version: 1 }
21324 Sep 22 23:14:27.996 INFO d294d936-1c70-474e-8080-a4dab42d6778 WaitActive WaitActive WaitActive
21325 Sep 22 23:14:27.996 INFO [2] downstairs client at 127.0.0.1:59233 has UUID 8d12fb73-3125-4add-9524-088d1d513bcf
21326 Sep 22 23:14:27.996 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 8d12fb73-3125-4add-9524-088d1d513bcf, encrypted: true, database_read_version: 1, database_write_version: 1 }
21327 Sep 22 23:14:27.996 INFO d294d936-1c70-474e-8080-a4dab42d6778 WaitActive WaitActive WaitActive
21328 Sep 22 23:14:27.996 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
21329 Sep 22 23:14:27.996 INFO Current flush_numbers [0..12]: [0, 0]
21330 Sep 22 23:14:27.996 INFO Downstairs has completed Negotiation, task: proc
21331 Sep 22 23:14:27.997 INFO Current flush_numbers [0..12]: [0, 0]
21332 Sep 22 23:14:27.997 INFO Downstairs has completed Negotiation, task: proc
21333 Sep 22 23:14:27.997 INFO Current flush_numbers [0..12]: [0, 0]
21334 Sep 22 23:14:27.997 INFO UUID: 7e66c791-8f61-4b2b-80e3-9850920f90c5
21335 Sep 22 23:14:27.997 INFO Blocks per extent:5 Total Extents: 2
21336 Sep 22 23:14:27.997 INFO Downstairs has completed Negotiation, task: proc
21337 Sep 22 23:14:27.997 INFO Crucible Version: Crucible Version: 0.0.1
21338 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21339 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21340 rustc: 1.70.0 stable x86_64-unknown-illumos
21341 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21342 Sep 22 23:14:27.997 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21343 Sep 22 23:14:27.997 INFO Using address: 127.0.0.1:35121, task: main
21344 Sep 22 23:14:27.997 INFO [0] d294d936-1c70-474e-8080-a4dab42d6778 (eff1e9a9-bbb5-477e-acde-f6036995e381) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
21345 Sep 22 23:14:27.997 INFO [0] Transition from WaitActive to WaitQuorum
21346 Sep 22 23:14:27.997 WARN [0] new RM replaced this: None
21347 Sep 22 23:14:27.997 INFO [0] Starts reconcile loop
21348 Sep 22 23:14:27.997 INFO Repair listens on 127.0.0.1:0, task: repair
21349 Sep 22 23:14:27.997 INFO [1] d294d936-1c70-474e-8080-a4dab42d6778 (eff1e9a9-bbb5-477e-acde-f6036995e381) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
21350 Sep 22 23:14:27.997 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33106, task: repair
21351 Sep 22 23:14:27.997 INFO [1] Transition from WaitActive to WaitQuorum
21352 Sep 22 23:14:27.997 WARN [1] new RM replaced this: None
21353 Sep 22 23:14:27.997 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33106, task: repair
21354 Sep 22 23:14:27.998 INFO [1] Starts reconcile loop
21355 Sep 22 23:14:27.998 INFO listening, local_addr: 127.0.0.1:33106, task: repair
21356 Sep 22 23:14:27.998 INFO [2] d294d936-1c70-474e-8080-a4dab42d6778 (eff1e9a9-bbb5-477e-acde-f6036995e381) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
21357 Sep 22 23:14:27.998 INFO [2] Transition from WaitActive to WaitQuorum
21358 Sep 22 23:14:27.998 WARN [2] new RM replaced this: None
21359 Sep 22 23:14:27.998 INFO [2] Starts reconcile loop
21360 Sep 22 23:14:27.998 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33106, task: repair
21361 Sep 22 23:14:27.998 INFO Using repair address: 127.0.0.1:33106, task: main
21362 Sep 22 23:14:27.998 INFO [0] 127.0.0.1:62045 task reports connection:true
21363 Sep 22 23:14:27.998 INFO No SSL acceptor configured, task: main
21364 Sep 22 23:14:27.998 INFO d294d936-1c70-474e-8080-a4dab42d6778 WaitQuorum WaitQuorum WaitQuorum
21365 Sep 22 23:14:27.998 INFO [0]R flush_numbers: [0, 0]
21366 Sep 22 23:14:27.998 INFO [0]R generation: [0, 0]
21367 Sep 22 23:14:27.998 DEBG [0] Read AckReady 1005, : downstairs
21368 Sep 22 23:14:27.998 INFO [0]R dirty: [false, false]
21369 Sep 22 23:14:27.998 INFO [1]R flush_numbers: [0, 0]
21370 Sep 22 23:14:27.998 INFO [1]R generation: [0, 0]
21371 Sep 22 23:14:27.998 INFO [1]R dirty: [false, false]
21372 Sep 22 23:14:27.998 INFO [2]R flush_numbers: [0, 0]
21373 Sep 22 23:14:27.998 INFO [2]R generation: [0, 0]
21374 Sep 22 23:14:27.998 INFO [2]R dirty: [false, false]
21375 Sep 22 23:14:27.998 INFO Max found gen is 1
21376 Sep 22 23:14:27.998 INFO Generation requested: 1 >= found:1
21377 Sep 22 23:14:27.998 INFO Next flush: 1
21378 Sep 22 23:14:27.998 INFO All extents match
21379 Sep 22 23:14:27.998 INFO No downstairs repair required
21380 Sep 22 23:14:27.998 INFO No initial repair work was required
21381 Sep 22 23:14:27.998 INFO Set Downstairs and Upstairs active
21382 Sep 22 23:14:27.998 INFO d294d936-1c70-474e-8080-a4dab42d6778 is now active with session: eff1e9a9-bbb5-477e-acde-f6036995e381
21383 Sep 22 23:14:27.998 INFO d294d936-1c70-474e-8080-a4dab42d6778 Set Active after no repair
21384 Sep 22 23:14:27.998 INFO Notify all downstairs, region set compare is done.
21385 Sep 22 23:14:27.998 INFO Set check for repair
21386 Sep 22 23:14:27.998 INFO current number of open files limit 65536 is already the maximum
21387 Sep 22 23:14:27.998 INFO [1] 127.0.0.1:37528 task reports connection:true
21388 Sep 22 23:14:27.998 INFO d294d936-1c70-474e-8080-a4dab42d6778 Active Active Active
21389 Sep 22 23:14:27.998 INFO Created new region file "/tmp/downstairs-HbhwudPm/region.json"
21390 Sep 22 23:14:27.998 INFO Set check for repair
21391 Sep 22 23:14:27.998 INFO [2] 127.0.0.1:59233 task reports connection:true
21392 Sep 22 23:14:27.998 INFO d294d936-1c70-474e-8080-a4dab42d6778 Active Active Active
21393 Sep 22 23:14:27.998 INFO Set check for repair
21394 Sep 22 23:14:27.998 INFO [0] received reconcile message
21395 Sep 22 23:14:27.998 INFO [0] All repairs completed, exit
21396 Sep 22 23:14:27.998 INFO [0] Starts cmd_loop
21397 Sep 22 23:14:27.999 INFO [1] received reconcile message
21398 Sep 22 23:14:27.999 INFO [1] All repairs completed, exit
21399 Sep 22 23:14:27.999 INFO [1] Starts cmd_loop
21400 Sep 22 23:14:27.999 INFO [2] received reconcile message
21401 Sep 22 23:14:27.999 INFO [2] All repairs completed, exit
21402 Sep 22 23:14:27.999 INFO [2] Starts cmd_loop
21403 The guest has finished waiting for activation
21404 Sep 22 23:14:28.000 DEBG [1] Read already AckReady 1005, : downstairs
21405 Sep 22 23:14:28.000 DEBG [0] Read AckReady 1003, : downstairs
21406 Sep 22 23:14:28.001 INFO current number of open files limit 65536 is already the maximum
21407 Sep 22 23:14:28.001 INFO Opened existing region file "/tmp/downstairs-HbhwudPm/region.json"
21408 Sep 22 23:14:28.001 INFO Database read version 1
21409 Sep 22 23:14:28.001 INFO Database write version 1
21410 Sep 22 23:14:28.001 DEBG [2] Read already AckReady 1005, : downstairs
21411 Sep 22 23:14:28.001 DEBG up_ds_listen was notified
21412 Sep 22 23:14:28.001 DEBG up_ds_listen process 1005
21413 Sep 22 23:14:28.002 DEBG [A] ack job 1005:6, : downstairs
21414 Sep 22 23:14:28.002 DEBG up_ds_listen checked 1 jobs, back to waiting
21415 Sep 22 23:14:28.002 DEBG IO Write 1000 has deps []
21416 Sep 22 23:14:28.003 DEBG [1] Read already AckReady 1003, : downstairs
21417 Sep 22 23:14:28.003 INFO UUID: 1c71ccd3-6e2a-48af-bbbe-c8c45ccbac25
21418 Sep 22 23:14:28.003 INFO Blocks per extent:5 Total Extents: 2
21419 Sep 22 23:14:28.003 INFO Crucible Version: Crucible Version: 0.0.1
21420 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21421 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21422 rustc: 1.70.0 stable x86_64-unknown-illumos
21423 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21424 Sep 22 23:14:28.003 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21425 Sep 22 23:14:28.003 INFO Using address: 127.0.0.1:58813, task: main
21426 Sep 22 23:14:28.004 INFO Repair listens on 127.0.0.1:0, task: repair
21427 Sep 22 23:14:28.004 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:49899, task: repair
21428 Sep 22 23:14:28.004 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:49899, task: repair
21429 Sep 22 23:14:28.004 INFO listening, local_addr: 127.0.0.1:49899, task: repair
21430 Sep 22 23:14:28.004 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:49899, task: repair
21431 Sep 22 23:14:28.004 INFO Using repair address: 127.0.0.1:49899, task: main
21432 Sep 22 23:14:28.004 INFO No SSL acceptor configured, task: main
21433 Sep 22 23:14:28.004 INFO Upstairs starts
21434 Sep 22 23:14:28.004 INFO Crucible Version: BuildInfo {
21435 version: "0.0.1",
21436 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
21437 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
21438 git_branch: "main",
21439 rustc_semver: "1.70.0",
21440 rustc_channel: "stable",
21441 rustc_host_triple: "x86_64-unknown-illumos",
21442 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
21443 cargo_triple: "x86_64-unknown-illumos",
21444 debug: true,
21445 opt_level: 0,
21446 }
21447 Sep 22 23:14:28.004 INFO Upstairs <-> Downstairs Message Version: 4
21448 Sep 22 23:14:28.005 INFO Crucible stats registered with UUID: eb72ed16-1c89-4d06-98f5-0c7000baa884
21449 Sep 22 23:14:28.005 INFO Crucible eb72ed16-1c89-4d06-98f5-0c7000baa884 has session id: 2487323e-caaf-4119-8b10-1ac34246a659
21450 Sep 22 23:14:28.005 INFO listening on 127.0.0.1:0, task: main
21451 Sep 22 23:14:28.005 INFO listening on 127.0.0.1:0, task: main
21452 Sep 22 23:14:28.005 INFO listening on 127.0.0.1:0, task: main
21453 Sep 22 23:14:28.005 INFO [0] connecting to 127.0.0.1:53498, looper: 0
21454 Sep 22 23:14:28.005 INFO [1] connecting to 127.0.0.1:35121, looper: 1
21455 Sep 22 23:14:28.005 INFO [2] connecting to 127.0.0.1:58813, looper: 2
21456 Sep 22 23:14:28.005 DEBG [2] Read already AckReady 1003, : downstairs
21457 Sep 22 23:14:28.005 INFO up_listen starts, task: up_listen
21458 Sep 22 23:14:28.005 INFO Wait for all three downstairs to come online
21459 Sep 22 23:14:28.005 INFO Flush timeout: 0.5
21460 Sep 22 23:14:28.005 DEBG up_ds_listen was notified
21461 Sep 22 23:14:28.005 DEBG up_ds_listen process 1003
21462 Sep 22 23:14:28.005 DEBG [A] ack job 1003:4, : downstairs
21463 Sep 22 23:14:28.006 INFO [0] eb72ed16-1c89-4d06-98f5-0c7000baa884 looper connected, looper: 0
21464 Sep 22 23:14:28.006 INFO [0] Proc runs for 127.0.0.1:53498 in state New
21465 Sep 22 23:14:28.006 INFO [2] eb72ed16-1c89-4d06-98f5-0c7000baa884 looper connected, looper: 2
21466 Sep 22 23:14:28.006 INFO [2] Proc runs for 127.0.0.1:58813 in state New
21467 Sep 22 23:14:28.006 INFO [1] eb72ed16-1c89-4d06-98f5-0c7000baa884 looper connected, looper: 1
21468 Sep 22 23:14:28.006 INFO [1] Proc runs for 127.0.0.1:35121 in state New
21469 Sep 22 23:14:28.006 DEBG up_ds_listen checked 1 jobs, back to waiting
21470 Sep 22 23:14:28.006 INFO accepted connection from 127.0.0.1:59909, task: main
21471 Sep 22 23:14:28.006 INFO accepted connection from 127.0.0.1:56686, task: main
21472 Sep 22 23:14:28.006 INFO accepted connection from 127.0.0.1:59801, task: main
21473 Sep 22 23:14:28.006 INFO Connection request from eb72ed16-1c89-4d06-98f5-0c7000baa884 with version 4, task: proc
21474 Sep 22 23:14:28.006 INFO upstairs UpstairsConnection { upstairs_id: eb72ed16-1c89-4d06-98f5-0c7000baa884, session_id: 69193141-8135-481f-a830-960928a8721b, gen: 1 } connected, version 4, task: proc
21475 Sep 22 23:14:28.007 INFO Connection request from eb72ed16-1c89-4d06-98f5-0c7000baa884 with version 4, task: proc
21476 Sep 22 23:14:28.007 INFO upstairs UpstairsConnection { upstairs_id: eb72ed16-1c89-4d06-98f5-0c7000baa884, session_id: 69193141-8135-481f-a830-960928a8721b, gen: 1 } connected, version 4, task: proc
21477 Sep 22 23:14:28.007 INFO Connection request from eb72ed16-1c89-4d06-98f5-0c7000baa884 with version 4, task: proc
21478 Sep 22 23:14:28.007 INFO upstairs UpstairsConnection { upstairs_id: eb72ed16-1c89-4d06-98f5-0c7000baa884, session_id: 69193141-8135-481f-a830-960928a8721b, gen: 1 } connected, version 4, task: proc
21479 Sep 22 23:14:28.007 INFO [0] eb72ed16-1c89-4d06-98f5-0c7000baa884 (69193141-8135-481f-a830-960928a8721b) New New New ds_transition to WaitActive
21480 Sep 22 23:14:28.007 INFO [0] Transition from New to WaitActive
21481 Sep 22 23:14:28.007 INFO [2] eb72ed16-1c89-4d06-98f5-0c7000baa884 (69193141-8135-481f-a830-960928a8721b) WaitActive New New ds_transition to WaitActive
21482 Sep 22 23:14:28.007 INFO [2] Transition from New to WaitActive
21483 Sep 22 23:14:28.007 INFO [1] eb72ed16-1c89-4d06-98f5-0c7000baa884 (69193141-8135-481f-a830-960928a8721b) WaitActive New WaitActive ds_transition to WaitActive
21484 Sep 22 23:14:28.007 INFO [1] Transition from New to WaitActive
21485 The guest has requested activation
21486 Sep 22 23:14:28.007 INFO eb72ed16-1c89-4d06-98f5-0c7000baa884 active request set
21487 Sep 22 23:14:28.008 INFO [0] received activate with gen 1
21488 Sep 22 23:14:28.008 INFO [0] client got ds_active_rx, promote! session 69193141-8135-481f-a830-960928a8721b
21489 Sep 22 23:14:28.008 INFO [1] received activate with gen 1
21490 Sep 22 23:14:28.008 INFO [1] client got ds_active_rx, promote! session 69193141-8135-481f-a830-960928a8721b
21491 Sep 22 23:14:28.008 INFO [2] received activate with gen 1
21492 Sep 22 23:14:28.008 INFO [2] client got ds_active_rx, promote! session 69193141-8135-481f-a830-960928a8721b
21493 Sep 22 23:14:28.008 INFO UpstairsConnection { upstairs_id: eb72ed16-1c89-4d06-98f5-0c7000baa884, session_id: 69193141-8135-481f-a830-960928a8721b, gen: 1 } is now active (read-write)
21494 Sep 22 23:14:28.008 INFO UpstairsConnection { upstairs_id: eb72ed16-1c89-4d06-98f5-0c7000baa884, session_id: 69193141-8135-481f-a830-960928a8721b, gen: 1 } is now active (read-write)
21495 Sep 22 23:14:28.008 INFO UpstairsConnection { upstairs_id: eb72ed16-1c89-4d06-98f5-0c7000baa884, session_id: 69193141-8135-481f-a830-960928a8721b, gen: 1 } is now active (read-write)
21496 Sep 22 23:14:28.009 INFO [0] downstairs client at 127.0.0.1:53498 has UUID 177b9038-d4da-4321-bb6f-83528c5df2bb
21497 Sep 22 23:14:28.009 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 177b9038-d4da-4321-bb6f-83528c5df2bb, encrypted: true, database_read_version: 1, database_write_version: 1 }
21498 Sep 22 23:14:28.009 INFO eb72ed16-1c89-4d06-98f5-0c7000baa884 WaitActive WaitActive WaitActive
21499 Sep 22 23:14:28.009 INFO [2] downstairs client at 127.0.0.1:58813 has UUID 1c71ccd3-6e2a-48af-bbbe-c8c45ccbac25
21500 Sep 22 23:14:28.009 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1c71ccd3-6e2a-48af-bbbe-c8c45ccbac25, encrypted: true, database_read_version: 1, database_write_version: 1 }
21501 Sep 22 23:14:28.009 INFO eb72ed16-1c89-4d06-98f5-0c7000baa884 WaitActive WaitActive WaitActive
21502 Sep 22 23:14:28.009 INFO [1] downstairs client at 127.0.0.1:35121 has UUID 7e66c791-8f61-4b2b-80e3-9850920f90c5
21503 Sep 22 23:14:28.009 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 7e66c791-8f61-4b2b-80e3-9850920f90c5, encrypted: true, database_read_version: 1, database_write_version: 1 }
21504 Sep 22 23:14:28.009 INFO eb72ed16-1c89-4d06-98f5-0c7000baa884 WaitActive WaitActive WaitActive
21505 test test::integration_test_volume_write_unwritten_1 ... ok
21506 Sep 22 23:14:28.010 INFO Current flush_numbers [0..12]: [0, 0]
21507 Sep 22 23:14:28.010 INFO current number of open files limit 65536 is already the maximum
21508 Sep 22 23:14:28.010 INFO Created new region file "/tmp/downstairs-GWyoXIoW/region.json"
21509 Sep 22 23:14:28.010 INFO Downstairs has completed Negotiation, task: proc
21510 Sep 22 23:14:28.011 INFO Current flush_numbers [0..12]: [0, 0]
21511 Sep 22 23:14:28.011 INFO Downstairs has completed Negotiation, task: proc
21512 Sep 22 23:14:28.011 DEBG up_ds_listen was notified
21513 test test::integration_test_volume_subvols_parent_scrub_sparse_2 ... Sep 22 23:14:28.011 DEBG up_ds_listen process 1000
21514 ok
21515 Sep 22 23:14:28.011 DEBG [A] ack job 1000:1, : downstairs
21516 Sep 22 23:14:28.011 INFO Current flush_numbers [0..12]: [0, 0]
21517 Sep 22 23:14:28.011 DEBG up_ds_listen checked 1 jobs, back to waiting
21518 Sep 22 23:14:28.011 INFO Downstairs has completed Negotiation, task: proc
21519 Sep 22 23:14:28.011 INFO current number of open files limit 65536 is already the maximum
21520 Sep 22 23:14:28.011 INFO [0] eb72ed16-1c89-4d06-98f5-0c7000baa884 (69193141-8135-481f-a830-960928a8721b) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
21521 Sep 22 23:14:28.011 INFO Created new region file "/tmp/downstairs-OIXCD2JG/region.json"
21522 Sep 22 23:14:28.011 DEBG IO Read 1001 has deps [JobId(1000)]
21523 Sep 22 23:14:28.011 INFO [0] Transition from WaitActive to WaitQuorum
21524 Sep 22 23:14:28.011 WARN [0] new RM replaced this: None
21525 Sep 22 23:14:28.011 INFO [0] Starts reconcile loop
21526 Sep 22 23:14:28.011 INFO [2] eb72ed16-1c89-4d06-98f5-0c7000baa884 (69193141-8135-481f-a830-960928a8721b) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
21527 Sep 22 23:14:28.011 INFO [2] Transition from WaitActive to WaitQuorum
21528 Sep 22 23:14:28.011 WARN [2] new RM replaced this: None
21529 Sep 22 23:14:28.011 INFO [2] Starts reconcile loop
21530 Sep 22 23:14:28.012 INFO [1] eb72ed16-1c89-4d06-98f5-0c7000baa884 (69193141-8135-481f-a830-960928a8721b) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum
21531 Sep 22 23:14:28.012 INFO [1] Transition from WaitActive to WaitQuorum
21532 Sep 22 23:14:28.012 WARN [1] new RM replaced this: None
21533 Sep 22 23:14:28.012 INFO [1] Starts reconcile loop
21534 Sep 22 23:14:28.012 INFO [0] 127.0.0.1:53498 task reports connection:true
21535 Sep 22 23:14:28.012 INFO eb72ed16-1c89-4d06-98f5-0c7000baa884 WaitQuorum WaitQuorum WaitQuorum
21536 Sep 22 23:14:28.012 INFO [0]R flush_numbers: [0, 0]
21537 Sep 22 23:14:28.012 INFO [0]R generation: [0, 0]
21538 Sep 22 23:14:28.012 INFO [0]R dirty: [false, false]
21539 Sep 22 23:14:28.012 INFO [1]R flush_numbers: [0, 0]
21540 Sep 22 23:14:28.012 INFO [1]R generation: [0, 0]
21541 Sep 22 23:14:28.012 INFO [1]R dirty: [false, false]
21542 Sep 22 23:14:28.012 INFO [2]R flush_numbers: [0, 0]
21543 Sep 22 23:14:28.012 INFO [2]R generation: [0, 0]
21544 Sep 22 23:14:28.012 INFO [2]R dirty: [false, false]
21545 Sep 22 23:14:28.012 INFO Max found gen is 1
21546 Sep 22 23:14:28.012 INFO Generation requested: 1 >= found:1
21547 Sep 22 23:14:28.012 INFO Next flush: 1
21548 Sep 22 23:14:28.012 INFO All extents match
21549 Sep 22 23:14:28.012 INFO No downstairs repair required
21550 Sep 22 23:14:28.012 INFO No initial repair work was required
21551 Sep 22 23:14:28.012 INFO Set Downstairs and Upstairs active
21552 Sep 22 23:14:28.012 INFO eb72ed16-1c89-4d06-98f5-0c7000baa884 is now active with session: 69193141-8135-481f-a830-960928a8721b
21553 Sep 22 23:14:28.012 INFO eb72ed16-1c89-4d06-98f5-0c7000baa884 Set Active after no repair
21554 Sep 22 23:14:28.012 INFO Notify all downstairs, region set compare is done.
21555 Sep 22 23:14:28.012 INFO Set check for repair
21556 Sep 22 23:14:28.012 INFO [2] 127.0.0.1:58813 task reports connection:true
21557 Sep 22 23:14:28.012 INFO eb72ed16-1c89-4d06-98f5-0c7000baa884 Active Active Active
21558 Sep 22 23:14:28.012 DEBG Read :1001 deps:[JobId(1000)] res:true
21559 Sep 22 23:14:28.012 INFO Set check for repair
21560 Sep 22 23:14:28.012 INFO [1] 127.0.0.1:35121 task reports connection:true
21561 Sep 22 23:14:28.012 INFO eb72ed16-1c89-4d06-98f5-0c7000baa884 Active Active Active
21562 Sep 22 23:14:28.012 INFO Set check for repair
21563 Sep 22 23:14:28.012 INFO [0] received reconcile message
21564 Sep 22 23:14:28.012 INFO [0] All repairs completed, exit
21565 Sep 22 23:14:28.012 INFO [0] Starts cmd_loop
21566 Sep 22 23:14:28.013 INFO [1] received reconcile message
21567 Sep 22 23:14:28.013 INFO [1] All repairs completed, exit
21568 Sep 22 23:14:28.013 INFO [1] Starts cmd_loop
21569 Sep 22 23:14:28.013 DEBG Read :1001 deps:[JobId(1000)] res:true
21570 Sep 22 23:14:28.013 INFO [2] received reconcile message
21571 Sep 22 23:14:28.013 INFO [2] All repairs completed, exit
21572 Sep 22 23:14:28.013 INFO [2] Starts cmd_loop
21573 The guest has finished waiting for activation
21574 Sep 22 23:14:28.013 DEBG Read :1001 deps:[JobId(1000)] res:true
21575 Sep 22 23:14:28.013 DEBG IO Write 1000 has deps []
21576 Sep 22 23:14:28.013 DEBG up_ds_listen was notified
21577 Sep 22 23:14:28.014 DEBG up_ds_listen process 1000
21578 Sep 22 23:14:28.014 DEBG [A] ack job 1000:1, : downstairs
21579 Sep 22 23:14:28.014 DEBG up_ds_listen checked 1 jobs, back to waiting
21580 Sep 22 23:14:28.014 INFO current number of open files limit 65536 is already the maximum
21581 Sep 22 23:14:28.014 INFO Opened existing region file "/tmp/downstairs-GWyoXIoW/region.json"
21582 Sep 22 23:14:28.014 INFO Database read version 1
21583 Sep 22 23:14:28.014 INFO Database write version 1
21584 Sep 22 23:14:28.015 INFO current number of open files limit 65536 is already the maximum
21585 Sep 22 23:14:28.015 INFO Opened existing region file "/tmp/downstairs-OIXCD2JG/region.json"
21586 Sep 22 23:14:28.015 INFO Database read version 1
21587 Sep 22 23:14:28.015 INFO Database write version 1
21588 Sep 22 23:14:28.015 DEBG Write :1000 deps:[] res:true
21589 Sep 22 23:14:28.015 DEBG Write :1000 deps:[] res:true
21590 Sep 22 23:14:28.016 DEBG [0] Read AckReady 1001, : downstairs
21591 Sep 22 23:14:28.016 DEBG Write :1000 deps:[] res:true
21592 Sep 22 23:14:28.016 INFO UUID: 07752cff-eba4-4cda-9fc8-25c01941ef38
21593 Sep 22 23:14:28.016 INFO Blocks per extent:5 Total Extents: 2
21594 Sep 22 23:14:28.016 INFO Crucible Version: Crucible Version: 0.0.1
21595 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21596 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21597 rustc: 1.70.0 stable x86_64-unknown-illumos
21598 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21599 Sep 22 23:14:28.017 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21600 Sep 22 23:14:28.017 INFO Using address: 127.0.0.1:39201, task: main
21601 Sep 22 23:14:28.017 INFO UUID: 4653359c-1409-4b81-8a59-94f85d2bcf29
21602 Sep 22 23:14:28.017 INFO Repair listens on 127.0.0.1:0, task: repair
21603 Sep 22 23:14:28.017 INFO Blocks per extent:5 Total Extents: 2
21604 Sep 22 23:14:28.017 INFO Crucible Version: Crucible Version: 0.0.1
21605 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21606 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21607 rustc: 1.70.0 stable x86_64-unknown-illumos
21608 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21609 Sep 22 23:14:28.017 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:33789, task: repair
21610 Sep 22 23:14:28.017 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21611 Sep 22 23:14:28.017 INFO Using address: 127.0.0.1:43937, task: main
21612 Sep 22 23:14:28.017 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:33789, task: repair
21613 Sep 22 23:14:28.017 INFO listening, local_addr: 127.0.0.1:33789, task: repair
21614 Sep 22 23:14:28.017 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:33789, task: repair
21615 Sep 22 23:14:28.017 INFO Using repair address: 127.0.0.1:33789, task: main
21616 Sep 22 23:14:28.017 INFO No SSL acceptor configured, task: main
21617 Sep 22 23:14:28.017 INFO Repair listens on 127.0.0.1:0, task: repair
21618 Sep 22 23:14:28.017 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57722, task: repair
21619 Sep 22 23:14:28.018 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57722, task: repair
21620 Sep 22 23:14:28.018 INFO listening, local_addr: 127.0.0.1:57722, task: repair
21621 Sep 22 23:14:28.018 INFO current number of open files limit 65536 is already the maximum
21622 Sep 22 23:14:28.018 DEBG [1] Read already AckReady 1001, : downstairs
21623 Sep 22 23:14:28.018 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57722, task: repair
21624 Sep 22 23:14:28.018 INFO Using repair address: 127.0.0.1:57722, task: main
21625 Sep 22 23:14:28.018 INFO Created new region file "/tmp/downstairs-F0TZn2dN/region.json"
21626 Sep 22 23:14:28.018 INFO No SSL acceptor configured, task: main
21627 Sep 22 23:14:28.018 DEBG IO Write 1001 has deps [JobId(1000)]
21628 Sep 22 23:14:28.018 INFO current number of open files limit 65536 is already the maximum
21629 Sep 22 23:14:28.018 INFO Created new region file "/tmp/downstairs-rQ43necs/region.json"
21630 Sep 22 23:14:28.019 DEBG [2] Read already AckReady 1001, : downstairs
21631 Sep 22 23:14:28.020 DEBG up_ds_listen was notified
21632 Sep 22 23:14:28.020 DEBG up_ds_listen process 1001
21633 Sep 22 23:14:28.020 DEBG [A] ack job 1001:2, : downstairs
21634 Sep 22 23:14:28.020 DEBG up_ds_listen checked 1 jobs, back to waiting
21635 Sep 22 23:14:28.022 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
21636 Sep 22 23:14:28.022 DEBG up_ds_listen was notified
21637 Sep 22 23:14:28.022 DEBG up_ds_listen process 1001
21638 Sep 22 23:14:28.022 DEBG [A] ack job 1001:2, : downstairs
21639 Sep 22 23:14:28.022 DEBG up_ds_listen checked 1 jobs, back to waiting
21640 Sep 22 23:14:28.022 INFO current number of open files limit 65536 is already the maximum
21641 Sep 22 23:14:28.022 DEBG IO Read 1002 has deps [JobId(1001), JobId(1000)]
21642 Sep 22 23:14:28.022 INFO Opened existing region file "/tmp/downstairs-F0TZn2dN/region.json"
21643 Sep 22 23:14:28.022 INFO Database read version 1
21644 Sep 22 23:14:28.022 INFO Database write version 1
21645 Sep 22 23:14:28.022 INFO current number of open files limit 65536 is already the maximum
21646 Sep 22 23:14:28.022 INFO Opened existing region file "/tmp/downstairs-rQ43necs/region.json"
21647 Sep 22 23:14:28.023 INFO Database read version 1
21648 Sep 22 23:14:28.023 INFO Database write version 1
21649 Sep 22 23:14:28.023 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
21650 Sep 22 23:14:28.023 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
21651 Sep 22 23:14:28.023 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
21652 Sep 22 23:14:28.024 DEBG up_ds_listen was notified
21653 Sep 22 23:14:28.024 DEBG up_ds_listen process 1002
21654 Sep 22 23:14:28.024 DEBG [A] ack job 1002:3, : downstairs
21655 Sep 22 23:14:28.024 DEBG up_ds_listen checked 1 jobs, back to waiting
21656 Sep 22 23:14:28.025 DEBG IO Read 1003 has deps [JobId(1002), JobId(1000)]
21657 Sep 22 23:14:28.025 INFO UUID: 8a58fc4b-e2e1-4a7f-8747-0d8dd2cd6f65
21658 Sep 22 23:14:28.025 INFO Blocks per extent:5 Total Extents: 2
21659 Sep 22 23:14:28.025 INFO Crucible Version: Crucible Version: 0.0.1
21660 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21661 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21662 rustc: 1.70.0 stable x86_64-unknown-illumos
21663 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21664 Sep 22 23:14:28.025 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21665 Sep 22 23:14:28.025 INFO Using address: 127.0.0.1:39321, task: main
21666 Sep 22 23:14:28.025 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
21667 Sep 22 23:14:28.025 INFO Repair listens on 127.0.0.1:0, task: repair
21668 Sep 22 23:14:28.025 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36320, task: repair
21669 Sep 22 23:14:28.025 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36320, task: repair
21670 Sep 22 23:14:28.025 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
21671 Sep 22 23:14:28.025 INFO listening, local_addr: 127.0.0.1:36320, task: repair
21672 Sep 22 23:14:28.026 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36320, task: repair
21673 Sep 22 23:14:28.026 INFO UUID: 3f0b274b-a55a-4121-96c8-a689e27a459a
21674 Sep 22 23:14:28.026 INFO Using repair address: 127.0.0.1:36320, task: main
21675 Sep 22 23:14:28.026 INFO Blocks per extent:5 Total Extents: 2
21676 Sep 22 23:14:28.026 INFO No SSL acceptor configured, task: main
21677 Sep 22 23:14:28.026 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
21678 Sep 22 23:14:28.026 INFO Crucible Version: Crucible Version: 0.0.1
21679 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21680 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21681 rustc: 1.70.0 stable x86_64-unknown-illumos
21682 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21683 Sep 22 23:14:28.026 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21684 Sep 22 23:14:28.026 INFO Using address: 127.0.0.1:38253, task: main
21685 Sep 22 23:14:28.026 INFO current number of open files limit 65536 is already the maximum
21686 Sep 22 23:14:28.026 INFO Created new region file "/tmp/downstairs-eKrOivsr/region.json"
21687 Sep 22 23:14:28.026 DEBG [0] Read AckReady 1002, : downstairs
21688 Sep 22 23:14:28.026 INFO Repair listens on 127.0.0.1:0, task: repair
21689 Sep 22 23:14:28.026 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36529, task: repair
21690 Sep 22 23:14:28.026 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36529, task: repair
21691 Sep 22 23:14:28.026 INFO listening, local_addr: 127.0.0.1:36529, task: repair
21692 Sep 22 23:14:28.027 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36529, task: repair
21693 Sep 22 23:14:28.027 INFO Using repair address: 127.0.0.1:36529, task: main
21694 Sep 22 23:14:28.027 INFO No SSL acceptor configured, task: main
21695 Sep 22 23:14:28.027 INFO current number of open files limit 65536 is already the maximum
21696 Sep 22 23:14:28.027 INFO Created new region file "/tmp/downstairs-SPkLZJb1/region.json"
21697 Sep 22 23:14:28.028 DEBG [2] Read already AckReady 1002, : downstairs
21698 Sep 22 23:14:28.029 DEBG [0] Read AckReady 1003, : downstairs
21699 Sep 22 23:14:28.030 DEBG [1] Read already AckReady 1002, : downstairs
21700 Sep 22 23:14:28.030 DEBG up_ds_listen was notified
21701 Sep 22 23:14:28.030 DEBG up_ds_listen process 1002
21702 Sep 22 23:14:28.030 DEBG [A] ack job 1002:3, : downstairs
21703 Sep 22 23:14:28.030 INFO current number of open files limit 65536 is already the maximum
21704 Sep 22 23:14:28.030 DEBG up_ds_listen checked 1 jobs, back to waiting
21705 Sep 22 23:14:28.030 INFO Opened existing region file "/tmp/downstairs-eKrOivsr/region.json"
21706 Sep 22 23:14:28.030 INFO Database read version 1
21707 Sep 22 23:14:28.030 INFO Database write version 1
21708 Sep 22 23:14:28.030 DEBG [1] Read already AckReady 1003, : downstairs
21709 Sep 22 23:14:28.032 DEBG [2] Read already AckReady 1003, : downstairs
21710 Sep 22 23:14:28.032 DEBG up_ds_listen was notified
21711 Sep 22 23:14:28.032 DEBG up_ds_listen process 1003
21712 Sep 22 23:14:28.032 DEBG [A] ack job 1003:4, : downstairs
21713 Sep 22 23:14:28.033 DEBG up_ds_listen checked 1 jobs, back to waiting
21714 Sep 22 23:14:28.033 INFO UUID: 3f976252-a8b2-44c0-aea9-ba9b4e4fedb3
21715 Sep 22 23:14:28.033 INFO Blocks per extent:5 Total Extents: 2
21716 Sep 22 23:14:28.033 INFO Crucible Version: Crucible Version: 0.0.1
21717 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21718 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21719 rustc: 1.70.0 stable x86_64-unknown-illumos
21720 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21721 Sep 22 23:14:28.033 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21722 Sep 22 23:14:28.033 INFO Using address: 127.0.0.1:48350, task: main
21723 test test::integration_test_volume_write_unwritten_sparse ... ok
21724 Sep 22 23:14:28.033 INFO current number of open files limit 65536 is already the maximum
21725 Sep 22 23:14:28.033 INFO Opened existing region file "/tmp/downstairs-SPkLZJb1/region.json"
21726 Sep 22 23:14:28.033 INFO Database read version 1
21727 Sep 22 23:14:28.033 INFO Database write version 1
21728 Sep 22 23:14:28.033 INFO Repair listens on 127.0.0.1:0, task: repair
21729 Sep 22 23:14:28.033 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53088, task: repair
21730 Sep 22 23:14:28.033 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53088, task: repair
21731 Sep 22 23:14:28.033 INFO current number of open files limit 65536 is already the maximum
21732 Sep 22 23:14:28.033 INFO listening, local_addr: 127.0.0.1:53088, task: repair
21733 Sep 22 23:14:28.034 INFO Created new region file "/tmp/downstairs-5PKewOgB/region.json"
21734 Sep 22 23:14:28.034 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53088, task: repair
21735 Sep 22 23:14:28.034 INFO Using repair address: 127.0.0.1:53088, task: main
21736 Sep 22 23:14:28.034 INFO No SSL acceptor configured, task: main
21737 Sep 22 23:14:28.034 INFO current number of open files limit 65536 is already the maximum
21738 Sep 22 23:14:28.034 INFO Created new region file "/tmp/downstairs-aiQZQRd8/region.json"
21739 Sep 22 23:14:28.036 INFO UUID: 9a0e7cc4-1eb0-42dc-b42e-9dc1e52fa851
21740 Sep 22 23:14:28.036 INFO Blocks per extent:5 Total Extents: 2
21741 Sep 22 23:14:28.036 INFO Crucible Version: Crucible Version: 0.0.1
21742 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21743 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21744 rustc: 1.70.0 stable x86_64-unknown-illumos
21745 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21746 Sep 22 23:14:28.036 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21747 Sep 22 23:14:28.036 INFO Using address: 127.0.0.1:32937, task: main
21748 Sep 22 23:14:28.036 INFO Repair listens on 127.0.0.1:0, task: repair
21749 Sep 22 23:14:28.036 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:63724, task: repair
21750 Sep 22 23:14:28.037 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:63724, task: repair
21751 Sep 22 23:14:28.037 INFO listening, local_addr: 127.0.0.1:63724, task: repair
21752 Sep 22 23:14:28.037 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:63724, task: repair
21753 Sep 22 23:14:28.037 INFO Using repair address: 127.0.0.1:63724, task: main
21754 Sep 22 23:14:28.037 INFO No SSL acceptor configured, task: main
21755 Sep 22 23:14:28.037 INFO current number of open files limit 65536 is already the maximum
21756 Sep 22 23:14:28.037 INFO Created new region file "/tmp/downstairs-WSynRapI/region.json"
21757 test test::integration_test_volume_write_unwritten_2 ... ok
21758 Sep 22 23:14:28.038 INFO current number of open files limit 65536 is already the maximum
21759 Sep 22 23:14:28.038 INFO Created new region file "/tmp/downstairs-U5vIRi38/region.json"
21760 Sep 22 23:14:28.038 INFO current number of open files limit 65536 is already the maximum
21761 Sep 22 23:14:28.038 INFO Opened existing region file "/tmp/downstairs-5PKewOgB/region.json"
21762 Sep 22 23:14:28.038 INFO Database read version 1
21763 Sep 22 23:14:28.038 INFO Database write version 1
21764 Sep 22 23:14:28.040 INFO current number of open files limit 65536 is already the maximum
21765 Sep 22 23:14:28.040 INFO Opened existing region file "/tmp/downstairs-aiQZQRd8/region.json"
21766 Sep 22 23:14:28.040 INFO Database read version 1
21767 Sep 22 23:14:28.040 INFO Database write version 1
21768 Sep 22 23:14:28.041 INFO UUID: e867ee61-247c-4a29-967e-48e3c07ee764
21769 Sep 22 23:14:28.041 INFO Blocks per extent:5 Total Extents: 2
21770 Sep 22 23:14:28.041 INFO Crucible Version: Crucible Version: 0.0.1
21771 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21772 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21773 rustc: 1.70.0 stable x86_64-unknown-illumos
21774 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21775 Sep 22 23:14:28.042 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21776 Sep 22 23:14:28.042 INFO Using address: 127.0.0.1:46618, task: main
21777 Sep 22 23:14:28.042 INFO Repair listens on 127.0.0.1:0, task: repair
21778 Sep 22 23:14:28.042 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58733, task: repair
21779 Sep 22 23:14:28.042 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58733, task: repair
21780 Sep 22 23:14:28.042 INFO listening, local_addr: 127.0.0.1:58733, task: repair
21781 Sep 22 23:14:28.042 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58733, task: repair
21782 Sep 22 23:14:28.042 INFO Using repair address: 127.0.0.1:58733, task: main
21783 Sep 22 23:14:28.042 INFO No SSL acceptor configured, task: main
21784 Sep 22 23:14:28.043 INFO current number of open files limit 65536 is already the maximum
21785 Sep 22 23:14:28.043 INFO Opened existing region file "/tmp/downstairs-WSynRapI/region.json"
21786 Sep 22 23:14:28.043 INFO Database read version 1
21787 Sep 22 23:14:28.043 INFO Database write version 1
21788 Sep 22 23:14:28.043 INFO UUID: dfa11e22-89c3-48d2-ac68-b5822a655ba0
21789 Sep 22 23:14:28.043 INFO current number of open files limit 65536 is already the maximum
21790 Sep 22 23:14:28.043 INFO Blocks per extent:5 Total Extents: 2
21791 Sep 22 23:14:28.043 INFO current number of open files limit 65536 is already the maximum
21792 Sep 22 23:14:28.043 INFO Crucible Version: Crucible Version: 0.0.1
21793 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21794 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21795 rustc: 1.70.0 stable x86_64-unknown-illumos
21796 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21797 Sep 22 23:14:28.043 INFO Opened existing region file "/tmp/downstairs-U5vIRi38/region.json"
21798 Sep 22 23:14:28.043 INFO Database read version 1
21799 Sep 22 23:14:28.043 INFO Created new region file "/tmp/downstairs-yyVBLW4f/region.json"
21800 Sep 22 23:14:28.043 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21801 Sep 22 23:14:28.043 INFO Database write version 1
21802 Sep 22 23:14:28.043 INFO Using address: 127.0.0.1:53594, task: main
21803 Sep 22 23:14:28.044 INFO Repair listens on 127.0.0.1:0, task: repair
21804 Sep 22 23:14:28.044 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50289, task: repair
21805 Sep 22 23:14:28.044 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50289, task: repair
21806 Sep 22 23:14:28.044 INFO listening, local_addr: 127.0.0.1:50289, task: repair
21807 Sep 22 23:14:28.044 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50289, task: repair
21808 Sep 22 23:14:28.044 INFO Using repair address: 127.0.0.1:50289, task: main
21809 Sep 22 23:14:28.044 INFO No SSL acceptor configured, task: main
21810 Sep 22 23:14:28.045 INFO current number of open files limit 65536 is already the maximum
21811 Sep 22 23:14:28.045 INFO Created new region file "/tmp/downstairs-a5Y9rIyO/region.json"
21812 Sep 22 23:14:28.046 INFO UUID: b7c099f2-5446-4f38-bad7-8d884c3a5016
21813 Sep 22 23:14:28.046 INFO Blocks per extent:5 Total Extents: 2
21814 Sep 22 23:14:28.046 INFO UUID: 6fddf8da-d152-4d38-8108-a3c9271fb31e
21815 Sep 22 23:14:28.046 INFO Blocks per extent:5 Total Extents: 2
21816 Sep 22 23:14:28.046 INFO Crucible Version: Crucible Version: 0.0.1
21817 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21818 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21819 rustc: 1.70.0 stable x86_64-unknown-illumos
21820 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21821 Sep 22 23:14:28.046 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21822 Sep 22 23:14:28.046 INFO Using address: 127.0.0.1:53693, task: main
21823 Sep 22 23:14:28.047 INFO Crucible Version: Crucible Version: 0.0.1
21824 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21825 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21826 rustc: 1.70.0 stable x86_64-unknown-illumos
21827 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21828 Sep 22 23:14:28.047 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21829 Sep 22 23:14:28.047 INFO Using address: 127.0.0.1:56487, task: main
21830 Sep 22 23:14:28.047 INFO Repair listens on 127.0.0.1:0, task: repair
21831 Sep 22 23:14:28.047 INFO Repair listens on 127.0.0.1:0, task: repair
21832 Sep 22 23:14:28.047 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:40471, task: repair
21833 Sep 22 23:14:28.047 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:42158, task: repair
21834 Sep 22 23:14:28.047 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:42158, task: repair
21835 Sep 22 23:14:28.047 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:40471, task: repair
21836 Sep 22 23:14:28.047 INFO listening, local_addr: 127.0.0.1:42158, task: repair
21837 Sep 22 23:14:28.047 INFO listening, local_addr: 127.0.0.1:40471, task: repair
21838 Sep 22 23:14:28.047 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:42158, task: repair
21839 Sep 22 23:14:28.047 INFO Using repair address: 127.0.0.1:42158, task: main
21840 Sep 22 23:14:28.047 INFO No SSL acceptor configured, task: main
21841 Sep 22 23:14:28.047 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:40471, task: repair
21842 Sep 22 23:14:28.047 INFO Using repair address: 127.0.0.1:40471, task: main
21843 Sep 22 23:14:28.047 INFO No SSL acceptor configured, task: main
21844 Sep 22 23:14:28.048 INFO current number of open files limit 65536 is already the maximum
21845 Sep 22 23:14:28.048 INFO current number of open files limit 65536 is already the maximum
21846 Sep 22 23:14:28.048 INFO Created new region file "/tmp/downstairs-kBuYI5cw/region.json"
21847 Sep 22 23:14:28.048 INFO Created new region file "/tmp/downstairs-oWPWb9nU/region.json"
21848 Sep 22 23:14:28.049 INFO current number of open files limit 65536 is already the maximum
21849 Sep 22 23:14:28.049 INFO Opened existing region file "/tmp/downstairs-yyVBLW4f/region.json"
21850 Sep 22 23:14:28.049 INFO Database read version 1
21851 Sep 22 23:14:28.049 INFO Database write version 1
21852 Sep 22 23:14:28.053 INFO UUID: 01a6d79b-23e4-44c1-a6f8-9804b08fb8c2
21853 Sep 22 23:14:28.053 INFO Blocks per extent:5 Total Extents: 2
21854 Sep 22 23:14:28.053 INFO Crucible Version: Crucible Version: 0.0.1
21855 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21856 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21857 rustc: 1.70.0 stable x86_64-unknown-illumos
21858 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21859 Sep 22 23:14:28.053 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21860 Sep 22 23:14:28.053 INFO Using address: 127.0.0.1:49265, task: main
21861 Sep 22 23:14:28.053 INFO Repair listens on 127.0.0.1:0, task: repair
21862 Sep 22 23:14:28.053 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38123, task: repair
21863 Sep 22 23:14:28.053 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38123, task: repair
21864 Sep 22 23:14:28.053 INFO listening, local_addr: 127.0.0.1:38123, task: repair
21865 Sep 22 23:14:28.053 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38123, task: repair
21866 Sep 22 23:14:28.053 INFO Using repair address: 127.0.0.1:38123, task: main
21867 Sep 22 23:14:28.053 INFO No SSL acceptor configured, task: main
21868 Sep 22 23:14:28.054 INFO current number of open files limit 65536 is already the maximum
21869 Sep 22 23:14:28.054 INFO Opened existing region file "/tmp/downstairs-kBuYI5cw/region.json"
21870 Sep 22 23:14:28.054 INFO Database read version 1
21871 Sep 22 23:14:28.054 INFO current number of open files limit 65536 is already the maximum
21872 Sep 22 23:14:28.054 INFO Database write version 1
21873 Sep 22 23:14:28.054 INFO Created new region file "/tmp/downstairs-qpN1lPF1/region.json"
21874 Sep 22 23:14:28.054 INFO current number of open files limit 65536 is already the maximum
21875 Sep 22 23:14:28.054 INFO Opened existing region file "/tmp/downstairs-a5Y9rIyO/region.json"
21876 Sep 22 23:14:28.054 INFO Database read version 1
21877 Sep 22 23:14:28.054 INFO Database write version 1
21878 Sep 22 23:14:28.057 INFO UUID: 2fe4896e-5871-495b-8a9e-a2c5b4173cd5
21879 Sep 22 23:14:28.057 INFO Blocks per extent:5 Total Extents: 2
21880 Sep 22 23:14:28.057 INFO Crucible Version: Crucible Version: 0.0.1
21881 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21882 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21883 rustc: 1.70.0 stable x86_64-unknown-illumos
21884 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21885 Sep 22 23:14:28.057 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21886 Sep 22 23:14:28.057 INFO Using address: 127.0.0.1:49049, task: main
21887 Sep 22 23:14:28.058 INFO Repair listens on 127.0.0.1:0, task: repair
21888 Sep 22 23:14:28.058 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57494, task: repair
21889 Sep 22 23:14:28.058 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57494, task: repair
21890 Sep 22 23:14:28.058 INFO current number of open files limit 65536 is already the maximum
21891 Sep 22 23:14:28.058 INFO listening, local_addr: 127.0.0.1:57494, task: repair
21892 Sep 22 23:14:28.058 INFO Opened existing region file "/tmp/downstairs-oWPWb9nU/region.json"
21893 Sep 22 23:14:28.058 INFO Database read version 1
21894 Sep 22 23:14:28.058 INFO Database write version 1
21895 Sep 22 23:14:28.058 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57494, task: repair
21896 Sep 22 23:14:28.058 INFO Using repair address: 127.0.0.1:57494, task: main
21897 Sep 22 23:14:28.058 INFO No SSL acceptor configured, task: main
21898 Sep 22 23:14:28.058 INFO current number of open files limit 65536 is already the maximum
21899 Sep 22 23:14:28.058 INFO Created new region file "/tmp/downstairs-NfNZrfMD/region.json"
21900 Sep 22 23:14:28.059 INFO UUID: 106a66d5-99df-45fc-8e19-905c44b58eb8
21901 Sep 22 23:14:28.059 INFO Blocks per extent:5 Total Extents: 2
21902 Sep 22 23:14:28.059 INFO Crucible Version: Crucible Version: 0.0.1
21903 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21904 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21905 rustc: 1.70.0 stable x86_64-unknown-illumos
21906 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21907 Sep 22 23:14:28.059 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21908 Sep 22 23:14:28.059 INFO Using address: 127.0.0.1:50602, task: main
21909 Sep 22 23:14:28.059 INFO Repair listens on 127.0.0.1:0, task: repair
21910 Sep 22 23:14:28.059 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:60375, task: repair
21911 Sep 22 23:14:28.059 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:60375, task: repair
21912 Sep 22 23:14:28.059 INFO listening, local_addr: 127.0.0.1:60375, task: repair
21913 Sep 22 23:14:28.059 INFO current number of open files limit 65536 is already the maximum
21914 Sep 22 23:14:28.060 INFO Opened existing region file "/tmp/downstairs-qpN1lPF1/region.json"
21915 Sep 22 23:14:28.060 INFO Database read version 1
21916 Sep 22 23:14:28.060 INFO Database write version 1
21917 Sep 22 23:14:28.060 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:60375, task: repair
21918 Sep 22 23:14:28.060 INFO Using repair address: 127.0.0.1:60375, task: main
21919 Sep 22 23:14:28.060 INFO No SSL acceptor configured, task: main
21920 Sep 22 23:14:28.060 INFO current number of open files limit 65536 is already the maximum
21921 Sep 22 23:14:28.060 INFO Created new region file "/tmp/downstairs-tMrbAOzK/region.json"
21922 Sep 22 23:14:28.062 INFO UUID: a532462e-5feb-4b2d-aa58-e3da9a570973
21923 Sep 22 23:14:28.062 INFO Blocks per extent:5 Total Extents: 2
21924 Sep 22 23:14:28.062 INFO UUID: 4b9378e8-0a63-4886-801a-4023859bd2fc
21925 Sep 22 23:14:28.062 INFO Blocks per extent:5 Total Extents: 2
21926 Sep 22 23:14:28.062 INFO Crucible Version: Crucible Version: 0.0.1
21927 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21928 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21929 rustc: 1.70.0 stable x86_64-unknown-illumos
21930 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21931 Sep 22 23:14:28.062 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21932 Sep 22 23:14:28.062 INFO Crucible Version: Crucible Version: 0.0.1
21933 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21934 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21935 rustc: 1.70.0 stable x86_64-unknown-illumos
21936 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21937 Sep 22 23:14:28.062 INFO Using address: 127.0.0.1:43883, task: main
21938 Sep 22 23:14:28.062 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21939 Sep 22 23:14:28.062 INFO Using address: 127.0.0.1:52057, task: main
21940 Sep 22 23:14:28.063 INFO Repair listens on 127.0.0.1:0, task: repair
21941 Sep 22 23:14:28.063 INFO Repair listens on 127.0.0.1:0, task: repair
21942 Sep 22 23:14:28.063 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:63277, task: repair
21943 Sep 22 23:14:28.063 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:63277, task: repair
21944 Sep 22 23:14:28.063 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:56173, task: repair
21945 Sep 22 23:14:28.063 INFO listening, local_addr: 127.0.0.1:63277, task: repair
21946 Sep 22 23:14:28.063 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:56173, task: repair
21947 Sep 22 23:14:28.063 INFO listening, local_addr: 127.0.0.1:56173, task: repair
21948 Sep 22 23:14:28.063 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:63277, task: repair
21949 Sep 22 23:14:28.063 INFO Using repair address: 127.0.0.1:63277, task: main
21950 Sep 22 23:14:28.063 INFO No SSL acceptor configured, task: main
21951 Sep 22 23:14:28.063 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:56173, task: repair
21952 Sep 22 23:14:28.063 INFO Using repair address: 127.0.0.1:56173, task: main
21953 Sep 22 23:14:28.063 INFO No SSL acceptor configured, task: main
21954 Sep 22 23:14:28.063 INFO current number of open files limit 65536 is already the maximum
21955 Sep 22 23:14:28.063 INFO Created new region file "/tmp/downstairs-GX64VDLV/region.json"
21956 Sep 22 23:14:28.064 INFO current number of open files limit 65536 is already the maximum
21957 Sep 22 23:14:28.065 INFO Created new region file "/tmp/downstairs-lzKcgOE9/region.json"
21958 Sep 22 23:14:28.065 INFO current number of open files limit 65536 is already the maximum
21959 Sep 22 23:14:28.065 INFO Opened existing region file "/tmp/downstairs-NfNZrfMD/region.json"
21960 Sep 22 23:14:28.065 INFO Database read version 1
21961 Sep 22 23:14:28.065 INFO Database write version 1
21962 Sep 22 23:14:28.066 INFO current number of open files limit 65536 is already the maximum
21963 Sep 22 23:14:28.066 INFO Opened existing region file "/tmp/downstairs-tMrbAOzK/region.json"
21964 Sep 22 23:14:28.066 INFO Database read version 1
21965 Sep 22 23:14:28.066 INFO Database write version 1
21966 Sep 22 23:14:28.068 INFO UUID: adbabfb8-1e80-4905-a6ee-914c6aaac853
21967 Sep 22 23:14:28.068 INFO Blocks per extent:5 Total Extents: 2
21968 Sep 22 23:14:28.068 INFO Crucible Version: Crucible Version: 0.0.1
21969 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21970 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21971 rustc: 1.70.0 stable x86_64-unknown-illumos
21972 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21973 Sep 22 23:14:28.068 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21974 Sep 22 23:14:28.068 INFO Using address: 127.0.0.1:57827, task: main
21975 Sep 22 23:14:28.068 INFO Repair listens on 127.0.0.1:0, task: repair
21976 Sep 22 23:14:28.068 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:41816, task: repair
21977 Sep 22 23:14:28.068 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:41816, task: repair
21978 Sep 22 23:14:28.068 INFO listening, local_addr: 127.0.0.1:41816, task: repair
21979 Sep 22 23:14:28.068 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:41816, task: repair
21980 Sep 22 23:14:28.068 INFO Using repair address: 127.0.0.1:41816, task: main
21981 Sep 22 23:14:28.068 INFO No SSL acceptor configured, task: main
21982 Sep 22 23:14:28.069 INFO current number of open files limit 65536 is already the maximum
21983 Sep 22 23:14:28.069 INFO Opened existing region file "/tmp/downstairs-GX64VDLV/region.json"
21984 Sep 22 23:14:28.069 INFO Database read version 1
21985 Sep 22 23:14:28.069 INFO Database write version 1
21986 note: configured to log to "/dev/stdout"
21987 Sep 22 23:14:28.070 INFO current number of open files limit 65536 is already the maximum
21988 Sep 22 23:14:28.070 INFO Opened existing region file "/tmp/downstairs-lzKcgOE9/region.json"
21989 Sep 22 23:14:28.070 INFO Database read version 1
21990 Sep 22 23:14:28.070 INFO Database write version 1
21991 Sep 22 23:14:28.070 INFO UUID: 087cb3f4-dc4b-491c-947f-013a8f45b507
21992 Sep 22 23:14:28.070 INFO Blocks per extent:5 Total Extents: 2
21993 Sep 22 23:14:28.070 INFO Crucible Version: Crucible Version: 0.0.1
21994 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
21995 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
21996 rustc: 1.70.0 stable x86_64-unknown-illumos
21997 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
21998 Sep 22 23:14:28.070 INFO Upstairs <-> Downstairs Message Version: 4, task: main
21999 Sep 22 23:14:28.070 INFO Using address: 127.0.0.1:53970, task: main
22000 Sep 22 23:14:28.070 INFO Repair listens on 127.0.0.1:0, task: repair
22001 Sep 22 23:14:28.070 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52685, task: repair
22002 Sep 22 23:14:28.070 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52685, task: repair
22003 Sep 22 23:14:28.070 INFO listening, local_addr: 127.0.0.1:52685, task: repair
22004 Sep 22 23:14:28.070 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52685, task: repair
22005 Sep 22 23:14:28.070 INFO Using repair address: 127.0.0.1:52685, task: main
22006 Sep 22 23:14:28.070 INFO No SSL acceptor configured, task: main
22007 Sep 22 23:14:28.071 INFO Upstairs starts
22008 Sep 22 23:14:28.071 INFO Crucible Version: BuildInfo {
22009 version: "0.0.1",
22010 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
22011 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
22012 git_branch: "main",
22013 rustc_semver: "1.70.0",
22014 rustc_channel: "stable",
22015 rustc_host_triple: "x86_64-unknown-illumos",
22016 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
22017 cargo_triple: "x86_64-unknown-illumos",
22018 debug: true,
22019 opt_level: 0,
22020 }
22021 Sep 22 23:14:28.071 INFO Upstairs <-> Downstairs Message Version: 4
22022 Sep 22 23:14:28.071 INFO Crucible stats registered with UUID: 5a917d88-e6fb-4057-be02-fbcd45b1ea14
22023 Sep 22 23:14:28.071 INFO Crucible 5a917d88-e6fb-4057-be02-fbcd45b1ea14 has session id: eee82ed7-e118-43b0-ba47-830f213fb9e4
22024 Sep 22 23:14:28.071 INFO listening on 127.0.0.1:0, task: main
22025 Sep 22 23:14:28.071 INFO listening on 127.0.0.1:0, task: main
22026 Sep 22 23:14:28.071 INFO listening on 127.0.0.1:0, task: main
22027 Sep 22 23:14:28.071 INFO listening on 127.0.0.1:0, task: main
22028 Sep 22 23:14:28.071 INFO listening on 127.0.0.1:0, task: main
22029 Sep 22 23:14:28.071 INFO listening on 127.0.0.1:0, task: main
22030 Sep 22 23:14:28.071 INFO [0] connecting to 127.0.0.1:39201, looper: 0
22031 Sep 22 23:14:28.071 INFO [1] connecting to 127.0.0.1:39321, looper: 1
22032 Sep 22 23:14:28.072 INFO [2] connecting to 127.0.0.1:48350, looper: 2
22033 Sep 22 23:14:28.072 INFO up_listen starts, task: up_listen
22034 Sep 22 23:14:28.072 INFO Wait for all three downstairs to come online
22035 Sep 22 23:14:28.072 INFO Flush timeout: 0.5
22036 Sep 22 23:14:28.072 INFO accepted connection from 127.0.0.1:42419, task: main
22037 Sep 22 23:14:28.072 INFO accepted connection from 127.0.0.1:41895, task: main
22038 Sep 22 23:14:28.072 INFO accepted connection from 127.0.0.1:46108, task: main
220392023-09-22T23:14:28.072ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:40823
220402023-09-22T23:14:28.072ZINFOcrucible-pantry: listen IP: 127.0.0.1:40823
22041 Sep 22 23:14:28.072 INFO [0] 5a917d88-e6fb-4057-be02-fbcd45b1ea14 looper connected, looper: 0
22042 Sep 22 23:14:28.072 INFO [0] Proc runs for 127.0.0.1:39201 in state New
22043 Sep 22 23:14:28.072 INFO [1] 5a917d88-e6fb-4057-be02-fbcd45b1ea14 looper connected, looper: 1
22044 Sep 22 23:14:28.073 INFO [1] Proc runs for 127.0.0.1:39321 in state New
22045 Sep 22 23:14:28.073 INFO UUID: b901037e-6e64-43d6-a637-75008a7adf81
22046 Sep 22 23:14:28.073 INFO Blocks per extent:5 Total Extents: 2
22047 Sep 22 23:14:28.073 INFO [2] 5a917d88-e6fb-4057-be02-fbcd45b1ea14 looper connected, looper: 2
22048 Sep 22 23:14:28.073 INFO [2] Proc runs for 127.0.0.1:48350 in state New
22049 Sep 22 23:14:28.073 INFO Crucible Version: Crucible Version: 0.0.1
22050 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22051 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22052 rustc: 1.70.0 stable x86_64-unknown-illumos
22053 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22054 Sep 22 23:14:28.073 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22055 Sep 22 23:14:28.073 INFO Using address: 127.0.0.1:35251, task: main
22056 Sep 22 23:14:28.073 INFO Repair listens on 127.0.0.1:0, task: repair
22057 Sep 22 23:14:28.073 INFO Upstairs starts
22058 Sep 22 23:14:28.073 INFO Crucible Version: BuildInfo {
22059 version: "0.0.1",
22060 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
22061 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
22062 git_branch: "main",
22063 rustc_semver: "1.70.0",
22064 rustc_channel: "stable",
22065 rustc_host_triple: "x86_64-unknown-illumos",
22066 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
22067 cargo_triple: "x86_64-unknown-illumos",
22068 debug: true,
22069 opt_level: 0,
22070 }
22071 Sep 22 23:14:28.073 INFO Upstairs <-> Downstairs Message Version: 4
22072 Sep 22 23:14:28.073 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:48552, task: repair
22073 Sep 22 23:14:28.073 INFO Crucible stats registered with UUID: 93b7973b-49e5-4363-943a-5bcc4e5bcda7
22074 Sep 22 23:14:28.073 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:48552, task: repair
22075 Sep 22 23:14:28.073 INFO Crucible 93b7973b-49e5-4363-943a-5bcc4e5bcda7 has session id: f8ce71f3-e79f-43d9-adf6-7772f1e05e28
22076 Sep 22 23:14:28.073 INFO listening, local_addr: 127.0.0.1:48552, task: repair
22077 Sep 22 23:14:28.073 INFO UUID: 0d1a8812-63e7-43b3-ae0e-38daa05e774f
22078 Sep 22 23:14:28.073 INFO Blocks per extent:5 Total Extents: 2
22079 Sep 22 23:14:28.073 INFO Connection request from 5a917d88-e6fb-4057-be02-fbcd45b1ea14 with version 4, task: proc
22080 Sep 22 23:14:28.073 INFO upstairs UpstairsConnection { upstairs_id: 5a917d88-e6fb-4057-be02-fbcd45b1ea14, session_id: 75cf63e3-c4b7-4d25-80fe-726953cbb3fa, gen: 1 } connected, version 4, task: proc
22081 Sep 22 23:14:28.073 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:48552, task: repair
22082 Sep 22 23:14:28.073 INFO Using repair address: 127.0.0.1:48552, task: main
22083 Sep 22 23:14:28.073 INFO Crucible Version: Crucible Version: 0.0.1
22084 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22085 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22086 rustc: 1.70.0 stable x86_64-unknown-illumos
22087 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22088 Sep 22 23:14:28.073 INFO No SSL acceptor configured, task: main
22089 Sep 22 23:14:28.074 INFO Connection request from 5a917d88-e6fb-4057-be02-fbcd45b1ea14 with version 4, task: proc
22090 Sep 22 23:14:28.074 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22091 Sep 22 23:14:28.074 INFO Using address: 127.0.0.1:37204, task: main
22092 Sep 22 23:14:28.074 INFO upstairs UpstairsConnection { upstairs_id: 5a917d88-e6fb-4057-be02-fbcd45b1ea14, session_id: 75cf63e3-c4b7-4d25-80fe-726953cbb3fa, gen: 1 } connected, version 4, task: proc
22093 Sep 22 23:14:28.074 INFO Connection request from 5a917d88-e6fb-4057-be02-fbcd45b1ea14 with version 4, task: proc
22094 Sep 22 23:14:28.074 INFO upstairs UpstairsConnection { upstairs_id: 5a917d88-e6fb-4057-be02-fbcd45b1ea14, session_id: 75cf63e3-c4b7-4d25-80fe-726953cbb3fa, gen: 1 } connected, version 4, task: proc
22095 Sep 22 23:14:28.074 INFO current number of open files limit 65536 is already the maximum
22096 Sep 22 23:14:28.074 INFO [0] connecting to 127.0.0.1:53594, looper: 0
22097 Sep 22 23:14:28.074 INFO Created new region file "/tmp/downstairs-YhnRrOOA/region.json"
22098 Sep 22 23:14:28.074 INFO [1] connecting to 127.0.0.1:50602, looper: 1
22099 Sep 22 23:14:28.074 INFO [2] connecting to 127.0.0.1:53970, looper: 2
22100 Sep 22 23:14:28.074 INFO Repair listens on 127.0.0.1:0, task: repair
22101 Sep 22 23:14:28.074 INFO up_listen starts, task: up_listen
22102 Sep 22 23:14:28.074 INFO Wait for all three downstairs to come online
22103 Sep 22 23:14:28.074 INFO Flush timeout: 0.5
22104 Sep 22 23:14:28.074 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:53439, task: repair
22105 Sep 22 23:14:28.074 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:53439, task: repair
22106 Sep 22 23:14:28.074 INFO listening, local_addr: 127.0.0.1:53439, task: repair
22107 Sep 22 23:14:28.074 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:53439, task: repair
22108 Sep 22 23:14:28.074 INFO Using repair address: 127.0.0.1:53439, task: main
22109 Sep 22 23:14:28.074 INFO accepted connection from 127.0.0.1:55652, task: main
22110 Sep 22 23:14:28.074 INFO No SSL acceptor configured, task: main
22111 Sep 22 23:14:28.074 INFO accepted connection from 127.0.0.1:54461, task: main
22112 Sep 22 23:14:28.075 INFO [1] 93b7973b-49e5-4363-943a-5bcc4e5bcda7 looper connected, looper: 1
22113 Sep 22 23:14:28.075 INFO [1] Proc runs for 127.0.0.1:50602 in state New
22114 Sep 22 23:14:28.075 INFO [0] 93b7973b-49e5-4363-943a-5bcc4e5bcda7 looper connected, looper: 0
22115 Sep 22 23:14:28.075 INFO [0] Proc runs for 127.0.0.1:53594 in state New
22116 Sep 22 23:14:28.075 INFO accepted connection from 127.0.0.1:65326, task: main
22117 Sep 22 23:14:28.075 INFO Upstairs starts
22118 Sep 22 23:14:28.075 INFO [0] 5a917d88-e6fb-4057-be02-fbcd45b1ea14 (75cf63e3-c4b7-4d25-80fe-726953cbb3fa) New New New ds_transition to WaitActive
22119 Sep 22 23:14:28.075 INFO Crucible Version: BuildInfo {
22120 version: "0.0.1",
22121 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
22122 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
22123 git_branch: "main",
22124 rustc_semver: "1.70.0",
22125 rustc_channel: "stable",
22126 rustc_host_triple: "x86_64-unknown-illumos",
22127 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
22128 cargo_triple: "x86_64-unknown-illumos",
22129 debug: true,
22130 opt_level: 0,
22131 }
22132 Sep 22 23:14:28.075 INFO Upstairs <-> Downstairs Message Version: 4
22133 Sep 22 23:14:28.075 INFO [0] Transition from New to WaitActive
22134 Sep 22 23:14:28.075 INFO Crucible stats registered with UUID: 20e42765-410e-433b-9d48-11eb6e978e6e
22135 Sep 22 23:14:28.075 INFO Crucible 20e42765-410e-433b-9d48-11eb6e978e6e has session id: 10f391b6-c49b-41f7-bbae-ab24f53d2215
22136 Sep 22 23:14:28.075 INFO [1] 5a917d88-e6fb-4057-be02-fbcd45b1ea14 (75cf63e3-c4b7-4d25-80fe-726953cbb3fa) WaitActive New New ds_transition to WaitActive
22137 Sep 22 23:14:28.075 INFO [1] Transition from New to WaitActive
22138 Sep 22 23:14:28.075 INFO listening on 127.0.0.1:0, task: main
22139 Sep 22 23:14:28.075 INFO [2] 5a917d88-e6fb-4057-be02-fbcd45b1ea14 (75cf63e3-c4b7-4d25-80fe-726953cbb3fa) WaitActive WaitActive New ds_transition to WaitActive
22140 Sep 22 23:14:28.075 INFO listening on 127.0.0.1:0, task: main
22141 Sep 22 23:14:28.075 INFO [2] Transition from New to WaitActive
22142 Sep 22 23:14:28.075 INFO listening on 127.0.0.1:0, task: main
22143 Sep 22 23:14:28.075 INFO listening on 127.0.0.1:0, task: main
22144 Sep 22 23:14:28.075 INFO [2] 93b7973b-49e5-4363-943a-5bcc4e5bcda7 looper connected, looper: 2
22145 Sep 22 23:14:28.075 INFO listening on 127.0.0.1:0, task: main
22146 Sep 22 23:14:28.075 INFO listening on 127.0.0.1:0, task: main
22147 Sep 22 23:14:28.075 INFO [2] Proc runs for 127.0.0.1:53970 in state New
22148 Sep 22 23:14:28.075 INFO [0] connecting to 127.0.0.1:43937, looper: 0
22149 Sep 22 23:14:28.075 INFO [1] connecting to 127.0.0.1:38253, looper: 1
22150 Sep 22 23:14:28.076 INFO [2] connecting to 127.0.0.1:32937, looper: 2
22151 Sep 22 23:14:28.076 INFO Connection request from 93b7973b-49e5-4363-943a-5bcc4e5bcda7 with version 4, task: proc
22152 Sep 22 23:14:28.076 INFO up_listen starts, task: up_listen
22153 Sep 22 23:14:28.076 INFO Wait for all three downstairs to come online
22154 Sep 22 23:14:28.076 INFO upstairs UpstairsConnection { upstairs_id: 93b7973b-49e5-4363-943a-5bcc4e5bcda7, session_id: d6ec8a5d-f8e6-4e07-ae83-e77d818c990e, gen: 1 } connected, version 4, task: proc
22155 Sep 22 23:14:28.076 INFO Flush timeout: 0.5
22156 Sep 22 23:14:28.076 INFO Connection request from 93b7973b-49e5-4363-943a-5bcc4e5bcda7 with version 4, task: proc
22157 Sep 22 23:14:28.076 INFO upstairs UpstairsConnection { upstairs_id: 93b7973b-49e5-4363-943a-5bcc4e5bcda7, session_id: d6ec8a5d-f8e6-4e07-ae83-e77d818c990e, gen: 1 } connected, version 4, task: proc
22158 Sep 22 23:14:28.076 INFO accepted connection from 127.0.0.1:59670, task: main
22159 Sep 22 23:14:28.076 INFO Connection request from 93b7973b-49e5-4363-943a-5bcc4e5bcda7 with version 4, task: proc
22160 Sep 22 23:14:28.076 INFO upstairs UpstairsConnection { upstairs_id: 93b7973b-49e5-4363-943a-5bcc4e5bcda7, session_id: d6ec8a5d-f8e6-4e07-ae83-e77d818c990e, gen: 1 } connected, version 4, task: proc
22161 Sep 22 23:14:28.076 INFO accepted connection from 127.0.0.1:58743, task: main
22162 Sep 22 23:14:28.076 INFO accepted connection from 127.0.0.1:56193, task: main
22163 Sep 22 23:14:28.076 INFO [0] 20e42765-410e-433b-9d48-11eb6e978e6e looper connected, looper: 0
22164 Sep 22 23:14:28.076 INFO [0] Proc runs for 127.0.0.1:43937 in state New
22165 Sep 22 23:14:28.076 INFO [1] 93b7973b-49e5-4363-943a-5bcc4e5bcda7 (d6ec8a5d-f8e6-4e07-ae83-e77d818c990e) New New New ds_transition to WaitActive
22166 Sep 22 23:14:28.076 INFO [1] Transition from New to WaitActive
22167 Sep 22 23:14:28.076 INFO [1] 20e42765-410e-433b-9d48-11eb6e978e6e looper connected, looper: 1
22168 Sep 22 23:14:28.076 INFO [1] Proc runs for 127.0.0.1:38253 in state New
22169 Sep 22 23:14:28.076 INFO [0] 93b7973b-49e5-4363-943a-5bcc4e5bcda7 (d6ec8a5d-f8e6-4e07-ae83-e77d818c990e) New WaitActive New ds_transition to WaitActive
22170 Sep 22 23:14:28.076 INFO [0] Transition from New to WaitActive
22171 Sep 22 23:14:28.076 INFO [2] 20e42765-410e-433b-9d48-11eb6e978e6e looper connected, looper: 2
22172 Sep 22 23:14:28.076 INFO [2] Proc runs for 127.0.0.1:32937 in state New
22173 Sep 22 23:14:28.076 INFO [2] 93b7973b-49e5-4363-943a-5bcc4e5bcda7 (d6ec8a5d-f8e6-4e07-ae83-e77d818c990e) WaitActive WaitActive New ds_transition to WaitActive
22174 Sep 22 23:14:28.076 INFO [2] Transition from New to WaitActive
22175 The guest has requested activation
22176 Sep 22 23:14:28.077 INFO 5a917d88-e6fb-4057-be02-fbcd45b1ea14 active request set
22177 Sep 22 23:14:28.077 INFO Upstairs starts
22178 Sep 22 23:14:28.077 INFO Crucible Version: BuildInfo {
22179 version: "0.0.1",
22180 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
22181 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
22182 git_branch: "main",
22183 rustc_semver: "1.70.0",
22184 rustc_channel: "stable",
22185 rustc_host_triple: "x86_64-unknown-illumos",
22186 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
22187 cargo_triple: "x86_64-unknown-illumos",
22188 debug: true,
22189 opt_level: 0,
22190 }
22191 Sep 22 23:14:28.077 INFO Upstairs <-> Downstairs Message Version: 4
22192 Sep 22 23:14:28.077 INFO [0] received activate with gen 1
22193 Sep 22 23:14:28.077 INFO Crucible stats registered with UUID: 668b284a-1cf2-4b38-b8f6-907605c718a3
22194 Sep 22 23:14:28.077 INFO [0] client got ds_active_rx, promote! session 75cf63e3-c4b7-4d25-80fe-726953cbb3fa
22195 Sep 22 23:14:28.077 INFO Crucible 668b284a-1cf2-4b38-b8f6-907605c718a3 has session id: 38d842b8-b294-4d07-9341-5554639614f2
22196 Sep 22 23:14:28.077 INFO [1] received activate with gen 1
22197 Sep 22 23:14:28.077 INFO [1] client got ds_active_rx, promote! session 75cf63e3-c4b7-4d25-80fe-726953cbb3fa
22198 Sep 22 23:14:28.077 INFO Connection request from 20e42765-410e-433b-9d48-11eb6e978e6e with version 4, task: proc
22199 Sep 22 23:14:28.077 INFO [2] received activate with gen 1
22200 Sep 22 23:14:28.077 INFO upstairs UpstairsConnection { upstairs_id: 20e42765-410e-433b-9d48-11eb6e978e6e, session_id: 08079beb-4388-43ba-85d1-274eac4beb70, gen: 1 } connected, version 4, task: proc
22201 Sep 22 23:14:28.077 INFO [2] client got ds_active_rx, promote! session 75cf63e3-c4b7-4d25-80fe-726953cbb3fa
22202 Sep 22 23:14:28.077 INFO Connection request from 20e42765-410e-433b-9d48-11eb6e978e6e with version 4, task: proc
22203 Sep 22 23:14:28.077 INFO upstairs UpstairsConnection { upstairs_id: 20e42765-410e-433b-9d48-11eb6e978e6e, session_id: 08079beb-4388-43ba-85d1-274eac4beb70, gen: 1 } connected, version 4, task: proc
22204 Sep 22 23:14:28.077 INFO UpstairsConnection { upstairs_id: 5a917d88-e6fb-4057-be02-fbcd45b1ea14, session_id: 75cf63e3-c4b7-4d25-80fe-726953cbb3fa, gen: 1 } is now active (read-write)
22205 Sep 22 23:14:28.077 INFO Connection request from 20e42765-410e-433b-9d48-11eb6e978e6e with version 4, task: proc
22206 Sep 22 23:14:28.077 INFO upstairs UpstairsConnection { upstairs_id: 20e42765-410e-433b-9d48-11eb6e978e6e, session_id: 08079beb-4388-43ba-85d1-274eac4beb70, gen: 1 } connected, version 4, task: proc
22207 Sep 22 23:14:28.077 INFO [0] connecting to 127.0.0.1:53693, looper: 0
22208 Sep 22 23:14:28.077 INFO UpstairsConnection { upstairs_id: 5a917d88-e6fb-4057-be02-fbcd45b1ea14, session_id: 75cf63e3-c4b7-4d25-80fe-726953cbb3fa, gen: 1 } is now active (read-write)
22209 Sep 22 23:14:28.077 INFO [1] connecting to 127.0.0.1:43883, looper: 1
22210 Sep 22 23:14:28.077 INFO [2] connecting to 127.0.0.1:37204, looper: 2
22211 Sep 22 23:14:28.077 INFO UpstairsConnection { upstairs_id: 5a917d88-e6fb-4057-be02-fbcd45b1ea14, session_id: 75cf63e3-c4b7-4d25-80fe-726953cbb3fa, gen: 1 } is now active (read-write)
22212 Sep 22 23:14:28.077 INFO up_listen starts, task: up_listen
22213 Sep 22 23:14:28.077 INFO Wait for all three downstairs to come online
22214 Sep 22 23:14:28.077 INFO Flush timeout: 0.5
22215 Sep 22 23:14:28.078 INFO current number of open files limit 65536 is already the maximum
22216 Sep 22 23:14:28.078 INFO accepted connection from 127.0.0.1:46626, task: main
22217 Sep 22 23:14:28.078 INFO Opened existing region file "/tmp/downstairs-YhnRrOOA/region.json"
22218 Sep 22 23:14:28.078 INFO Database read version 1
22219 Sep 22 23:14:28.078 INFO Database write version 1
22220 Sep 22 23:14:28.078 INFO accepted connection from 127.0.0.1:45892, task: main
22221 Sep 22 23:14:28.078 INFO accepted connection from 127.0.0.1:42215, task: main
22222 Sep 22 23:14:28.078 INFO [0] 20e42765-410e-433b-9d48-11eb6e978e6e (08079beb-4388-43ba-85d1-274eac4beb70) New New New ds_transition to WaitActive
22223 Sep 22 23:14:28.078 INFO [0] Transition from New to WaitActive
22224 Sep 22 23:14:28.078 INFO [0] downstairs client at 127.0.0.1:39201 has UUID 07752cff-eba4-4cda-9fc8-25c01941ef38
22225 Sep 22 23:14:28.078 INFO [1] 20e42765-410e-433b-9d48-11eb6e978e6e (08079beb-4388-43ba-85d1-274eac4beb70) WaitActive New New ds_transition to WaitActive
22226 Sep 22 23:14:28.078 INFO [1] Transition from New to WaitActive
22227 Sep 22 23:14:28.078 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 07752cff-eba4-4cda-9fc8-25c01941ef38, encrypted: true, database_read_version: 1, database_write_version: 1 }
22228 Sep 22 23:14:28.078 INFO [2] 20e42765-410e-433b-9d48-11eb6e978e6e (08079beb-4388-43ba-85d1-274eac4beb70) WaitActive WaitActive New ds_transition to WaitActive
22229 Sep 22 23:14:28.078 INFO 5a917d88-e6fb-4057-be02-fbcd45b1ea14 WaitActive WaitActive WaitActive
22230 Sep 22 23:14:28.078 INFO [2] Transition from New to WaitActive
22231 Sep 22 23:14:28.078 INFO [0] 668b284a-1cf2-4b38-b8f6-907605c718a3 looper connected, looper: 0
22232 Sep 22 23:14:28.078 INFO [0] Proc runs for 127.0.0.1:53693 in state New
22233 Sep 22 23:14:28.078 INFO [1] downstairs client at 127.0.0.1:39321 has UUID 8a58fc4b-e2e1-4a7f-8747-0d8dd2cd6f65
22234 Sep 22 23:14:28.078 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 8a58fc4b-e2e1-4a7f-8747-0d8dd2cd6f65, encrypted: true, database_read_version: 1, database_write_version: 1 }
22235 Sep 22 23:14:28.078 INFO [1] 668b284a-1cf2-4b38-b8f6-907605c718a3 looper connected, looper: 1
22236 Sep 22 23:14:28.078 INFO 5a917d88-e6fb-4057-be02-fbcd45b1ea14 WaitActive WaitActive WaitActive
22237 Sep 22 23:14:28.078 INFO [1] Proc runs for 127.0.0.1:43883 in state New
22238 Sep 22 23:14:28.078 INFO [2] downstairs client at 127.0.0.1:48350 has UUID 3f976252-a8b2-44c0-aea9-ba9b4e4fedb3
22239 Sep 22 23:14:28.078 INFO [2] 668b284a-1cf2-4b38-b8f6-907605c718a3 looper connected, looper: 2
22240 Sep 22 23:14:28.078 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 3f976252-a8b2-44c0-aea9-ba9b4e4fedb3, encrypted: true, database_read_version: 1, database_write_version: 1 }
22241 Sep 22 23:14:28.078 INFO [2] Proc runs for 127.0.0.1:37204 in state New
22242 Sep 22 23:14:28.078 INFO 5a917d88-e6fb-4057-be02-fbcd45b1ea14 WaitActive WaitActive WaitActive
22243 Sep 22 23:14:28.078 INFO Connection request from 668b284a-1cf2-4b38-b8f6-907605c718a3 with version 4, task: proc
22244 Sep 22 23:14:28.078 INFO Current flush_numbers [0..12]: [0, 0]
22245 Sep 22 23:14:28.078 INFO upstairs UpstairsConnection { upstairs_id: 668b284a-1cf2-4b38-b8f6-907605c718a3, session_id: 5cfe049d-d730-4ad8-841b-451298810994, gen: 1 } connected, version 4, task: proc
22246 Sep 22 23:14:28.079 INFO Connection request from 668b284a-1cf2-4b38-b8f6-907605c718a3 with version 4, task: proc
22247 Sep 22 23:14:28.079 INFO upstairs UpstairsConnection { upstairs_id: 668b284a-1cf2-4b38-b8f6-907605c718a3, session_id: 5cfe049d-d730-4ad8-841b-451298810994, gen: 1 } connected, version 4, task: proc
22248 Sep 22 23:14:28.079 INFO Connection request from 668b284a-1cf2-4b38-b8f6-907605c718a3 with version 4, task: proc
22249 Sep 22 23:14:28.079 INFO Downstairs has completed Negotiation, task: proc
22250 Sep 22 23:14:28.079 INFO upstairs UpstairsConnection { upstairs_id: 668b284a-1cf2-4b38-b8f6-907605c718a3, session_id: 5cfe049d-d730-4ad8-841b-451298810994, gen: 1 } connected, version 4, task: proc
22251 Sep 22 23:14:28.079 INFO [0] 668b284a-1cf2-4b38-b8f6-907605c718a3 (5cfe049d-d730-4ad8-841b-451298810994) New New New ds_transition to WaitActive
22252 Sep 22 23:14:28.079 INFO Current flush_numbers [0..12]: [0, 0]
22253 Sep 22 23:14:28.079 INFO [0] Transition from New to WaitActive
22254 Sep 22 23:14:28.079 INFO [1] 668b284a-1cf2-4b38-b8f6-907605c718a3 (5cfe049d-d730-4ad8-841b-451298810994) WaitActive New New ds_transition to WaitActive
22255 Sep 22 23:14:28.079 INFO [1] Transition from New to WaitActive
22256 Sep 22 23:14:28.079 INFO [2] 668b284a-1cf2-4b38-b8f6-907605c718a3 (5cfe049d-d730-4ad8-841b-451298810994) WaitActive WaitActive New ds_transition to WaitActive
22257 Sep 22 23:14:28.079 INFO [2] Transition from New to WaitActive
22258 Sep 22 23:14:28.079 INFO Downstairs has completed Negotiation, task: proc
22259 The guest has requested activation
22260 Sep 22 23:14:28.079 INFO 20e42765-410e-433b-9d48-11eb6e978e6e active request set
22261 Sep 22 23:14:28.079 INFO [0] received activate with gen 1
22262 Sep 22 23:14:28.079 INFO [0] client got ds_active_rx, promote! session 08079beb-4388-43ba-85d1-274eac4beb70
22263 Sep 22 23:14:28.079 INFO Current flush_numbers [0..12]: [0, 0]
22264 Sep 22 23:14:28.079 INFO [1] received activate with gen 1
22265 Sep 22 23:14:28.079 INFO [1] client got ds_active_rx, promote! session 08079beb-4388-43ba-85d1-274eac4beb70
22266 Sep 22 23:14:28.079 INFO [2] received activate with gen 1
22267 Sep 22 23:14:28.079 INFO [2] client got ds_active_rx, promote! session 08079beb-4388-43ba-85d1-274eac4beb70
22268 Sep 22 23:14:28.079 INFO Downstairs has completed Negotiation, task: proc
22269 Sep 22 23:14:28.079 INFO UpstairsConnection { upstairs_id: 20e42765-410e-433b-9d48-11eb6e978e6e, session_id: 08079beb-4388-43ba-85d1-274eac4beb70, gen: 1 } is now active (read-write)
22270 Sep 22 23:14:28.079 INFO UUID: b54ba00b-56b7-4fec-8b06-0390fa4a34ff
22271 Sep 22 23:14:28.079 INFO Blocks per extent:5 Total Extents: 2
22272 Sep 22 23:14:28.080 INFO UpstairsConnection { upstairs_id: 20e42765-410e-433b-9d48-11eb6e978e6e, session_id: 08079beb-4388-43ba-85d1-274eac4beb70, gen: 1 } is now active (read-write)
22273 Sep 22 23:14:28.080 INFO Crucible Version: Crucible Version: 0.0.1
22274 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22275 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22276 rustc: 1.70.0 stable x86_64-unknown-illumos
22277 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22278 Sep 22 23:14:28.080 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22279 Sep 22 23:14:28.080 INFO Using address: 127.0.0.1:33061, task: main
22280 Sep 22 23:14:28.080 INFO [0] 5a917d88-e6fb-4057-be02-fbcd45b1ea14 (75cf63e3-c4b7-4d25-80fe-726953cbb3fa) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
22281 Sep 22 23:14:28.080 INFO [0] Transition from WaitActive to WaitQuorum
22282 Sep 22 23:14:28.080 INFO UpstairsConnection { upstairs_id: 20e42765-410e-433b-9d48-11eb6e978e6e, session_id: 08079beb-4388-43ba-85d1-274eac4beb70, gen: 1 } is now active (read-write)
22283 Sep 22 23:14:28.080 WARN [0] new RM replaced this: None
22284 Sep 22 23:14:28.080 INFO [0] Starts reconcile loop
22285 Sep 22 23:14:28.080 INFO [1] 5a917d88-e6fb-4057-be02-fbcd45b1ea14 (75cf63e3-c4b7-4d25-80fe-726953cbb3fa) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
22286 Sep 22 23:14:28.080 INFO [1] Transition from WaitActive to WaitQuorum
22287 Sep 22 23:14:28.080 WARN [1] new RM replaced this: None
22288 Sep 22 23:14:28.080 INFO Repair listens on 127.0.0.1:0, task: repair
22289 Sep 22 23:14:28.080 INFO [1] Starts reconcile loop
22290 Sep 22 23:14:28.080 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58530, task: repair
22291 Sep 22 23:14:28.080 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58530, task: repair
22292 Sep 22 23:14:28.080 INFO [2] 5a917d88-e6fb-4057-be02-fbcd45b1ea14 (75cf63e3-c4b7-4d25-80fe-726953cbb3fa) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
22293 Sep 22 23:14:28.080 INFO [2] Transition from WaitActive to WaitQuorum
22294 Sep 22 23:14:28.080 INFO listening, local_addr: 127.0.0.1:58530, task: repair
22295 Sep 22 23:14:28.080 WARN [2] new RM replaced this: None
22296 Sep 22 23:14:28.080 INFO [0] downstairs client at 127.0.0.1:43937 has UUID 4653359c-1409-4b81-8a59-94f85d2bcf29
22297 Sep 22 23:14:28.080 INFO [2] Starts reconcile loop
22298 Sep 22 23:14:28.080 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4653359c-1409-4b81-8a59-94f85d2bcf29, encrypted: true, database_read_version: 1, database_write_version: 1 }
22299 Sep 22 23:14:28.080 INFO 20e42765-410e-433b-9d48-11eb6e978e6e WaitActive WaitActive WaitActive
22300 Sep 22 23:14:28.080 INFO [0] 127.0.0.1:39201 task reports connection:true
22301 Sep 22 23:14:28.080 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58530, task: repair
22302 Sep 22 23:14:28.080 INFO 5a917d88-e6fb-4057-be02-fbcd45b1ea14 WaitQuorum WaitQuorum WaitQuorum
22303 Sep 22 23:14:28.080 INFO Using repair address: 127.0.0.1:58530, task: main
22304 Sep 22 23:14:28.080 INFO [1] downstairs client at 127.0.0.1:38253 has UUID 3f0b274b-a55a-4121-96c8-a689e27a459a
22305 Sep 22 23:14:28.080 INFO No SSL acceptor configured, task: main
22306 Sep 22 23:14:28.080 INFO [0]R flush_numbers: [0, 0]
22307 Sep 22 23:14:28.080 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 3f0b274b-a55a-4121-96c8-a689e27a459a, encrypted: true, database_read_version: 1, database_write_version: 1 }
22308 Sep 22 23:14:28.080 INFO [0]R generation: [0, 0]
22309 Sep 22 23:14:28.080 INFO [0]R dirty: [false, false]
22310 Sep 22 23:14:28.080 INFO [1]R flush_numbers: [0, 0]
22311 Sep 22 23:14:28.080 INFO 20e42765-410e-433b-9d48-11eb6e978e6e WaitActive WaitActive WaitActive
22312 Sep 22 23:14:28.080 INFO [1]R generation: [0, 0]
22313 Sep 22 23:14:28.080 INFO [1]R dirty: [false, false]
22314 Sep 22 23:14:28.080 INFO [2]R flush_numbers: [0, 0]
22315 Sep 22 23:14:28.080 INFO [2] downstairs client at 127.0.0.1:32937 has UUID 9a0e7cc4-1eb0-42dc-b42e-9dc1e52fa851
22316 Sep 22 23:14:28.080 INFO [2]R generation: [0, 0]
22317 Sep 22 23:14:28.080 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9a0e7cc4-1eb0-42dc-b42e-9dc1e52fa851, encrypted: true, database_read_version: 1, database_write_version: 1 }
22318 Sep 22 23:14:28.080 INFO [2]R dirty: [false, false]
22319 Sep 22 23:14:28.080 INFO Max found gen is 1
22320 Sep 22 23:14:28.080 INFO Generation requested: 1 >= found:1
22321 Sep 22 23:14:28.080 INFO 20e42765-410e-433b-9d48-11eb6e978e6e WaitActive WaitActive WaitActive
22322 Sep 22 23:14:28.080 INFO Next flush: 1
22323 Sep 22 23:14:28.080 INFO All extents match
22324 Sep 22 23:14:28.080 INFO No downstairs repair required
22325 Sep 22 23:14:28.080 INFO No initial repair work was required
22326 Sep 22 23:14:28.080 INFO Set Downstairs and Upstairs active
22327 Sep 22 23:14:28.080 INFO current number of open files limit 65536 is already the maximum
22328 Sep 22 23:14:28.080 INFO 5a917d88-e6fb-4057-be02-fbcd45b1ea14 is now active with session: 75cf63e3-c4b7-4d25-80fe-726953cbb3fa
22329 Sep 22 23:14:28.080 INFO 5a917d88-e6fb-4057-be02-fbcd45b1ea14 Set Active after no repair
22330 Sep 22 23:14:28.080 INFO Current flush_numbers [0..12]: [0, 0]
22331 Sep 22 23:14:28.080 INFO Notify all downstairs, region set compare is done.
22332 Sep 22 23:14:28.080 INFO Created new region file "/tmp/downstairs-OkVHThGJ/region.json"
22333 Sep 22 23:14:28.080 INFO Set check for repair
22334 Sep 22 23:14:28.081 INFO [1] 127.0.0.1:39321 task reports connection:true
22335 Sep 22 23:14:28.081 INFO 5a917d88-e6fb-4057-be02-fbcd45b1ea14 Active Active Active
22336 Sep 22 23:14:28.081 INFO Downstairs has completed Negotiation, task: proc
22337 Sep 22 23:14:28.081 INFO Set check for repair
22338 Sep 22 23:14:28.081 INFO [2] 127.0.0.1:48350 task reports connection:true
22339 Sep 22 23:14:28.081 INFO 5a917d88-e6fb-4057-be02-fbcd45b1ea14 Active Active Active
22340 Sep 22 23:14:28.081 INFO Set check for repair
22341 Sep 22 23:14:28.081 INFO [0] received reconcile message
22342 Sep 22 23:14:28.081 INFO Current flush_numbers [0..12]: [0, 0]
22343 Sep 22 23:14:28.081 INFO [0] All repairs completed, exit
22344 Sep 22 23:14:28.081 INFO [0] Starts cmd_loop
22345 Sep 22 23:14:28.081 INFO [1] received reconcile message
22346 Sep 22 23:14:28.081 INFO Downstairs has completed Negotiation, task: proc
22347 Sep 22 23:14:28.081 INFO [1] All repairs completed, exit
22348 Sep 22 23:14:28.081 INFO [1] Starts cmd_loop
22349 Sep 22 23:14:28.081 INFO [2] received reconcile message
22350 Sep 22 23:14:28.081 INFO [2] All repairs completed, exit
22351 Sep 22 23:14:28.081 INFO [2] Starts cmd_loop
22352 Sep 22 23:14:28.081 INFO Current flush_numbers [0..12]: [0, 0]
22353 The guest has finished waiting for activation
22354 Sep 22 23:14:28.081 INFO Downstairs has completed Negotiation, task: proc
22355 Sep 22 23:14:28.081 INFO [0] 20e42765-410e-433b-9d48-11eb6e978e6e (08079beb-4388-43ba-85d1-274eac4beb70) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
22356 Sep 22 23:14:28.081 INFO [0] Transition from WaitActive to WaitQuorum
22357 Sep 22 23:14:28.082 WARN [0] new RM replaced this: None
22358 The guest has requested activation
22359 Sep 22 23:14:28.082 INFO [0] Starts reconcile loop
22360 Sep 22 23:14:28.082 INFO 93b7973b-49e5-4363-943a-5bcc4e5bcda7 active request set
22361 Sep 22 23:14:28.082 INFO [1] 20e42765-410e-433b-9d48-11eb6e978e6e (08079beb-4388-43ba-85d1-274eac4beb70) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
22362 Sep 22 23:14:28.082 INFO [1] Transition from WaitActive to WaitQuorum
22363 Sep 22 23:14:28.082 WARN [1] new RM replaced this: None
22364 Sep 22 23:14:28.082 INFO [0] received activate with gen 1
22365 Sep 22 23:14:28.082 INFO [1] Starts reconcile loop
22366 Sep 22 23:14:28.082 INFO [0] client got ds_active_rx, promote! session d6ec8a5d-f8e6-4e07-ae83-e77d818c990e
22367 Sep 22 23:14:28.082 INFO [2] 20e42765-410e-433b-9d48-11eb6e978e6e (08079beb-4388-43ba-85d1-274eac4beb70) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
22368 Sep 22 23:14:28.082 INFO [1] received activate with gen 1
22369 Sep 22 23:14:28.082 INFO [2] Transition from WaitActive to WaitQuorum
22370 Sep 22 23:14:28.082 INFO [1] client got ds_active_rx, promote! session d6ec8a5d-f8e6-4e07-ae83-e77d818c990e
22371 Sep 22 23:14:28.082 WARN [2] new RM replaced this: None
22372 Sep 22 23:14:28.082 INFO [2] Starts reconcile loop
22373 Sep 22 23:14:28.082 INFO [2] received activate with gen 1
22374 Sep 22 23:14:28.082 INFO [2] client got ds_active_rx, promote! session d6ec8a5d-f8e6-4e07-ae83-e77d818c990e
22375 Sep 22 23:14:28.082 INFO [0] 127.0.0.1:43937 task reports connection:true
22376 Sep 22 23:14:28.082 INFO 20e42765-410e-433b-9d48-11eb6e978e6e WaitQuorum WaitQuorum WaitQuorum
22377 Sep 22 23:14:28.082 INFO [0]R flush_numbers: [0, 0]
22378 Sep 22 23:14:28.082 INFO [0]R generation: [0, 0]
22379 Sep 22 23:14:28.082 INFO [0]R dirty: [false, false]
22380 Sep 22 23:14:28.082 INFO [1]R flush_numbers: [0, 0]
22381 Sep 22 23:14:28.082 INFO [1]R generation: [0, 0]
22382 Sep 22 23:14:28.082 INFO [1]R dirty: [false, false]
22383 Sep 22 23:14:28.082 INFO [2]R flush_numbers: [0, 0]
22384 Sep 22 23:14:28.082 INFO UpstairsConnection { upstairs_id: 93b7973b-49e5-4363-943a-5bcc4e5bcda7, session_id: d6ec8a5d-f8e6-4e07-ae83-e77d818c990e, gen: 1 } is now active (read-write)
22385 Sep 22 23:14:28.082 INFO [2]R generation: [0, 0]
22386 Sep 22 23:14:28.082 INFO [2]R dirty: [false, false]
22387 Sep 22 23:14:28.082 INFO Max found gen is 1
22388 Sep 22 23:14:28.082 INFO Generation requested: 1 >= found:1
22389 Sep 22 23:14:28.082 INFO Next flush: 1
22390 Sep 22 23:14:28.082 INFO All extents match
22391 Sep 22 23:14:28.082 INFO No downstairs repair required
22392 Sep 22 23:14:28.082 INFO UpstairsConnection { upstairs_id: 93b7973b-49e5-4363-943a-5bcc4e5bcda7, session_id: d6ec8a5d-f8e6-4e07-ae83-e77d818c990e, gen: 1 } is now active (read-write)
22393 Sep 22 23:14:28.082 INFO No initial repair work was required
22394 Sep 22 23:14:28.082 INFO Set Downstairs and Upstairs active
22395 Sep 22 23:14:28.082 INFO 20e42765-410e-433b-9d48-11eb6e978e6e is now active with session: 08079beb-4388-43ba-85d1-274eac4beb70
22396 Sep 22 23:14:28.082 INFO 20e42765-410e-433b-9d48-11eb6e978e6e Set Active after no repair
22397 Sep 22 23:14:28.082 INFO Notify all downstairs, region set compare is done.
22398 Sep 22 23:14:28.082 INFO Set check for repair
22399 Sep 22 23:14:28.082 INFO UpstairsConnection { upstairs_id: 93b7973b-49e5-4363-943a-5bcc4e5bcda7, session_id: d6ec8a5d-f8e6-4e07-ae83-e77d818c990e, gen: 1 } is now active (read-write)
22400 Sep 22 23:14:28.082 INFO [1] 127.0.0.1:38253 task reports connection:true
22401 Sep 22 23:14:28.082 INFO 20e42765-410e-433b-9d48-11eb6e978e6e Active Active Active
22402 Sep 22 23:14:28.082 INFO Set check for repair
22403 Sep 22 23:14:28.082 INFO [2] 127.0.0.1:32937 task reports connection:true
22404 Sep 22 23:14:28.082 INFO 20e42765-410e-433b-9d48-11eb6e978e6e Active Active Active
22405 Sep 22 23:14:28.082 INFO Set check for repair
22406 Sep 22 23:14:28.082 INFO [0] received reconcile message
22407 Sep 22 23:14:28.082 INFO [0] All repairs completed, exit
22408 Sep 22 23:14:28.082 INFO [0] Starts cmd_loop
22409 Sep 22 23:14:28.082 INFO [1] received reconcile message
22410 Sep 22 23:14:28.082 INFO [1] All repairs completed, exit
22411 Sep 22 23:14:28.082 INFO [1] Starts cmd_loop
22412 Sep 22 23:14:28.082 INFO [1] downstairs client at 127.0.0.1:50602 has UUID 106a66d5-99df-45fc-8e19-905c44b58eb8
22413 Sep 22 23:14:28.082 INFO [2] received reconcile message
22414 Sep 22 23:14:28.082 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 106a66d5-99df-45fc-8e19-905c44b58eb8, encrypted: true, database_read_version: 1, database_write_version: 1 }
22415 Sep 22 23:14:28.082 INFO [2] All repairs completed, exit
22416 Sep 22 23:14:28.082 INFO 93b7973b-49e5-4363-943a-5bcc4e5bcda7 WaitActive WaitActive WaitActive
22417 Sep 22 23:14:28.083 INFO [0] downstairs client at 127.0.0.1:53594 has UUID dfa11e22-89c3-48d2-ac68-b5822a655ba0
22418 Sep 22 23:14:28.083 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: dfa11e22-89c3-48d2-ac68-b5822a655ba0, encrypted: true, database_read_version: 1, database_write_version: 1 }
22419 Sep 22 23:14:28.083 INFO [2] Starts cmd_loop
22420 Sep 22 23:14:28.083 INFO 93b7973b-49e5-4363-943a-5bcc4e5bcda7 WaitActive WaitActive WaitActive
22421 The guest has finished waiting for activation
22422 Sep 22 23:14:28.083 INFO [2] downstairs client at 127.0.0.1:53970 has UUID 087cb3f4-dc4b-491c-947f-013a8f45b507
22423 Sep 22 23:14:28.083 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 087cb3f4-dc4b-491c-947f-013a8f45b507, encrypted: true, database_read_version: 1, database_write_version: 1 }
22424 Sep 22 23:14:28.083 INFO 93b7973b-49e5-4363-943a-5bcc4e5bcda7 WaitActive WaitActive WaitActive
22425 The guest has requested activation
22426 Sep 22 23:14:28.083 INFO 668b284a-1cf2-4b38-b8f6-907605c718a3 active request set
22427 Sep 22 23:14:28.083 INFO [0] received activate with gen 1
22428 Sep 22 23:14:28.083 INFO Current flush_numbers [0..12]: [0, 0]
22429 Sep 22 23:14:28.083 INFO [0] client got ds_active_rx, promote! session 5cfe049d-d730-4ad8-841b-451298810994
22430 Sep 22 23:14:28.083 INFO [1] received activate with gen 1
22431 Sep 22 23:14:28.083 INFO [1] client got ds_active_rx, promote! session 5cfe049d-d730-4ad8-841b-451298810994
22432 Sep 22 23:14:28.083 INFO [2] received activate with gen 1
22433 Sep 22 23:14:28.083 INFO [2] client got ds_active_rx, promote! session 5cfe049d-d730-4ad8-841b-451298810994
22434 Sep 22 23:14:28.083 INFO Downstairs has completed Negotiation, task: proc
22435 Sep 22 23:14:28.083 INFO UpstairsConnection { upstairs_id: 668b284a-1cf2-4b38-b8f6-907605c718a3, session_id: 5cfe049d-d730-4ad8-841b-451298810994, gen: 1 } is now active (read-write)
22436 Sep 22 23:14:28.083 INFO Current flush_numbers [0..12]: [0, 0]
22437 Sep 22 23:14:28.083 INFO UpstairsConnection { upstairs_id: 668b284a-1cf2-4b38-b8f6-907605c718a3, session_id: 5cfe049d-d730-4ad8-841b-451298810994, gen: 1 } is now active (read-write)
22438 Sep 22 23:14:28.083 INFO UpstairsConnection { upstairs_id: 668b284a-1cf2-4b38-b8f6-907605c718a3, session_id: 5cfe049d-d730-4ad8-841b-451298810994, gen: 1 } is now active (read-write)
22439 Sep 22 23:14:28.084 INFO Downstairs has completed Negotiation, task: proc
22440 Sep 22 23:14:28.084 INFO Current flush_numbers [0..12]: [0, 0]
22441 Sep 22 23:14:28.084 INFO [0] downstairs client at 127.0.0.1:53693 has UUID 6fddf8da-d152-4d38-8108-a3c9271fb31e
22442 Sep 22 23:14:28.084 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6fddf8da-d152-4d38-8108-a3c9271fb31e, encrypted: true, database_read_version: 1, database_write_version: 1 }
22443 Sep 22 23:14:28.084 INFO 668b284a-1cf2-4b38-b8f6-907605c718a3 WaitActive WaitActive WaitActive
22444 Sep 22 23:14:28.084 INFO Downstairs has completed Negotiation, task: proc
22445 Sep 22 23:14:28.084 INFO [1] downstairs client at 127.0.0.1:43883 has UUID a532462e-5feb-4b2d-aa58-e3da9a570973
22446 Sep 22 23:14:28.084 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a532462e-5feb-4b2d-aa58-e3da9a570973, encrypted: true, database_read_version: 1, database_write_version: 1 }
22447 Sep 22 23:14:28.084 INFO 668b284a-1cf2-4b38-b8f6-907605c718a3 WaitActive WaitActive WaitActive
22448 Sep 22 23:14:28.084 INFO [2] downstairs client at 127.0.0.1:37204 has UUID 0d1a8812-63e7-43b3-ae0e-38daa05e774f
22449 Sep 22 23:14:28.084 INFO [1] 93b7973b-49e5-4363-943a-5bcc4e5bcda7 (d6ec8a5d-f8e6-4e07-ae83-e77d818c990e) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
22450 Sep 22 23:14:28.084 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 0d1a8812-63e7-43b3-ae0e-38daa05e774f, encrypted: true, database_read_version: 1, database_write_version: 1 }
22451 Sep 22 23:14:28.084 INFO [1] Transition from WaitActive to WaitQuorum
22452 Sep 22 23:14:28.084 WARN [1] new RM replaced this: None
22453 Sep 22 23:14:28.084 INFO 668b284a-1cf2-4b38-b8f6-907605c718a3 WaitActive WaitActive WaitActive
22454 Sep 22 23:14:28.084 INFO [1] Starts reconcile loop
22455 Sep 22 23:14:28.084 INFO [0] 93b7973b-49e5-4363-943a-5bcc4e5bcda7 (d6ec8a5d-f8e6-4e07-ae83-e77d818c990e) WaitActive WaitQuorum WaitActive ds_transition to WaitQuorum
22456 Sep 22 23:14:28.084 INFO [0] Transition from WaitActive to WaitQuorum
22457 Sep 22 23:14:28.084 WARN [0] new RM replaced this: None
22458 Sep 22 23:14:28.084 INFO [0] Starts reconcile loop
22459 Sep 22 23:14:28.084 INFO Current flush_numbers [0..12]: [0, 0]
22460 Sep 22 23:14:28.084 INFO [2] 93b7973b-49e5-4363-943a-5bcc4e5bcda7 (d6ec8a5d-f8e6-4e07-ae83-e77d818c990e) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
22461 Sep 22 23:14:28.084 INFO [2] Transition from WaitActive to WaitQuorum
22462 Sep 22 23:14:28.084 WARN [2] new RM replaced this: None
22463 Sep 22 23:14:28.084 INFO [2] Starts reconcile loop
22464 Sep 22 23:14:28.084 INFO current number of open files limit 65536 is already the maximum
22465 Sep 22 23:14:28.084 INFO Opened existing region file "/tmp/downstairs-OkVHThGJ/region.json"
22466 Sep 22 23:14:28.084 INFO [1] 127.0.0.1:50602 task reports connection:true
22467 Sep 22 23:14:28.084 INFO Database read version 1
22468 Sep 22 23:14:28.084 INFO Database write version 1
22469 Sep 22 23:14:28.084 INFO 93b7973b-49e5-4363-943a-5bcc4e5bcda7 WaitQuorum WaitQuorum WaitQuorum
22470 Sep 22 23:14:28.084 INFO [0]R flush_numbers: [0, 0]
22471 Sep 22 23:14:28.084 INFO Downstairs has completed Negotiation, task: proc
22472 Sep 22 23:14:28.084 INFO [0]R generation: [0, 0]
22473 Sep 22 23:14:28.084 INFO [0]R dirty: [false, false]
22474 Sep 22 23:14:28.084 INFO [1]R flush_numbers: [0, 0]
22475 Sep 22 23:14:28.084 INFO [1]R generation: [0, 0]
22476 Sep 22 23:14:28.084 INFO [1]R dirty: [false, false]
22477 Sep 22 23:14:28.084 INFO [2]R flush_numbers: [0, 0]
22478 Sep 22 23:14:28.084 INFO [2]R generation: [0, 0]
22479 Sep 22 23:14:28.084 INFO [2]R dirty: [false, false]
22480 Sep 22 23:14:28.084 INFO Max found gen is 1
22481 Sep 22 23:14:28.084 INFO Generation requested: 1 >= found:1
22482 Sep 22 23:14:28.084 INFO Next flush: 1
22483 Sep 22 23:14:28.084 INFO All extents match
22484 Sep 22 23:14:28.085 INFO No downstairs repair required
22485 Sep 22 23:14:28.085 INFO No initial repair work was required
22486 Sep 22 23:14:28.085 INFO Set Downstairs and Upstairs active
22487 Sep 22 23:14:28.085 INFO Current flush_numbers [0..12]: [0, 0]
22488 Sep 22 23:14:28.085 INFO 93b7973b-49e5-4363-943a-5bcc4e5bcda7 is now active with session: d6ec8a5d-f8e6-4e07-ae83-e77d818c990e
22489 Sep 22 23:14:28.085 INFO 93b7973b-49e5-4363-943a-5bcc4e5bcda7 Set Active after no repair
22490 Sep 22 23:14:28.085 INFO Notify all downstairs, region set compare is done.
22491 Sep 22 23:14:28.085 INFO Set check for repair
22492 Sep 22 23:14:28.085 INFO [0] 127.0.0.1:53594 task reports connection:true
22493 Sep 22 23:14:28.085 INFO 93b7973b-49e5-4363-943a-5bcc4e5bcda7 Active Active Active
22494 Sep 22 23:14:28.085 INFO Set check for repair
22495 Sep 22 23:14:28.085 INFO Downstairs has completed Negotiation, task: proc
22496 Sep 22 23:14:28.085 INFO [2] 127.0.0.1:53970 task reports connection:true
22497 Sep 22 23:14:28.085 INFO 93b7973b-49e5-4363-943a-5bcc4e5bcda7 Active Active Active
22498 Sep 22 23:14:28.085 INFO Set check for repair
22499 Sep 22 23:14:28.085 INFO [0] received reconcile message
22500 Sep 22 23:14:28.085 INFO [0] All repairs completed, exit
22501 Sep 22 23:14:28.085 INFO [0] Starts cmd_loop
22502 Sep 22 23:14:28.085 INFO Current flush_numbers [0..12]: [0, 0]
22503 Sep 22 23:14:28.085 INFO [1] received reconcile message
22504 Sep 22 23:14:28.085 INFO [1] All repairs completed, exit
22505 Sep 22 23:14:28.085 INFO [1] Starts cmd_loop
22506 Sep 22 23:14:28.085 INFO Downstairs has completed Negotiation, task: proc
22507 Sep 22 23:14:28.085 INFO [2] received reconcile message
22508 Sep 22 23:14:28.085 INFO [2] All repairs completed, exit
22509 Sep 22 23:14:28.085 INFO [2] Starts cmd_loop
22510 The guest has finished waiting for activation
22511 Sep 22 23:14:28.085 INFO [0] 668b284a-1cf2-4b38-b8f6-907605c718a3 (5cfe049d-d730-4ad8-841b-451298810994) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
22512 Sep 22 23:14:28.085 INFO [0] Transition from WaitActive to WaitQuorum
22513 Sep 22 23:14:28.085 WARN [0] new RM replaced this: None
22514 Sep 22 23:14:28.085 INFO [0] Starts reconcile loop
22515 Sep 22 23:14:28.085 INFO [1] 668b284a-1cf2-4b38-b8f6-907605c718a3 (5cfe049d-d730-4ad8-841b-451298810994) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
22516 Sep 22 23:14:28.085 INFO [1] Transition from WaitActive to WaitQuorum
22517 Sep 22 23:14:28.085 WARN [1] new RM replaced this: None
22518 Sep 22 23:14:28.085 INFO [1] Starts reconcile loop
22519 Sep 22 23:14:28.085 INFO [2] 668b284a-1cf2-4b38-b8f6-907605c718a3 (5cfe049d-d730-4ad8-841b-451298810994) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
22520 Sep 22 23:14:28.085 INFO [2] Transition from WaitActive to WaitQuorum
22521 Sep 22 23:14:28.085 WARN [2] new RM replaced this: None
22522 Sep 22 23:14:28.085 INFO [2] Starts reconcile loop
22523 Sep 22 23:14:28.086 INFO [0] 127.0.0.1:53693 task reports connection:true
22524 Sep 22 23:14:28.086 INFO 668b284a-1cf2-4b38-b8f6-907605c718a3 WaitQuorum WaitQuorum WaitQuorum
22525 Sep 22 23:14:28.086 INFO [0]R flush_numbers: [0, 0]
22526 Sep 22 23:14:28.086 INFO [0]R generation: [0, 0]
22527 Sep 22 23:14:28.086 INFO [0]R dirty: [false, false]
22528 Sep 22 23:14:28.086 INFO [1]R flush_numbers: [0, 0]
22529 Sep 22 23:14:28.086 INFO [1]R generation: [0, 0]
22530 Sep 22 23:14:28.086 INFO [1]R dirty: [false, false]
22531 Sep 22 23:14:28.086 INFO [2]R flush_numbers: [0, 0]
22532 Sep 22 23:14:28.086 INFO [2]R generation: [0, 0]
22533 Sep 22 23:14:28.086 INFO [2]R dirty: [false, false]
22534 Sep 22 23:14:28.086 INFO Max found gen is 1
22535 Sep 22 23:14:28.086 INFO Generation requested: 1 >= found:1
22536 Sep 22 23:14:28.086 INFO Next flush: 1
22537 Sep 22 23:14:28.086 INFO All extents match
22538 Sep 22 23:14:28.086 INFO No downstairs repair required
22539 Sep 22 23:14:28.086 INFO No initial repair work was required
22540 Sep 22 23:14:28.086 INFO Set Downstairs and Upstairs active
22541 Sep 22 23:14:28.086 INFO 668b284a-1cf2-4b38-b8f6-907605c718a3 is now active with session: 5cfe049d-d730-4ad8-841b-451298810994
22542 Sep 22 23:14:28.086 INFO 668b284a-1cf2-4b38-b8f6-907605c718a3 Set Active after no repair
22543 Sep 22 23:14:28.086 INFO Notify all downstairs, region set compare is done.
22544 Sep 22 23:14:28.086 INFO Set check for repair
22545 Sep 22 23:14:28.086 INFO [1] 127.0.0.1:43883 task reports connection:true
22546 Sep 22 23:14:28.086 INFO 668b284a-1cf2-4b38-b8f6-907605c718a3 Active Active Active
22547 Sep 22 23:14:28.086 INFO Set check for repair
22548 Sep 22 23:14:28.086 INFO [2] 127.0.0.1:37204 task reports connection:true
22549 Sep 22 23:14:28.086 INFO 668b284a-1cf2-4b38-b8f6-907605c718a3 Active Active Active
22550 Sep 22 23:14:28.086 INFO Set check for repair
22551 Sep 22 23:14:28.086 INFO [0] received reconcile message
22552 Sep 22 23:14:28.086 INFO [0] All repairs completed, exit
22553 Sep 22 23:14:28.086 INFO [0] Starts cmd_loop
22554 Sep 22 23:14:28.086 INFO [1] received reconcile message
22555 Sep 22 23:14:28.086 INFO [1] All repairs completed, exit
22556 Sep 22 23:14:28.086 INFO [1] Starts cmd_loop
22557 Sep 22 23:14:28.086 INFO [2] received reconcile message
22558 Sep 22 23:14:28.086 INFO [2] All repairs completed, exit
22559 Sep 22 23:14:28.086 INFO [2] Starts cmd_loop
22560 The guest has finished waiting for activation
22561 Sep 22 23:14:28.086 INFO UUID: d07ff0ba-0bf5-4b77-9ec5-a7938bb906f6
22562 Sep 22 23:14:28.086 INFO Blocks per extent:5 Total Extents: 2
22563 Sep 22 23:14:28.086 INFO Crucible Version: Crucible Version: 0.0.1
22564 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22565 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22566 rustc: 1.70.0 stable x86_64-unknown-illumos
22567 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22568 Sep 22 23:14:28.086 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22569 Sep 22 23:14:28.086 INFO Using address: 127.0.0.1:61465, task: main
22570 Sep 22 23:14:28.087 INFO Repair listens on 127.0.0.1:0, task: repair
22571 Sep 22 23:14:28.087 DEBG IO Write 1000 has deps []
22572 Sep 22 23:14:28.087 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52938, task: repair
22573 Sep 22 23:14:28.087 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52938, task: repair
22574 Sep 22 23:14:28.087 INFO listening, local_addr: 127.0.0.1:52938, task: repair
22575 Sep 22 23:14:28.087 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52938, task: repair
22576 Sep 22 23:14:28.087 INFO Using repair address: 127.0.0.1:52938, task: main
22577 Sep 22 23:14:28.087 INFO No SSL acceptor configured, task: main
22578 Sep 22 23:14:28.087 INFO Upstairs starts
22579 Sep 22 23:14:28.087 INFO Crucible Version: BuildInfo {
22580 version: "0.0.1",
22581 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
22582 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
22583 git_branch: "main",
22584 rustc_semver: "1.70.0",
22585 rustc_channel: "stable",
22586 rustc_host_triple: "x86_64-unknown-illumos",
22587 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
22588 cargo_triple: "x86_64-unknown-illumos",
22589 debug: true,
22590 opt_level: 0,
22591 }
22592 Sep 22 23:14:28.087 INFO Upstairs <-> Downstairs Message Version: 4
22593 Sep 22 23:14:28.087 INFO Crucible stats registered with UUID: 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf
22594 Sep 22 23:14:28.087 INFO Crucible 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf has session id: 8b3cf6dc-0c13-435e-aadb-56c831fe7679
22595 Sep 22 23:14:28.088 INFO listening on 127.0.0.1:0, task: main
22596 Sep 22 23:14:28.088 INFO listening on 127.0.0.1:0, task: main
22597 Sep 22 23:14:28.088 INFO listening on 127.0.0.1:0, task: main
22598 Sep 22 23:14:28.088 INFO listening on 127.0.0.1:0, task: main
22599 Sep 22 23:14:28.088 INFO listening on 127.0.0.1:0, task: main
22600 Sep 22 23:14:28.088 INFO listening on 127.0.0.1:0, task: main
22601 Sep 22 23:14:28.088 INFO [0] connecting to 127.0.0.1:46618, looper: 0
22602 Sep 22 23:14:28.088 INFO [1] connecting to 127.0.0.1:49265, looper: 1
22603 Sep 22 23:14:28.088 DEBG IO Write 1000 has deps []
22604 Sep 22 23:14:28.088 INFO [2] connecting to 127.0.0.1:52057, looper: 2
22605 Sep 22 23:14:28.088 INFO up_listen starts, task: up_listen
22606 Sep 22 23:14:28.088 INFO Wait for all three downstairs to come online
22607 Sep 22 23:14:28.088 INFO Flush timeout: 0.5
22608 Sep 22 23:14:28.088 INFO accepted connection from 127.0.0.1:34129, task: main
22609 Sep 22 23:14:28.088 INFO accepted connection from 127.0.0.1:39645, task: main
22610 Sep 22 23:14:28.088 INFO accepted connection from 127.0.0.1:37248, task: main
22611 Sep 22 23:14:28.089 INFO [0] 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf looper connected, looper: 0
22612 Sep 22 23:14:28.089 INFO [0] Proc runs for 127.0.0.1:46618 in state New
22613 Sep 22 23:14:28.089 INFO [2] 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf looper connected, looper: 2
22614 Sep 22 23:14:28.089 INFO [2] Proc runs for 127.0.0.1:52057 in state New
22615 Sep 22 23:14:28.089 INFO [1] 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf looper connected, looper: 1
22616 Sep 22 23:14:28.089 DEBG up_ds_listen was notified
22617 Sep 22 23:14:28.089 INFO [1] Proc runs for 127.0.0.1:49265 in state New
22618 Sep 22 23:14:28.089 DEBG up_ds_listen process 1000
22619 Sep 22 23:14:28.089 DEBG [A] ack job 1000:1, : downstairs
22620 Sep 22 23:14:28.089 DEBG up_ds_listen checked 1 jobs, back to waiting
22621 Sep 22 23:14:28.089 INFO Upstairs starts
22622 Sep 22 23:14:28.089 INFO Crucible Version: BuildInfo {
22623 version: "0.0.1",
22624 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
22625 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
22626 git_branch: "main",
22627 rustc_semver: "1.70.0",
22628 rustc_channel: "stable",
22629 rustc_host_triple: "x86_64-unknown-illumos",
22630 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
22631 cargo_triple: "x86_64-unknown-illumos",
22632 debug: true,
22633 opt_level: 0,
22634 }
22635 Sep 22 23:14:28.089 INFO Upstairs <-> Downstairs Message Version: 4
22636 Sep 22 23:14:28.089 INFO Crucible stats registered with UUID: 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72
22637 Sep 22 23:14:28.089 DEBG IO Write 1000 has deps []
22638 Sep 22 23:14:28.089 INFO Crucible 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 has session id: 64f5e595-7972-4cba-a861-ca60f3d9d8a6
22639 Sep 22 23:14:28.089 INFO Connection request from 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf with version 4, task: proc
22640 Sep 22 23:14:28.089 INFO upstairs UpstairsConnection { upstairs_id: 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf, session_id: f4dd55be-06a0-4424-99ef-c92da9671dc7, gen: 1 } connected, version 4, task: proc
22641 Sep 22 23:14:28.089 INFO Connection request from 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf with version 4, task: proc
22642 Sep 22 23:14:28.090 INFO upstairs UpstairsConnection { upstairs_id: 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf, session_id: f4dd55be-06a0-4424-99ef-c92da9671dc7, gen: 1 } connected, version 4, task: proc
22643 Sep 22 23:14:28.090 INFO Connection request from 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf with version 4, task: proc
22644 Sep 22 23:14:28.090 INFO upstairs UpstairsConnection { upstairs_id: 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf, session_id: f4dd55be-06a0-4424-99ef-c92da9671dc7, gen: 1 } connected, version 4, task: proc
22645 Sep 22 23:14:28.090 INFO [0] connecting to 127.0.0.1:35251, looper: 0
22646 Sep 22 23:14:28.090 INFO [1] connecting to 127.0.0.1:33061, looper: 1
22647 Sep 22 23:14:28.090 INFO [2] connecting to 127.0.0.1:61465, looper: 2
22648 Sep 22 23:14:28.090 INFO up_listen starts, task: up_listen
22649 Sep 22 23:14:28.090 INFO Wait for all three downstairs to come online
22650 Sep 22 23:14:28.090 INFO Flush timeout: 0.5
22651 Sep 22 23:14:28.090 INFO accepted connection from 127.0.0.1:54798, task: main
22652 Sep 22 23:14:28.090 INFO accepted connection from 127.0.0.1:65146, task: main
22653 Sep 22 23:14:28.091 INFO accepted connection from 127.0.0.1:56088, task: main
22654 Sep 22 23:14:28.091 INFO [0] 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf (f4dd55be-06a0-4424-99ef-c92da9671dc7) New New New ds_transition to WaitActive
22655 Sep 22 23:14:28.091 INFO [0] Transition from New to WaitActive
22656 Sep 22 23:14:28.091 INFO [2] 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf (f4dd55be-06a0-4424-99ef-c92da9671dc7) WaitActive New New ds_transition to WaitActive
22657 Sep 22 23:14:28.091 INFO [2] Transition from New to WaitActive
22658 Sep 22 23:14:28.091 INFO [1] 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf (f4dd55be-06a0-4424-99ef-c92da9671dc7) WaitActive New WaitActive ds_transition to WaitActive
22659 Sep 22 23:14:28.091 INFO [1] Transition from New to WaitActive
22660 Sep 22 23:14:28.091 INFO [0] 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 looper connected, looper: 0
22661 Sep 22 23:14:28.091 INFO [0] Proc runs for 127.0.0.1:35251 in state New
22662 Sep 22 23:14:28.091 INFO [1] 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 looper connected, looper: 1
22663 Sep 22 23:14:28.091 INFO [1] Proc runs for 127.0.0.1:33061 in state New
22664 Sep 22 23:14:28.091 INFO [2] 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 looper connected, looper: 2
22665 Sep 22 23:14:28.091 INFO [2] Proc runs for 127.0.0.1:61465 in state New
22666 Sep 22 23:14:28.091 DEBG up_ds_listen was notified
22667 Sep 22 23:14:28.092 DEBG up_ds_listen process 1000
22668 Sep 22 23:14:28.092 DEBG [A] ack job 1000:1, : downstairs
22669 Sep 22 23:14:28.092 INFO Connection request from 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 with version 4, task: proc
22670 Sep 22 23:14:28.092 DEBG up_ds_listen checked 1 jobs, back to waiting
22671 Sep 22 23:14:28.092 INFO upstairs UpstairsConnection { upstairs_id: 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72, session_id: b557ec33-3655-4c40-910a-6ea3a4aafaac, gen: 1 } connected, version 4, task: proc
22672 Sep 22 23:14:28.092 INFO Connection request from 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 with version 4, task: proc
22673 Sep 22 23:14:28.092 INFO upstairs UpstairsConnection { upstairs_id: 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72, session_id: b557ec33-3655-4c40-910a-6ea3a4aafaac, gen: 1 } connected, version 4, task: proc
22674 Sep 22 23:14:28.092 INFO Connection request from 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 with version 4, task: proc
22675 Sep 22 23:14:28.092 INFO upstairs UpstairsConnection { upstairs_id: 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72, session_id: b557ec33-3655-4c40-910a-6ea3a4aafaac, gen: 1 } connected, version 4, task: proc
22676 Sep 22 23:14:28.092 INFO [0] 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 (b557ec33-3655-4c40-910a-6ea3a4aafaac) New New New ds_transition to WaitActive
22677 Sep 22 23:14:28.092 INFO [0] Transition from New to WaitActive
22678 Sep 22 23:14:28.092 INFO [1] 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 (b557ec33-3655-4c40-910a-6ea3a4aafaac) WaitActive New New ds_transition to WaitActive
22679 Sep 22 23:14:28.092 INFO [1] Transition from New to WaitActive
22680 Sep 22 23:14:28.092 INFO [2] 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 (b557ec33-3655-4c40-910a-6ea3a4aafaac) WaitActive WaitActive New ds_transition to WaitActive
22681 Sep 22 23:14:28.092 INFO [2] Transition from New to WaitActive
22682 The guest has requested activation
22683 Sep 22 23:14:28.093 INFO 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf active request set
22684 Sep 22 23:14:28.093 INFO [0] received activate with gen 1
22685 Sep 22 23:14:28.093 INFO [0] client got ds_active_rx, promote! session f4dd55be-06a0-4424-99ef-c92da9671dc7
22686 Sep 22 23:14:28.093 INFO [1] received activate with gen 1
22687 Sep 22 23:14:28.093 INFO [1] client got ds_active_rx, promote! session f4dd55be-06a0-4424-99ef-c92da9671dc7
22688 Sep 22 23:14:28.093 INFO [2] received activate with gen 1
22689 Sep 22 23:14:28.093 INFO [2] client got ds_active_rx, promote! session f4dd55be-06a0-4424-99ef-c92da9671dc7
22690 Sep 22 23:14:28.093 INFO UpstairsConnection { upstairs_id: 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf, session_id: f4dd55be-06a0-4424-99ef-c92da9671dc7, gen: 1 } is now active (read-write)
22691 Sep 22 23:14:28.093 INFO UpstairsConnection { upstairs_id: 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf, session_id: f4dd55be-06a0-4424-99ef-c92da9671dc7, gen: 1 } is now active (read-write)
22692 Sep 22 23:14:28.093 INFO UpstairsConnection { upstairs_id: 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf, session_id: f4dd55be-06a0-4424-99ef-c92da9671dc7, gen: 1 } is now active (read-write)
22693 Sep 22 23:14:28.093 DEBG up_ds_listen was notified
22694 Sep 22 23:14:28.093 DEBG up_ds_listen process 1000
22695 Sep 22 23:14:28.094 DEBG [A] ack job 1000:1, : downstairs
22696 Sep 22 23:14:28.094 DEBG up_ds_listen checked 1 jobs, back to waiting
22697 Sep 22 23:14:28.094 INFO [0] downstairs client at 127.0.0.1:46618 has UUID e867ee61-247c-4a29-967e-48e3c07ee764
22698 Sep 22 23:14:28.094 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: e867ee61-247c-4a29-967e-48e3c07ee764, encrypted: true, database_read_version: 1, database_write_version: 1 }
22699 Sep 22 23:14:28.094 INFO 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf WaitActive WaitActive WaitActive
22700 Sep 22 23:14:28.094 INFO [2] downstairs client at 127.0.0.1:52057 has UUID 4b9378e8-0a63-4886-801a-4023859bd2fc
22701 Sep 22 23:14:28.094 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4b9378e8-0a63-4886-801a-4023859bd2fc, encrypted: true, database_read_version: 1, database_write_version: 1 }
22702 Sep 22 23:14:28.094 INFO 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf WaitActive WaitActive WaitActive
22703 Sep 22 23:14:28.094 INFO [1] downstairs client at 127.0.0.1:49265 has UUID 01a6d79b-23e4-44c1-a6f8-9804b08fb8c2
22704 Sep 22 23:14:28.094 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 01a6d79b-23e4-44c1-a6f8-9804b08fb8c2, encrypted: true, database_read_version: 1, database_write_version: 1 }
22705 Sep 22 23:14:28.094 INFO 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf WaitActive WaitActive WaitActive
22706 Sep 22 23:14:28.094 INFO Current flush_numbers [0..12]: [0, 0]
22707 Sep 22 23:14:28.094 DEBG IO Write 1001 has deps [JobId(1000)]
22708 Sep 22 23:14:28.095 INFO Downstairs has completed Negotiation, task: proc
22709 Sep 22 23:14:28.095 INFO Current flush_numbers [0..12]: [0, 0]
22710 Sep 22 23:14:28.095 INFO Downstairs has completed Negotiation, task: proc
22711 Sep 22 23:14:28.095 INFO Current flush_numbers [0..12]: [0, 0]
22712 Sep 22 23:14:28.095 INFO Downstairs has completed Negotiation, task: proc
22713 Sep 22 23:14:28.096 INFO [0] 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf (f4dd55be-06a0-4424-99ef-c92da9671dc7) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
22714 Sep 22 23:14:28.096 INFO [0] Transition from WaitActive to WaitQuorum
22715 Sep 22 23:14:28.096 WARN [0] new RM replaced this: None
22716 Sep 22 23:14:28.096 INFO [0] Starts reconcile loop
22717 Sep 22 23:14:28.096 INFO [2] 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf (f4dd55be-06a0-4424-99ef-c92da9671dc7) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
22718 Sep 22 23:14:28.096 INFO [2] Transition from WaitActive to WaitQuorum
22719 Sep 22 23:14:28.096 WARN [2] new RM replaced this: None
22720 Sep 22 23:14:28.096 INFO [2] Starts reconcile loop
22721 Sep 22 23:14:28.096 INFO [1] 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf (f4dd55be-06a0-4424-99ef-c92da9671dc7) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum
22722 Sep 22 23:14:28.096 INFO [1] Transition from WaitActive to WaitQuorum
22723 Sep 22 23:14:28.096 WARN [1] new RM replaced this: None
22724 Sep 22 23:14:28.096 INFO [1] Starts reconcile loop
22725 Sep 22 23:14:28.096 INFO [0] 127.0.0.1:46618 task reports connection:true
22726 Sep 22 23:14:28.096 INFO 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf WaitQuorum WaitQuorum WaitQuorum
22727 Sep 22 23:14:28.096 INFO [0]R flush_numbers: [0, 0]
22728 Sep 22 23:14:28.096 INFO [0]R generation: [0, 0]
22729 Sep 22 23:14:28.096 INFO [0]R dirty: [false, false]
22730 Sep 22 23:14:28.096 DEBG IO Write 1000 has deps []
22731 Sep 22 23:14:28.096 INFO [1]R flush_numbers: [0, 0]
22732 Sep 22 23:14:28.096 INFO [1]R generation: [0, 0]
22733 Sep 22 23:14:28.096 INFO [1]R dirty: [false, false]
22734 Sep 22 23:14:28.096 INFO [2]R flush_numbers: [0, 0]
22735 Sep 22 23:14:28.096 INFO [2]R generation: [0, 0]
22736 Sep 22 23:14:28.096 INFO [2]R dirty: [false, false]
22737 Sep 22 23:14:28.096 INFO Max found gen is 1
22738 Sep 22 23:14:28.096 INFO Generation requested: 1 >= found:1
22739 Sep 22 23:14:28.096 INFO Next flush: 1
22740 Sep 22 23:14:28.096 INFO All extents match
22741 Sep 22 23:14:28.096 INFO No downstairs repair required
22742 Sep 22 23:14:28.096 INFO No initial repair work was required
22743 Sep 22 23:14:28.096 INFO Set Downstairs and Upstairs active
22744 Sep 22 23:14:28.096 INFO 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf is now active with session: f4dd55be-06a0-4424-99ef-c92da9671dc7
22745 Sep 22 23:14:28.096 INFO 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf Set Active after no repair
22746 Sep 22 23:14:28.096 INFO Notify all downstairs, region set compare is done.
22747 Sep 22 23:14:28.096 INFO Set check for repair
22748 Sep 22 23:14:28.096 INFO [2] 127.0.0.1:52057 task reports connection:true
22749 Sep 22 23:14:28.096 INFO 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf Active Active Active
22750 Sep 22 23:14:28.096 INFO Set check for repair
22751 Sep 22 23:14:28.096 INFO [1] 127.0.0.1:49265 task reports connection:true
22752 Sep 22 23:14:28.096 INFO 8cdc62de-7a87-47c6-9c8c-02c45f1c00bf Active Active Active
22753 Sep 22 23:14:28.096 INFO Set check for repair
22754 Sep 22 23:14:28.096 INFO [0] received reconcile message
22755 Sep 22 23:14:28.096 INFO [0] All repairs completed, exit
22756 Sep 22 23:14:28.096 INFO [0] Starts cmd_loop
22757 Sep 22 23:14:28.096 INFO [1] received reconcile message
22758 Sep 22 23:14:28.096 INFO [1] All repairs completed, exit
22759 Sep 22 23:14:28.096 INFO [1] Starts cmd_loop
22760 Sep 22 23:14:28.096 INFO [2] received reconcile message
22761 Sep 22 23:14:28.096 INFO [2] All repairs completed, exit
22762 Sep 22 23:14:28.097 INFO [2] Starts cmd_loop
22763 The guest has finished waiting for activation
22764 The guest has requested activation
22765 Sep 22 23:14:28.097 INFO 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 active request set
22766 Sep 22 23:14:28.097 INFO [0] received activate with gen 1
22767 Sep 22 23:14:28.097 INFO [0] client got ds_active_rx, promote! session b557ec33-3655-4c40-910a-6ea3a4aafaac
22768 Sep 22 23:14:28.097 INFO [1] received activate with gen 1
22769 Sep 22 23:14:28.097 INFO [1] client got ds_active_rx, promote! session b557ec33-3655-4c40-910a-6ea3a4aafaac
22770 Sep 22 23:14:28.097 INFO [2] received activate with gen 1
22771 Sep 22 23:14:28.097 INFO [2] client got ds_active_rx, promote! session b557ec33-3655-4c40-910a-6ea3a4aafaac
22772 Sep 22 23:14:28.097 INFO UpstairsConnection { upstairs_id: 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72, session_id: b557ec33-3655-4c40-910a-6ea3a4aafaac, gen: 1 } is now active (read-write)
22773 Sep 22 23:14:28.097 INFO UpstairsConnection { upstairs_id: 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72, session_id: b557ec33-3655-4c40-910a-6ea3a4aafaac, gen: 1 } is now active (read-write)
22774 Sep 22 23:14:28.097 INFO UpstairsConnection { upstairs_id: 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72, session_id: b557ec33-3655-4c40-910a-6ea3a4aafaac, gen: 1 } is now active (read-write)
22775 Sep 22 23:14:28.098 INFO [0] downstairs client at 127.0.0.1:35251 has UUID b901037e-6e64-43d6-a637-75008a7adf81
22776 Sep 22 23:14:28.098 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b901037e-6e64-43d6-a637-75008a7adf81, encrypted: true, database_read_version: 1, database_write_version: 1 }
22777 Sep 22 23:14:28.098 INFO 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 WaitActive WaitActive WaitActive
22778 Sep 22 23:14:28.098 INFO [1] downstairs client at 127.0.0.1:33061 has UUID b54ba00b-56b7-4fec-8b06-0390fa4a34ff
22779 Sep 22 23:14:28.098 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b54ba00b-56b7-4fec-8b06-0390fa4a34ff, encrypted: true, database_read_version: 1, database_write_version: 1 }
22780 Sep 22 23:14:28.098 INFO 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 WaitActive WaitActive WaitActive
22781 Sep 22 23:14:28.098 INFO [2] downstairs client at 127.0.0.1:61465 has UUID d07ff0ba-0bf5-4b77-9ec5-a7938bb906f6
22782 Sep 22 23:14:28.098 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: d07ff0ba-0bf5-4b77-9ec5-a7938bb906f6, encrypted: true, database_read_version: 1, database_write_version: 1 }
22783 Sep 22 23:14:28.098 INFO 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 WaitActive WaitActive WaitActive
22784 Sep 22 23:14:28.098 INFO Current flush_numbers [0..12]: [0, 0]
22785 Sep 22 23:14:28.098 INFO Downstairs has completed Negotiation, task: proc
22786 Sep 22 23:14:28.099 INFO Current flush_numbers [0..12]: [0, 0]
22787 Sep 22 23:14:28.099 INFO Downstairs has completed Negotiation, task: proc
22788 Sep 22 23:14:28.099 INFO Current flush_numbers [0..12]: [0, 0]
22789 Sep 22 23:14:28.099 INFO Downstairs has completed Negotiation, task: proc
22790 Sep 22 23:14:28.099 INFO [0] 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 (b557ec33-3655-4c40-910a-6ea3a4aafaac) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
22791 Sep 22 23:14:28.099 INFO [0] Transition from WaitActive to WaitQuorum
22792 Sep 22 23:14:28.099 WARN [0] new RM replaced this: None
22793 Sep 22 23:14:28.099 INFO [0] Starts reconcile loop
22794 Sep 22 23:14:28.099 INFO [1] 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 (b557ec33-3655-4c40-910a-6ea3a4aafaac) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
22795 Sep 22 23:14:28.099 INFO [1] Transition from WaitActive to WaitQuorum
22796 Sep 22 23:14:28.099 WARN [1] new RM replaced this: None
22797 Sep 22 23:14:28.099 INFO [1] Starts reconcile loop
22798 Sep 22 23:14:28.099 INFO [2] 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 (b557ec33-3655-4c40-910a-6ea3a4aafaac) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
22799 Sep 22 23:14:28.099 INFO [2] Transition from WaitActive to WaitQuorum
22800 Sep 22 23:14:28.099 WARN [2] new RM replaced this: None
22801 Sep 22 23:14:28.100 INFO [2] Starts reconcile loop
22802 Sep 22 23:14:28.100 INFO [0] 127.0.0.1:35251 task reports connection:true
22803 Sep 22 23:14:28.100 INFO 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 WaitQuorum WaitQuorum WaitQuorum
22804 Sep 22 23:14:28.100 INFO [0]R flush_numbers: [0, 0]
22805 Sep 22 23:14:28.100 INFO [0]R generation: [0, 0]
22806 Sep 22 23:14:28.100 INFO [0]R dirty: [false, false]
22807 Sep 22 23:14:28.100 INFO [1]R flush_numbers: [0, 0]
22808 Sep 22 23:14:28.100 INFO [1]R generation: [0, 0]
22809 Sep 22 23:14:28.100 INFO [1]R dirty: [false, false]
22810 Sep 22 23:14:28.100 INFO [2]R flush_numbers: [0, 0]
22811 Sep 22 23:14:28.100 INFO [2]R generation: [0, 0]
22812 Sep 22 23:14:28.100 INFO [2]R dirty: [false, false]
22813 Sep 22 23:14:28.100 INFO Max found gen is 1
22814 Sep 22 23:14:28.100 INFO Generation requested: 1 >= found:1
22815 Sep 22 23:14:28.100 INFO Next flush: 1
22816 Sep 22 23:14:28.100 INFO All extents match
22817 Sep 22 23:14:28.100 INFO No downstairs repair required
22818 Sep 22 23:14:28.100 INFO No initial repair work was required
22819 Sep 22 23:14:28.100 INFO Set Downstairs and Upstairs active
22820 Sep 22 23:14:28.100 INFO 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 is now active with session: b557ec33-3655-4c40-910a-6ea3a4aafaac
22821 Sep 22 23:14:28.100 INFO 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 Set Active after no repair
22822 Sep 22 23:14:28.100 INFO Notify all downstairs, region set compare is done.
22823 Sep 22 23:14:28.100 INFO Set check for repair
22824 Sep 22 23:14:28.100 INFO [1] 127.0.0.1:33061 task reports connection:true
22825 Sep 22 23:14:28.100 INFO 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 Active Active Active
22826 Sep 22 23:14:28.100 INFO Set check for repair
22827 Sep 22 23:14:28.100 INFO [2] 127.0.0.1:61465 task reports connection:true
22828 Sep 22 23:14:28.100 INFO 23d5a1a6-b740-4ec5-ba77-4864ba7d7d72 Active Active Active
22829 Sep 22 23:14:28.100 INFO Set check for repair
22830 Sep 22 23:14:28.100 DEBG up_ds_listen was notified
22831 Sep 22 23:14:28.100 INFO [0] received reconcile message
22832 Sep 22 23:14:28.100 DEBG up_ds_listen process 1001
22833 Sep 22 23:14:28.100 INFO [0] All repairs completed, exit
22834 Sep 22 23:14:28.100 DEBG [A] ack job 1001:2, : downstairs
22835 Sep 22 23:14:28.100 INFO [0] Starts cmd_loop
22836 Sep 22 23:14:28.100 DEBG up_ds_listen checked 1 jobs, back to waiting
22837 Sep 22 23:14:28.100 INFO [1] received reconcile message
22838 Sep 22 23:14:28.100 INFO [1] All repairs completed, exit
22839 Sep 22 23:14:28.100 INFO [1] Starts cmd_loop
22840 Sep 22 23:14:28.100 INFO [2] received reconcile message
22841 Sep 22 23:14:28.100 INFO [2] All repairs completed, exit
22842 Sep 22 23:14:28.100 INFO [2] Starts cmd_loop
22843 The guest has finished waiting for activation
22844 Sep 22 23:14:28.101 DEBG IO Write 1000 has deps []
22845 Sep 22 23:14:28.101 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
22846 Sep 22 23:14:28.101 DEBG up_ds_listen was notified
22847 Sep 22 23:14:28.101 DEBG up_ds_listen process 1002
22848 Sep 22 23:14:28.101 DEBG [A] ack job 1002:3, : downstairs
22849 Sep 22 23:14:28.101 DEBG up_ds_listen checked 1 jobs, back to waiting
22850 Sep 22 23:14:28.102 DEBG up_ds_listen was notified
22851 Sep 22 23:14:28.102 DEBG up_ds_listen process 1000
22852 Sep 22 23:14:28.102 DEBG [A] ack job 1000:1, : downstairs
22853 Sep 22 23:14:28.102 DEBG up_ds_listen checked 1 jobs, back to waiting
22854 Sep 22 23:14:28.102 DEBG IO Read 1001 has deps [JobId(1000)]
22855 Sep 22 23:14:28.102 DEBG up_ds_listen was notified
22856 Sep 22 23:14:28.103 DEBG up_ds_listen process 1000
22857 Sep 22 23:14:28.103 DEBG [A] ack job 1000:1, : downstairs
22858 Sep 22 23:14:28.103 DEBG up_ds_listen checked 1 jobs, back to waiting
22859 Sep 22 23:14:28.103 DEBG Write :1002 deps:[JobId(1001), JobId(1000)] res:true
22860 Sep 22 23:14:28.103 DEBG IO Write 1000 has deps []
22861 Sep 22 23:14:28.103 DEBG Read :1001 deps:[JobId(1000)] res:true
22862 Sep 22 23:14:28.103 DEBG Write :1002 deps:[JobId(1001), JobId(1000)] res:true
22863 Sep 22 23:14:28.103 INFO current number of open files limit 65536 is already the maximum
22864 Sep 22 23:14:28.103 INFO Opened existing region file "/tmp/downstairs-WRx1oCsz/region.json"
22865 Sep 22 23:14:28.103 INFO Database read version 1
22866 Sep 22 23:14:28.103 INFO Database write version 1
22867 Sep 22 23:14:28.103 DEBG Write :1002 deps:[JobId(1001), JobId(1000)] res:true
22868 Sep 22 23:14:28.103 DEBG Read :1001 deps:[JobId(1000)] res:true
22869 Sep 22 23:14:28.104 DEBG Read :1001 deps:[JobId(1000)] res:true
22870 Sep 22 23:14:28.105 DEBG up_ds_listen was notified
22871 Sep 22 23:14:28.105 DEBG up_ds_listen process 1000
22872 Sep 22 23:14:28.105 DEBG [A] ack job 1000:1, : downstairs
22873 Sep 22 23:14:28.105 DEBG up_ds_listen checked 1 jobs, back to waiting
22874 Sep 22 23:14:28.105 DEBG IO Read 1001 has deps [JobId(1000)]
22875 Sep 22 23:14:28.106 INFO UUID: bbec14f8-e0c9-4fdd-8394-c7675df3174b
22876 Sep 22 23:14:28.106 INFO Blocks per extent:512 Total Extents: 188
22877 Sep 22 23:14:28.106 INFO Crucible Version: Crucible Version: 0.0.1
22878 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
22879 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
22880 rustc: 1.70.0 stable x86_64-unknown-illumos
22881 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
22882 Sep 22 23:14:28.106 INFO Upstairs <-> Downstairs Message Version: 4, task: main
22883 Sep 22 23:14:28.106 INFO Using address: 127.0.0.1:53298, task: main
22884 Sep 22 23:14:28.106 DEBG Read :1001 deps:[JobId(1000)] res:true
22885 Sep 22 23:14:28.106 DEBG Read :1001 deps:[JobId(1000)] res:true
22886 Sep 22 23:14:28.106 INFO Repair listens on 127.0.0.1:0, task: repair
22887 Sep 22 23:14:28.106 DEBG Read :1001 deps:[JobId(1000)] res:true
22888 Sep 22 23:14:28.106 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:45028, task: repair
22889 Sep 22 23:14:28.106 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:45028, task: repair
22890 Sep 22 23:14:28.106 INFO listening, local_addr: 127.0.0.1:45028, task: repair
22891 Sep 22 23:14:28.106 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:45028, task: repair
22892 Sep 22 23:14:28.106 INFO Using repair address: 127.0.0.1:45028, task: main
22893 Sep 22 23:14:28.106 INFO No SSL acceptor configured, task: main
22894 Sep 22 23:14:28.106 DEBG IO Write 1001 has deps [JobId(1000)]
22895 Sep 22 23:14:28.106 DEBG [0] Read AckReady 1001, : downstairs
22896 Sep 22 23:14:28.106 DEBG up_ds_listen was notified
22897 Sep 22 23:14:28.107 DEBG up_ds_listen process 1001
22898 Sep 22 23:14:28.107 DEBG [A] ack job 1001:2, : downstairs
22899 Sep 22 23:14:28.107 DEBG up_ds_listen checked 1 jobs, back to waiting
22900 Sep 22 23:14:28.107 DEBG [2] Read already AckReady 1001, : downstairs
22901 Sep 22 23:14:28.107 DEBG [1] Read already AckReady 1001, : downstairs
22902 Sep 22 23:14:28.107 DEBG up_ds_listen was notified
22903 Sep 22 23:14:28.107 DEBG up_ds_listen process 1001
22904 Sep 22 23:14:28.107 DEBG [A] ack job 1001:2, : downstairs
22905 Sep 22 23:14:28.107 DEBG up_ds_listen checked 1 jobs, back to waiting
22906 Sep 22 23:14:28.107 DEBG IO Read 1001 has deps [JobId(1000)]
22907 Sep 22 23:14:28.108 DEBG Read :1001 deps:[JobId(1000)] res:true
22908 Sep 22 23:14:28.108 DEBG Read :1001 deps:[JobId(1000)] res:true
22909 Sep 22 23:14:28.108 DEBG Read :1001 deps:[JobId(1000)] res:true
22910 Sep 22 23:14:28.108 DEBG [0] Read AckReady 1001, : downstairs
22911 Sep 22 23:14:28.109 DEBG [0] Read AckReady 1001, : downstairs
22912 Sep 22 23:14:28.109 DEBG [1] Read already AckReady 1001, : downstairs
22913 Sep 22 23:14:28.109 DEBG [2] Read already AckReady 1001, : downstairs
22914 Sep 22 23:14:28.109 DEBG up_ds_listen was notified
22915 Sep 22 23:14:28.109 DEBG IO Flush 1001 has deps [JobId(1000)]
22916 Sep 22 23:14:28.109 DEBG up_ds_listen process 1001
22917 Sep 22 23:14:28.109 DEBG [A] ack job 1001:2, : downstairs
22918 Sep 22 23:14:28.109 DEBG up_ds_listen checked 1 jobs, back to waiting
22919 Sep 22 23:14:28.110 DEBG Write :1001 deps:[JobId(1000)] res:true
22920 Sep 22 23:14:28.110 DEBG Write :1001 deps:[JobId(1000)] res:true
22921 Sep 22 23:14:28.110 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
22922 Sep 22 23:14:28.111 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
22923 Sep 22 23:14:28.111 DEBG up_ds_listen was notified
22924 Sep 22 23:14:28.111 DEBG up_ds_listen process 1001
22925 Sep 22 23:14:28.111 DEBG [A] ack job 1001:2, : downstairs
22926 Sep 22 23:14:28.111 DEBG [1] Read already AckReady 1001, : downstairs
22927 Sep 22 23:14:28.111 DEBG Write :1001 deps:[JobId(1000)] res:true
22928 Sep 22 23:14:28.111 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
22929 Sep 22 23:14:28.111 DEBG up_ds_listen checked 1 jobs, back to waiting
22930 Sep 22 23:14:28.111 DEBG IO Read 1003 has deps [JobId(1002), JobId(1001), JobId(1000)]
22931 Sep 22 23:14:28.112 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
22932 Sep 22 23:14:28.112 DEBG Read :1003 deps:[JobId(1002), JobId(1001), JobId(1000)] res:true
22933 Sep 22 23:14:28.113 DEBG Read :1003 deps:[JobId(1002), JobId(1001), JobId(1000)] res:true
22934 Sep 22 23:14:28.113 DEBG Read :1003 deps:[JobId(1002), JobId(1001), JobId(1000)] res:true
22935 Sep 22 23:14:28.114 DEBG [2] Read already AckReady 1001, : downstairs
22936 Sep 22 23:14:28.114 DEBG up_ds_listen was notified
22937 Sep 22 23:14:28.114 DEBG up_ds_listen process 1001
22938 Sep 22 23:14:28.114 DEBG [A] ack job 1001:2, : downstairs
22939 Sep 22 23:14:28.114 DEBG up_ds_listen checked 1 jobs, back to waiting
22940 Sep 22 23:14:28.114 DEBG IO Read 1001 has deps [JobId(1000)]
22941 Sep 22 23:14:28.115 DEBG up_ds_listen was notified
22942 Sep 22 23:14:28.115 DEBG up_ds_listen process 1002
22943 Sep 22 23:14:28.115 DEBG [A] ack job 1002:3, : downstairs
22944 Sep 22 23:14:28.115 DEBG up_ds_listen checked 1 jobs, back to waiting
22945 Sep 22 23:14:28.115 DEBG Read :1001 deps:[JobId(1000)] res:true
22946 Sep 22 23:14:28.116 DEBG Read :1001 deps:[JobId(1000)] res:true
22947 Sep 22 23:14:28.116 DEBG Read :1001 deps:[JobId(1000)] res:true
22948 Sep 22 23:14:28.117 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
22949 Sep 22 23:14:28.117 INFO current number of open files limit 65536 is already the maximum
22950 Sep 22 23:14:28.117 INFO Created new region file "/tmp/downstairs-SvxEiywd/region.json"
22951 Sep 22 23:14:28.118 DEBG [0] Read AckReady 1003, : downstairs
22952 Sep 22 23:14:28.120 DEBG up_ds_listen was notified
22953 Sep 22 23:14:28.120 DEBG up_ds_listen process 1002
22954 Sep 22 23:14:28.120 DEBG [A] ack job 1002:3, : downstairs
22955 Sep 22 23:14:28.120 DEBG up_ds_listen checked 1 jobs, back to waiting
22956 Sep 22 23:14:28.120 DEBG IO Read 1003 has deps [JobId(1002), JobId(1000)]
22957 Sep 22 23:14:28.120 DEBG [1] Read already AckReady 1003, : downstairs
22958 Sep 22 23:14:28.120 DEBG [1] Read AckReady 1001, : downstairs
22959 Sep 22 23:14:28.121 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
22960 Sep 22 23:14:28.121 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
22961 Sep 22 23:14:28.122 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
22962 Sep 22 23:14:28.123 DEBG [2] Read already AckReady 1003, : downstairs
22963 Sep 22 23:14:28.123 DEBG up_ds_listen was notified
22964 Sep 22 23:14:28.123 DEBG [0] Read already AckReady 1001, : downstairs
22965 Sep 22 23:14:28.123 DEBG up_ds_listen process 1003
22966 Sep 22 23:14:28.123 DEBG [A] ack job 1003:4, : downstairs
22967 Sep 22 23:14:28.124 DEBG up_ds_listen checked 1 jobs, back to waiting
22968 Sep 22 23:14:28.124 DEBG IO Read 1002 has deps [JobId(1001), JobId(1000)]
22969 Sep 22 23:14:28.124 DEBG [0] Read AckReady 1003, : downstairs
22970 Sep 22 23:14:28.125 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
22971 Sep 22 23:14:28.125 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
22972 Sep 22 23:14:28.126 DEBG [2] Read already AckReady 1003, : downstairs
22973 Sep 22 23:14:28.126 DEBG [2] Read already AckReady 1001, : downstairs
22974 Sep 22 23:14:28.126 DEBG Read :1002 deps:[JobId(1001), JobId(1000)] res:true
22975 Sep 22 23:14:28.126 DEBG up_ds_listen was notified
22976 Sep 22 23:14:28.126 DEBG up_ds_listen process 1001
22977 Sep 22 23:14:28.126 DEBG [A] ack job 1001:2, : downstairs
22978 Sep 22 23:14:28.127 DEBG up_ds_listen checked 1 jobs, back to waiting
22979 Sep 22 23:14:28.128 DEBG [1] Read already AckReady 1003, : downstairs
22980 Sep 22 23:14:28.128 DEBG up_ds_listen was notified
22981 Sep 22 23:14:28.128 DEBG up_ds_listen process 1003
22982 Sep 22 23:14:28.128 DEBG [A] ack job 1003:4, : downstairs
22983 Sep 22 23:14:28.128 DEBG up_ds_listen checked 1 jobs, back to waiting
22984 Sep 22 23:14:28.128 DEBG IO Read 1003 has deps [JobId(1002), JobId(1000)]
22985 Sep 22 23:14:28.129 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
22986 Sep 22 23:14:28.129 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
22987 Sep 22 23:14:28.129 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
22988 Sep 22 23:14:28.130 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
22989 Sep 22 23:14:28.130 DEBG [0] Read AckReady 1002, : downstairs
22990 Sep 22 23:14:28.132 DEBG [0] Read AckReady 1003, : downstairs
22991 Sep 22 23:14:28.133 DEBG [1] Read already AckReady 1002, : downstairs
22992 Sep 22 23:14:28.133 DEBG up_ds_listen was notified
22993 Sep 22 23:14:28.133 DEBG up_ds_listen process 1002
22994 Sep 22 23:14:28.133 DEBG [A] ack job 1002:3, : downstairs
22995 Sep 22 23:14:28.133 DEBG up_ds_listen checked 1 jobs, back to waiting
22996 Sep 22 23:14:28.134 DEBG [1] Read already AckReady 1003, : downstairs
22997 Sep 22 23:14:28.136 DEBG [2] Read already AckReady 1003, : downstairs
22998 Sep 22 23:14:28.136 DEBG up_ds_listen was notified
22999 Sep 22 23:14:28.136 DEBG up_ds_listen process 1003
23000 Sep 22 23:14:28.136 DEBG [A] ack job 1003:4, : downstairs
23001 Sep 22 23:14:28.136 DEBG [2] Read already AckReady 1002, : downstairs
23002 Sep 22 23:14:28.136 DEBG up_ds_listen was notified
23003 Sep 22 23:14:28.136 DEBG up_ds_listen process 1002
23004 Sep 22 23:14:28.136 DEBG up_ds_listen checked 1 jobs, back to waiting
23005 Sep 22 23:14:28.136 DEBG [A] ack job 1002:3, : downstairs
23006 Sep 22 23:14:28.136 DEBG IO Write 1002 has deps [JobId(1001), JobId(1000)]
23007 Sep 22 23:14:28.136 DEBG up_ds_listen checked 1 jobs, back to waiting
23008 Sep 22 23:14:28.142 DEBG up_ds_listen was notified
23009 Sep 22 23:14:28.142 DEBG up_ds_listen process 1002
23010 Sep 22 23:14:28.142 DEBG [A] ack job 1002:3, : downstairs
23011 Sep 22 23:14:28.142 DEBG up_ds_listen checked 1 jobs, back to waiting
23012 Sep 22 23:14:28.142 DEBG IO Read 1003 has deps [JobId(1002), JobId(1000)]
23013 test test::integration_test_volume_write_unwritten_subvols_3 ... ok
23014 Sep 22 23:14:28.143 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23015 Sep 22 23:14:28.143 INFO current number of open files limit 65536 is already the maximum
23016 test test::integration_test_volume_write_unwritten_subvols_sparse ... ok
23017 Sep 22 23:14:28.143 INFO Created new region file "/tmp/downstairs-6IEHRjPM/region.json"
23018 Sep 22 23:14:28.143 INFO current number of open files limit 65536 is already the maximum
23019 Sep 22 23:14:28.143 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23020 Sep 22 23:14:28.143 INFO Created new region file "/tmp/downstairs-2o76BUZT/region.json"
23021 Sep 22 23:14:28.144 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23022 Sep 22 23:14:28.146 INFO current number of open files limit 65536 is already the maximum
23023 Sep 22 23:14:28.147 INFO Opened existing region file "/tmp/downstairs-2o76BUZT/region.json"
23024 Sep 22 23:14:28.147 INFO Database read version 1
23025 Sep 22 23:14:28.147 INFO Database write version 1
23026 Sep 22 23:14:28.147 INFO UUID: 16035766-fcf1-4aff-8be5-4662118fbeba
23027 Sep 22 23:14:28.147 INFO Blocks per extent:5 Total Extents: 2
23028 Sep 22 23:14:28.147 INFO Crucible Version: Crucible Version: 0.0.1
23029 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23030 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23031 rustc: 1.70.0 stable x86_64-unknown-illumos
23032 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23033 Sep 22 23:14:28.148 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23034 Sep 22 23:14:28.148 INFO Using address: 127.0.0.1:64713, task: main
23035 Sep 22 23:14:28.148 INFO Repair listens on 127.0.0.1:0, task: repair
23036 Sep 22 23:14:28.148 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:43597, task: repair
23037 Sep 22 23:14:28.148 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:43597, task: repair
23038 Sep 22 23:14:28.148 INFO listening, local_addr: 127.0.0.1:43597, task: repair
23039 Sep 22 23:14:28.148 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:43597, task: repair
23040 Sep 22 23:14:28.148 INFO Using repair address: 127.0.0.1:43597, task: main
23041 Sep 22 23:14:28.148 INFO No SSL acceptor configured, task: main
23042 Sep 22 23:14:28.148 DEBG [0] Read AckReady 1003, : downstairs
23043 Sep 22 23:14:28.148 INFO current number of open files limit 65536 is already the maximum
23044 Sep 22 23:14:28.148 INFO Created new region file "/tmp/downstairs-etPhPbAg/region.json"
23045 Sep 22 23:14:28.150 INFO current number of open files limit 65536 is already the maximum
23046 Sep 22 23:14:28.151 INFO Opened existing region file "/tmp/downstairs-etPhPbAg/region.json"
23047 Sep 22 23:14:28.151 INFO Database read version 1
23048 Sep 22 23:14:28.151 INFO Database write version 1
23049 Sep 22 23:14:28.151 DEBG [1] Read already AckReady 1003, : downstairs
23050 Sep 22 23:14:28.152 INFO UUID: c1ff2b45-bb0b-4e95-bfb6-6e2ba62da531
23051 Sep 22 23:14:28.152 INFO Blocks per extent:5 Total Extents: 2
23052 Sep 22 23:14:28.152 INFO Crucible Version: Crucible Version: 0.0.1
23053 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23054 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23055 rustc: 1.70.0 stable x86_64-unknown-illumos
23056 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23057 Sep 22 23:14:28.152 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23058 Sep 22 23:14:28.152 INFO Using address: 127.0.0.1:60775, task: main
23059 Sep 22 23:14:28.152 INFO Repair listens on 127.0.0.1:0, task: repair
23060 Sep 22 23:14:28.152 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:42948, task: repair
23061 Sep 22 23:14:28.152 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:42948, task: repair
23062 Sep 22 23:14:28.152 INFO listening, local_addr: 127.0.0.1:42948, task: repair
23063 Sep 22 23:14:28.152 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:42948, task: repair
23064 Sep 22 23:14:28.152 INFO Using repair address: 127.0.0.1:42948, task: main
23065 Sep 22 23:14:28.152 INFO No SSL acceptor configured, task: main
23066 Sep 22 23:14:28.152 INFO current number of open files limit 65536 is already the maximum
23067 Sep 22 23:14:28.152 INFO Created new region file "/tmp/downstairs-19iRuZ8i/region.json"
23068 Sep 22 23:14:28.154 DEBG [2] Read already AckReady 1003, : downstairs
23069 Sep 22 23:14:28.154 DEBG up_ds_listen was notified
23070 Sep 22 23:14:28.154 DEBG up_ds_listen process 1003
23071 Sep 22 23:14:28.154 DEBG [A] ack job 1003:4, : downstairs
23072 Sep 22 23:14:28.154 DEBG up_ds_listen checked 1 jobs, back to waiting
23073 Sep 22 23:14:28.155 DEBG IO Read 1003 has deps [JobId(1002), JobId(1000)]
23074 Sep 22 23:14:28.155 INFO current number of open files limit 65536 is already the maximum
23075 Sep 22 23:14:28.155 INFO Opened existing region file "/tmp/downstairs-19iRuZ8i/region.json"
23076 Sep 22 23:14:28.155 INFO Database read version 1
23077 Sep 22 23:14:28.155 INFO Database write version 1
23078 Sep 22 23:14:28.155 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23079 Sep 22 23:14:28.156 INFO UUID: f77cd6ac-8353-429b-8c45-d7fd309de24d
23080 Sep 22 23:14:28.156 INFO Blocks per extent:5 Total Extents: 2
23081 Sep 22 23:14:28.156 INFO Crucible Version: Crucible Version: 0.0.1
23082 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23083 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23084 rustc: 1.70.0 stable x86_64-unknown-illumos
23085 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23086 Sep 22 23:14:28.156 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23087 Sep 22 23:14:28.156 INFO Using address: 127.0.0.1:35797, task: main
23088 Sep 22 23:14:28.156 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23089 Sep 22 23:14:28.156 INFO Repair listens on 127.0.0.1:0, task: repair
23090 Sep 22 23:14:28.156 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:47430, task: repair
23091 Sep 22 23:14:28.156 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:47430, task: repair
23092 Sep 22 23:14:28.156 INFO listening, local_addr: 127.0.0.1:47430, task: repair
23093 Sep 22 23:14:28.156 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:47430, task: repair
23094 Sep 22 23:14:28.156 INFO Using repair address: 127.0.0.1:47430, task: main
23095 Sep 22 23:14:28.156 INFO No SSL acceptor configured, task: main
23096 Sep 22 23:14:28.156 DEBG Read :1003 deps:[JobId(1002), JobId(1000)] res:true
23097 note: configured to log to "/dev/stdout"
230982023-09-22T23:14:28.158ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:48931
230992023-09-22T23:14:28.158ZINFOcrucible-pantry: listen IP: 127.0.0.1:48931
23100 Sep 22 23:14:28.161 DEBG [1] Read AckReady 1003, : downstairs
23101 Sep 22 23:14:28.163 DEBG [0] Read already AckReady 1003, : downstairs
23102 Sep 22 23:14:28.166 DEBG [2] Read already AckReady 1003, : downstairs
23103 Sep 22 23:14:28.166 DEBG up_ds_listen was notified
23104 Sep 22 23:14:28.166 DEBG up_ds_listen process 1003
23105 Sep 22 23:14:28.166 DEBG [A] ack job 1003:4, : downstairs
23106 Sep 22 23:14:28.166 DEBG up_ds_listen checked 1 jobs, back to waiting
23107 test test::integration_test_volume_write_unwritten_subvols ... ok
23108 Sep 22 23:14:28.176 INFO current number of open files limit 65536 is already the maximum
23109 Sep 22 23:14:28.176 INFO Created new region file "/tmp/downstairs-57IWH8PC/region.json"
23110 Sep 22 23:14:28.194 INFO listening on 127.0.0.1:0, task: main
23111 Sep 22 23:14:28.194 INFO listening on 127.0.0.1:0, task: main
23112 Sep 22 23:14:28.194 INFO listening on 127.0.0.1:0, task: main
231132023-09-22T23:14:28.195ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:40823 remote_addr = 127.0.0.1:53528
231142023-09-22T23:14:28.196ZINFOcrucible-pantry (datafile): no entry exists for volume 6fe510e3-7bac-480b-b94c-14525ec58a40, constructing...
231152023-09-22T23:14:28.196ZINFOcrucible-pantry (datafile): Upstairs starts
231162023-09-22T23:14:28.196ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
231172023-09-22T23:14:28.196ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
231182023-09-22T23:14:28.196ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: b85efbb3-3a5a-4a31-b421-0eab80343c65
231192023-09-22T23:14:28.196ZINFOcrucible-pantry (datafile): Crucible b85efbb3-3a5a-4a31-b421-0eab80343c65 has session id: fb76dd05-e142-43e1-91db-873cbbb0ea10
231202023-09-22T23:14:28.197ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:56487 looper = 0
231212023-09-22T23:14:28.197ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:49049 looper = 1
231222023-09-22T23:14:28.197ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:57827 looper = 2
231232023-09-22T23:14:28.197ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
231242023-09-22T23:14:28.197ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
231252023-09-22T23:14:28.197ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
231262023-09-22T23:14:28.197ZINFOcrucible-pantry (datafile): volume 6fe510e3-7bac-480b-b94c-14525ec58a40 constructed ok
23127 The guest has requested activation
231282023-09-22T23:14:28.198ZINFOcrucible-pantry (datafile): b85efbb3-3a5a-4a31-b421-0eab80343c65 active request set
23129 Sep 22 23:14:28.198 INFO accepted connection from 127.0.0.1:39551, task: main
23130 Sep 22 23:14:28.198 INFO accepted connection from 127.0.0.1:59221, task: main
23131 Sep 22 23:14:28.198 INFO accepted connection from 127.0.0.1:36069, task: main
231322023-09-22T23:14:28.198ZINFOcrucible-pantry (datafile): [0] b85efbb3-3a5a-4a31-b421-0eab80343c65 looper connected looper = 0
231332023-09-22T23:14:28.198ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:56487 in state New
231342023-09-22T23:14:28.198ZINFOcrucible-pantry (datafile): [1] b85efbb3-3a5a-4a31-b421-0eab80343c65 looper connected looper = 1
231352023-09-22T23:14:28.198ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:49049 in state New
231362023-09-22T23:14:28.198ZINFOcrucible-pantry (datafile): [2] b85efbb3-3a5a-4a31-b421-0eab80343c65 looper connected looper = 2
231372023-09-22T23:14:28.198ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:57827 in state New
23138 Sep 22 23:14:28.198 INFO Connection request from b85efbb3-3a5a-4a31-b421-0eab80343c65 with version 4, task: proc
23139 Sep 22 23:14:28.198 INFO upstairs UpstairsConnection { upstairs_id: b85efbb3-3a5a-4a31-b421-0eab80343c65, session_id: 3edd5ed2-e7dd-4825-829f-91f58f1773fc, gen: 1 } connected, version 4, task: proc
23140 Sep 22 23:14:28.199 INFO Connection request from b85efbb3-3a5a-4a31-b421-0eab80343c65 with version 4, task: proc
23141 Sep 22 23:14:28.199 INFO upstairs UpstairsConnection { upstairs_id: b85efbb3-3a5a-4a31-b421-0eab80343c65, session_id: 3edd5ed2-e7dd-4825-829f-91f58f1773fc, gen: 1 } connected, version 4, task: proc
23142 Sep 22 23:14:28.199 INFO Connection request from b85efbb3-3a5a-4a31-b421-0eab80343c65 with version 4, task: proc
23143 Sep 22 23:14:28.199 INFO upstairs UpstairsConnection { upstairs_id: b85efbb3-3a5a-4a31-b421-0eab80343c65, session_id: 3edd5ed2-e7dd-4825-829f-91f58f1773fc, gen: 1 } connected, version 4, task: proc
231442023-09-22T23:14:28.199ZINFOcrucible-pantry (datafile): [0] b85efbb3-3a5a-4a31-b421-0eab80343c65 (3edd5ed2-e7dd-4825-829f-91f58f1773fc) New New New ds_transition to WaitActive
231452023-09-22T23:14:28.199ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
231462023-09-22T23:14:28.199ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 3edd5ed2-e7dd-4825-829f-91f58f1773fc
231472023-09-22T23:14:28.199ZINFOcrucible-pantry (datafile): [1] b85efbb3-3a5a-4a31-b421-0eab80343c65 (3edd5ed2-e7dd-4825-829f-91f58f1773fc) WaitActive New New ds_transition to WaitActive
231482023-09-22T23:14:28.199ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
231492023-09-22T23:14:28.199ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 3edd5ed2-e7dd-4825-829f-91f58f1773fc
231502023-09-22T23:14:28.199ZINFOcrucible-pantry (datafile): [2] b85efbb3-3a5a-4a31-b421-0eab80343c65 (3edd5ed2-e7dd-4825-829f-91f58f1773fc) WaitActive WaitActive New ds_transition to WaitActive
231512023-09-22T23:14:28.199ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
231522023-09-22T23:14:28.199ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 3edd5ed2-e7dd-4825-829f-91f58f1773fc
23153 Sep 22 23:14:28.199 INFO UpstairsConnection { upstairs_id: b85efbb3-3a5a-4a31-b421-0eab80343c65, session_id: 3edd5ed2-e7dd-4825-829f-91f58f1773fc, gen: 1 } is now active (read-write)
23154 Sep 22 23:14:28.199 INFO UpstairsConnection { upstairs_id: b85efbb3-3a5a-4a31-b421-0eab80343c65, session_id: 3edd5ed2-e7dd-4825-829f-91f58f1773fc, gen: 1 } is now active (read-write)
23155 Sep 22 23:14:28.199 INFO UpstairsConnection { upstairs_id: b85efbb3-3a5a-4a31-b421-0eab80343c65, session_id: 3edd5ed2-e7dd-4825-829f-91f58f1773fc, gen: 1 } is now active (read-write)
231562023-09-22T23:14:28.200ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:56487 has UUID b7c099f2-5446-4f38-bad7-8d884c3a5016
231572023-09-22T23:14:28.200ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b7c099f2-5446-4f38-bad7-8d884c3a5016, encrypted: true, database_read_version: 1, database_write_version: 1 }
231582023-09-22T23:14:28.200ZINFOcrucible-pantry (datafile): b85efbb3-3a5a-4a31-b421-0eab80343c65 WaitActive WaitActive WaitActive
231592023-09-22T23:14:28.200ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:49049 has UUID 2fe4896e-5871-495b-8a9e-a2c5b4173cd5
231602023-09-22T23:14:28.200ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 2fe4896e-5871-495b-8a9e-a2c5b4173cd5, encrypted: true, database_read_version: 1, database_write_version: 1 }
231612023-09-22T23:14:28.200ZINFOcrucible-pantry (datafile): b85efbb3-3a5a-4a31-b421-0eab80343c65 WaitActive WaitActive WaitActive
231622023-09-22T23:14:28.200ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:57827 has UUID adbabfb8-1e80-4905-a6ee-914c6aaac853
231632023-09-22T23:14:28.200ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: adbabfb8-1e80-4905-a6ee-914c6aaac853, encrypted: true, database_read_version: 1, database_write_version: 1 }
231642023-09-22T23:14:28.200ZINFOcrucible-pantry (datafile): b85efbb3-3a5a-4a31-b421-0eab80343c65 WaitActive WaitActive WaitActive
23165 Sep 22 23:14:28.200 INFO Current flush_numbers [0..12]: [0, 0]
23166 Sep 22 23:14:28.201 INFO Downstairs has completed Negotiation, task: proc
23167 Sep 22 23:14:28.201 INFO Current flush_numbers [0..12]: [0, 0]
23168 Sep 22 23:14:28.201 INFO Downstairs has completed Negotiation, task: proc
23169 Sep 22 23:14:28.202 INFO Current flush_numbers [0..12]: [0, 0]
23170 Sep 22 23:14:28.202 INFO Downstairs has completed Negotiation, task: proc
231712023-09-22T23:14:28.202ZINFOcrucible-pantry (datafile): [0] b85efbb3-3a5a-4a31-b421-0eab80343c65 (3edd5ed2-e7dd-4825-829f-91f58f1773fc) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
231722023-09-22T23:14:28.202ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
231732023-09-22T23:14:28.202ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
231742023-09-22T23:14:28.202ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
231752023-09-22T23:14:28.202ZINFOcrucible-pantry (datafile): [1] b85efbb3-3a5a-4a31-b421-0eab80343c65 (3edd5ed2-e7dd-4825-829f-91f58f1773fc) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
231762023-09-22T23:14:28.202ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
231772023-09-22T23:14:28.202ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
231782023-09-22T23:14:28.202ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
231792023-09-22T23:14:28.202ZINFOcrucible-pantry (datafile): [2] b85efbb3-3a5a-4a31-b421-0eab80343c65 (3edd5ed2-e7dd-4825-829f-91f58f1773fc) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
231802023-09-22T23:14:28.202ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
23181 The guest has finished waiting for activation
231822023-09-22T23:14:28.202ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
231832023-09-22T23:14:28.202ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
231842023-09-22T23:14:28.202ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:56487 task reports connection:true
231852023-09-22T23:14:28.202ZINFOcrucible-pantry (datafile): b85efbb3-3a5a-4a31-b421-0eab80343c65 WaitQuorum WaitQuorum WaitQuorum
231862023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
231872023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
231882023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
231892023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
231902023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
231912023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
231922023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
231932023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
231942023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
231952023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): Max found gen is 1
231962023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
231972023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): Next flush: 1
231982023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): All extents match
231992023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): No downstairs repair required
232002023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): No initial repair work was required
232012023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
232022023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): b85efbb3-3a5a-4a31-b421-0eab80343c65 is now active with session: 3edd5ed2-e7dd-4825-829f-91f58f1773fc
232032023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): b85efbb3-3a5a-4a31-b421-0eab80343c65 Set Active after no repair
232042023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
232052023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): Set check for repair
232062023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:49049 task reports connection:true
232072023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): b85efbb3-3a5a-4a31-b421-0eab80343c65 Active Active Active
232082023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): Set check for repair
232092023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:57827 task reports connection:true
232102023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): b85efbb3-3a5a-4a31-b421-0eab80343c65 Active Active Active
232112023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): Set check for repair
232122023-09-22T23:14:28.203ZINFOcrucible-pantry (datafile): [0] received reconcile message
232132023-09-22T23:14:28.204ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
232142023-09-22T23:14:28.204ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
232152023-09-22T23:14:28.204ZINFOcrucible-pantry (datafile): [1] received reconcile message
232162023-09-22T23:14:28.204ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
232172023-09-22T23:14:28.204ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
232182023-09-22T23:14:28.204ZINFOcrucible-pantry (datafile): [2] received reconcile message
232192023-09-22T23:14:28.204ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
232202023-09-22T23:14:28.204ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
232212023-09-22T23:14:28.204ZINFOcrucible-pantry (datafile): volume 6fe510e3-7bac-480b-b94c-14525ec58a40 activated ok
232222023-09-22T23:14:28.204ZINFOcrucible-pantry (datafile): volume 6fe510e3-7bac-480b-b94c-14525ec58a40 constructed and inserted ok
232232023-09-22T23:14:28.204ZINFOcrucible-pantry (dropshot): request completed latency_us = 7859 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 28f2ad76-6d7c-4684-bffe-a57312bf354d response_code = 200 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40
232242023-09-22T23:14:28.205ZINFOcrucible-pantry (dropshot): request completed latency_us = 1506 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 4cb6ab2d-f8d2-4eb6-ba1c-cc2664672daf response_code = 204 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_write
23225 Sep 22 23:14:28.206 DEBG Write :1000 deps:[] res:true
23226 Sep 22 23:14:28.207 DEBG Write :1000 deps:[] res:true
23227 Sep 22 23:14:28.208 DEBG Write :1000 deps:[] res:true
232282023-09-22T23:14:28.210ZINFOcrucible-pantry (dropshot): request completed latency_us = 1428 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 6b505b2f-ed30-40b2-9197-9bceb8c3a10a response_code = 204 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_write
23229 Sep 22 23:14:28.211 DEBG Write :1001 deps:[] res:true
23230 Sep 22 23:14:28.211 DEBG Write :1001 deps:[] res:true
23231 Sep 22 23:14:28.211 DEBG Write :1001 deps:[] res:true
232322023-09-22T23:14:28.214ZINFOcrucible-pantry (dropshot): request completed latency_us = 1359 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 5f819979-a240-430c-946a-e3d6e048b24b response_code = 204 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_write
23233 Sep 22 23:14:28.214 DEBG Write :1002 deps:[] res:true
23234 Sep 22 23:14:28.215 DEBG Write :1002 deps:[] res:true
23235 Sep 22 23:14:28.215 DEBG Write :1002 deps:[] res:true
232362023-09-22T23:14:28.217ZINFOcrucible-pantry (dropshot): request completed latency_us = 1425 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 45f4ff80-c5b9-40be-bb86-838fb49f9c78 response_code = 204 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_write
23237 Sep 22 23:14:28.218 DEBG Write :1003 deps:[] res:true
23238 Sep 22 23:14:28.218 DEBG Write :1003 deps:[] res:true
23239 Sep 22 23:14:28.218 DEBG Write :1003 deps:[] res:true
232402023-09-22T23:14:28.220ZINFOcrucible-pantry (dropshot): request completed latency_us = 1033 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = c64b56e1-3c54-4f6b-86f2-3a16e9ed07cb response_code = 204 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_write
23241 Sep 22 23:14:28.220 DEBG Write :1004 deps:[] res:true
23242 Sep 22 23:14:28.221 DEBG Write :1004 deps:[] res:true
23243 Sep 22 23:14:28.221 DEBG Write :1004 deps:[] res:true
232442023-09-22T23:14:28.223ZINFOcrucible-pantry (dropshot): request completed latency_us = 1218 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 5b81c2ae-f65c-4c0b-b4d6-239be7a0b488 response_code = 204 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_write
23245 Sep 22 23:14:28.224 DEBG Write :1005 deps:[] res:true
23246 Sep 22 23:14:28.225 DEBG Write :1005 deps:[] res:true
23247 Sep 22 23:14:28.225 DEBG Write :1005 deps:[] res:true
232482023-09-22T23:14:28.228ZINFOcrucible-pantry (dropshot): request completed latency_us = 1643 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 2c6a1296-0cd8-4fe0-b447-fd1566f38f7f response_code = 204 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_write
23249 Sep 22 23:14:28.228 DEBG Write :1006 deps:[] res:true
23250 Sep 22 23:14:28.229 DEBG Write :1006 deps:[] res:true
23251 Sep 22 23:14:28.229 DEBG Write :1006 deps:[] res:true
232522023-09-22T23:14:28.231ZINFOcrucible-pantry (dropshot): request completed latency_us = 1462 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = dbdb70f4-870e-4754-a60c-3ebfccf17d1d response_code = 204 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_write
23253 Sep 22 23:14:28.232 DEBG Write :1007 deps:[] res:true
23254 Sep 22 23:14:28.232 DEBG Write :1007 deps:[] res:true
23255 Sep 22 23:14:28.232 DEBG Write :1007 deps:[] res:true
232562023-09-22T23:14:28.235ZINFOcrucible-pantry (dropshot): request completed latency_us = 1396 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = d2c74d6c-f974-4944-9b53-05e4cac458e1 response_code = 204 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_write
23257 Sep 22 23:14:28.235 DEBG Write :1008 deps:[] res:true
23258 Sep 22 23:14:28.236 DEBG Write :1008 deps:[] res:true
23259 Sep 22 23:14:28.236 DEBG Write :1008 deps:[] res:true
232602023-09-22T23:14:28.238ZINFOcrucible-pantry (dropshot): request completed latency_us = 1410 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 7bc63b22-9568-43af-a3e7-c5147dc5aed1 response_code = 204 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_write
23261 Sep 22 23:14:28.239 DEBG Write :1009 deps:[] res:true
23262 Sep 22 23:14:28.239 DEBG Write :1009 deps:[] res:true
23263 Sep 22 23:14:28.239 DEBG Write :1009 deps:[] res:true
23264 Sep 22 23:14:28.241 DEBG Read :1010 deps:[JobId(1000)] res:true
23265 Sep 22 23:14:28.242 DEBG Read :1010 deps:[JobId(1000)] res:true
23266 Sep 22 23:14:28.242 DEBG Read :1010 deps:[JobId(1000)] res:true
232672023-09-22T23:14:28.243ZINFOcrucible-pantry (dropshot): request completed latency_us = 3061 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = cb5bd336-164d-4cab-b2a9-09aef8439cfa response_code = 200 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_read
23268 Sep 22 23:14:28.245 DEBG Read :1011 deps:[JobId(1001)] res:true
23269 Sep 22 23:14:28.245 DEBG Read :1011 deps:[JobId(1001)] res:true
23270 Sep 22 23:14:28.245 DEBG Read :1011 deps:[JobId(1001)] res:true
232712023-09-22T23:14:28.246ZINFOcrucible-pantry (dropshot): request completed latency_us = 2488 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 2835b3cd-bb7c-4df5-a892-dcef0bc0c99c response_code = 200 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_read
23272 Sep 22 23:14:28.248 DEBG Read :1012 deps:[JobId(1002)] res:true
23273 Sep 22 23:14:28.248 DEBG Read :1012 deps:[JobId(1002)] res:true
23274 Sep 22 23:14:28.248 DEBG Read :1012 deps:[JobId(1002)] res:true
232752023-09-22T23:14:28.250ZINFOcrucible-pantry (dropshot): request completed latency_us = 2804 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 036e364f-528a-4ac9-831f-5ab654aa43aa response_code = 200 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_read
23276 Sep 22 23:14:28.252 DEBG Read :1013 deps:[JobId(1003)] res:true
23277 Sep 22 23:14:28.252 DEBG Read :1013 deps:[JobId(1003)] res:true
23278 Sep 22 23:14:28.252 DEBG Read :1013 deps:[JobId(1003)] res:true
232792023-09-22T23:14:28.254ZINFOcrucible-pantry (dropshot): request completed latency_us = 3781 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = e051b346-5530-40ed-8149-212b79f50021 response_code = 200 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_read
23280 Sep 22 23:14:28.256 DEBG Read :1014 deps:[JobId(1004)] res:true
23281 Sep 22 23:14:28.256 DEBG Read :1014 deps:[JobId(1004)] res:true
23282 Sep 22 23:14:28.257 DEBG Read :1014 deps:[JobId(1004)] res:true
232832023-09-22T23:14:28.259ZINFOcrucible-pantry (dropshot): request completed latency_us = 3646 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 8d900b84-f6c9-492c-ba5c-e966bc77df92 response_code = 200 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_read
23284 Sep 22 23:14:28.259 INFO listening on 127.0.0.1:0, task: main
23285 Sep 22 23:14:28.259 INFO listening on 127.0.0.1:0, task: main
23286 Sep 22 23:14:28.259 INFO listening on 127.0.0.1:0, task: main
232872023-09-22T23:14:28.259ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:48931 remote_addr = 127.0.0.1:35367
232882023-09-22T23:14:28.260ZINFOcrucible-pantry (datafile): no entry exists for volume 78899c23-7806-4736-ba33-b5f432ab026e, constructing...
232892023-09-22T23:14:28.260ZINFOcrucible-pantry (datafile): Upstairs starts
232902023-09-22T23:14:28.260ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
232912023-09-22T23:14:28.260ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
232922023-09-22T23:14:28.260ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: aa7cbd55-9585-444d-917b-34b9d1d5a423
232932023-09-22T23:14:28.260ZINFOcrucible-pantry (datafile): Crucible aa7cbd55-9585-444d-917b-34b9d1d5a423 has session id: 7a7229f8-e775-4acb-96eb-89fbc1a08db9
232942023-09-22T23:14:28.260ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:64713 looper = 0
23295 Sep 22 23:14:28.261 DEBG Read :1015 deps:[JobId(1005)] res:true
232962023-09-22T23:14:28.261ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:60775 looper = 1
23297 Sep 22 23:14:28.261 DEBG Read :1015 deps:[JobId(1005)] res:true
232982023-09-22T23:14:28.261ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:35797 looper = 2
232992023-09-22T23:14:28.261ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
233002023-09-22T23:14:28.261ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
233012023-09-22T23:14:28.261ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
23302 Sep 22 23:14:28.261 DEBG Read :1015 deps:[JobId(1005)] res:true
233032023-09-22T23:14:28.261ZINFOcrucible-pantry (datafile): volume 78899c23-7806-4736-ba33-b5f432ab026e constructed ok
23304 The guest has requested activation
233052023-09-22T23:14:28.261ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 active request set
23306 Sep 22 23:14:28.262 INFO accepted connection from 127.0.0.1:59929, task: main
23307 Sep 22 23:14:28.262 INFO accepted connection from 127.0.0.1:32800, task: main
23308 Sep 22 23:14:28.262 INFO accepted connection from 127.0.0.1:55758, task: main
233092023-09-22T23:14:28.262ZINFOcrucible-pantry (datafile): [0] aa7cbd55-9585-444d-917b-34b9d1d5a423 looper connected looper = 0
233102023-09-22T23:14:28.262ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:64713 in state New
233112023-09-22T23:14:28.262ZINFOcrucible-pantry (datafile): [1] aa7cbd55-9585-444d-917b-34b9d1d5a423 looper connected looper = 1
233122023-09-22T23:14:28.262ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:60775 in state New
233132023-09-22T23:14:28.262ZINFOcrucible-pantry (datafile): [2] aa7cbd55-9585-444d-917b-34b9d1d5a423 looper connected looper = 2
233142023-09-22T23:14:28.262ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:35797 in state New
23315 Sep 22 23:14:28.262 INFO Connection request from aa7cbd55-9585-444d-917b-34b9d1d5a423 with version 4, task: proc
23316 Sep 22 23:14:28.262 INFO upstairs UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 } connected, version 4, task: proc
23317 Sep 22 23:14:28.262 INFO Connection request from aa7cbd55-9585-444d-917b-34b9d1d5a423 with version 4, task: proc
23318 Sep 22 23:14:28.262 INFO upstairs UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 } connected, version 4, task: proc
23319 Sep 22 23:14:28.263 INFO Connection request from aa7cbd55-9585-444d-917b-34b9d1d5a423 with version 4, task: proc
23320 Sep 22 23:14:28.263 INFO upstairs UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 } connected, version 4, task: proc
233212023-09-22T23:14:28.263ZINFOcrucible-pantry (datafile): [0] aa7cbd55-9585-444d-917b-34b9d1d5a423 (ae2b86c4-327d-41e8-b04c-5803e0cf8d60) New New New ds_transition to WaitActive
233222023-09-22T23:14:28.263ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
233232023-09-22T23:14:28.263ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session ae2b86c4-327d-41e8-b04c-5803e0cf8d60
233242023-09-22T23:14:28.263ZINFOcrucible-pantry (datafile): [1] aa7cbd55-9585-444d-917b-34b9d1d5a423 (ae2b86c4-327d-41e8-b04c-5803e0cf8d60) WaitActive New New ds_transition to WaitActive
233252023-09-22T23:14:28.263ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
233262023-09-22T23:14:28.263ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session ae2b86c4-327d-41e8-b04c-5803e0cf8d60
233272023-09-22T23:14:28.263ZINFOcrucible-pantry (datafile): [2] aa7cbd55-9585-444d-917b-34b9d1d5a423 (ae2b86c4-327d-41e8-b04c-5803e0cf8d60) WaitActive WaitActive New ds_transition to WaitActive
233282023-09-22T23:14:28.263ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
233292023-09-22T23:14:28.263ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session ae2b86c4-327d-41e8-b04c-5803e0cf8d60
233302023-09-22T23:14:28.263ZINFOcrucible-pantry (dropshot): request completed latency_us = 3706 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 202bf347-4761-4970-a482-1249a249a5ba response_code = 200 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_read
23331 Sep 22 23:14:28.263 INFO UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 } is now active (read-write)
23332 Sep 22 23:14:28.263 INFO UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 } is now active (read-write)
23333 Sep 22 23:14:28.263 INFO UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 } is now active (read-write)
233342023-09-22T23:14:28.264ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:64713 has UUID 16035766-fcf1-4aff-8be5-4662118fbeba
233352023-09-22T23:14:28.264ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 16035766-fcf1-4aff-8be5-4662118fbeba, encrypted: true, database_read_version: 1, database_write_version: 1 }
233362023-09-22T23:14:28.264ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 WaitActive WaitActive WaitActive
233372023-09-22T23:14:28.264ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:60775 has UUID c1ff2b45-bb0b-4e95-bfb6-6e2ba62da531
233382023-09-22T23:14:28.264ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c1ff2b45-bb0b-4e95-bfb6-6e2ba62da531, encrypted: true, database_read_version: 1, database_write_version: 1 }
233392023-09-22T23:14:28.264ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 WaitActive WaitActive WaitActive
233402023-09-22T23:14:28.264ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:35797 has UUID f77cd6ac-8353-429b-8c45-d7fd309de24d
233412023-09-22T23:14:28.264ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f77cd6ac-8353-429b-8c45-d7fd309de24d, encrypted: true, database_read_version: 1, database_write_version: 1 }
233422023-09-22T23:14:28.264ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 WaitActive WaitActive WaitActive
23343 Sep 22 23:14:28.265 INFO Current flush_numbers [0..12]: [0, 0]
23344 Sep 22 23:14:28.265 INFO Downstairs has completed Negotiation, task: proc
23345 Sep 22 23:14:28.265 INFO Current flush_numbers [0..12]: [0, 0]
23346 Sep 22 23:14:28.265 DEBG Read :1016 deps:[JobId(1006)] res:true
23347 Sep 22 23:14:28.265 DEBG Read :1016 deps:[JobId(1006)] res:true
23348 Sep 22 23:14:28.265 INFO Downstairs has completed Negotiation, task: proc
23349 Sep 22 23:14:28.265 DEBG Read :1016 deps:[JobId(1006)] res:true
23350 Sep 22 23:14:28.266 INFO Current flush_numbers [0..12]: [0, 0]
23351 Sep 22 23:14:28.266 INFO Downstairs has completed Negotiation, task: proc
233522023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [0] aa7cbd55-9585-444d-917b-34b9d1d5a423 (ae2b86c4-327d-41e8-b04c-5803e0cf8d60) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
233532023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
233542023-09-22T23:14:28.266ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
233552023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
233562023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [1] aa7cbd55-9585-444d-917b-34b9d1d5a423 (ae2b86c4-327d-41e8-b04c-5803e0cf8d60) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
233572023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
233582023-09-22T23:14:28.266ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
233592023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
233602023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [2] aa7cbd55-9585-444d-917b-34b9d1d5a423 (ae2b86c4-327d-41e8-b04c-5803e0cf8d60) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
233612023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
233622023-09-22T23:14:28.266ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
233632023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
233642023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:64713 task reports connection:true
233652023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 WaitQuorum WaitQuorum WaitQuorum
233662023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
233672023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
233682023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
233692023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
233702023-09-22T23:14:28.266ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
233712023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
233722023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
233732023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
23374 The guest has finished waiting for activation
233752023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
233762023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): Max found gen is 1
233772023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
233782023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): Next flush: 1
233792023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): All extents match
233802023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): No downstairs repair required
233812023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): No initial repair work was required
233822023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
233832023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 is now active with session: ae2b86c4-327d-41e8-b04c-5803e0cf8d60
233842023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 Set Active after no repair
233852023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
233862023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): Set check for repair
233872023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:60775 task reports connection:true
233882023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 Active Active Active
233892023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): Set check for repair
233902023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:35797 task reports connection:true
233912023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 Active Active Active
233922023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): Set check for repair
233932023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [0] received reconcile message
233942023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
233952023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
233962023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [1] received reconcile message
233972023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
233982023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
233992023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [2] received reconcile message
234002023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
234012023-09-22T23:14:28.267ZINFOcrucible-pantry (dropshot): request completed latency_us = 3481 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = e6f4af19-4ca5-49d6-8f8c-d6d166ba5df7 response_code = 200 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_read
234022023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
234032023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): volume 78899c23-7806-4736-ba33-b5f432ab026e activated ok
234042023-09-22T23:14:28.267ZINFOcrucible-pantry (datafile): volume 78899c23-7806-4736-ba33-b5f432ab026e constructed and inserted ok
234052023-09-22T23:14:28.267ZINFOcrucible-pantry (dropshot): request completed latency_us = 7420 local_addr = 127.0.0.1:48931 method = POST remote_addr = 127.0.0.1:35367 req_id = 19c572f7-03f0-43d4-bd63-d56ceab01e33 response_code = 200 uri = /crucible/pantry/0/volume/78899c23-7806-4736-ba33-b5f432ab026e
23406 Sep 22 23:14:28.269 DEBG Read :1017 deps:[JobId(1007)] res:true
234072023-09-22T23:14:28.269ZINFOcrucible-pantry (dropshot): request completed latency_us = 1450 local_addr = 127.0.0.1:48931 method = POST remote_addr = 127.0.0.1:35367 req_id = b4807276-cc2f-443d-88e4-ebee284d6639 response_code = 204 uri = /crucible/pantry/0/volume/78899c23-7806-4736-ba33-b5f432ab026e/bulk_write
23408 Sep 22 23:14:28.269 DEBG Read :1017 deps:[JobId(1007)] res:true
23409 Sep 22 23:14:28.270 DEBG Read :1017 deps:[JobId(1007)] res:true
23410 Sep 22 23:14:28.270 DEBG Write :1000 deps:[] res:true
23411 Sep 22 23:14:28.271 DEBG Write :1000 deps:[] res:true
23412 Sep 22 23:14:28.271 DEBG Write :1000 deps:[] res:true
234132023-09-22T23:14:28.272ZINFOcrucible-pantry (dropshot): request completed latency_us = 3548 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = fc4d52af-0e45-4704-9688-9261bf81c61d response_code = 200 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_read
23414 Sep 22 23:14:28.273 INFO UUID: 353b066e-6f4d-4802-9336-0f9c4c2ea130
23415 Sep 22 23:14:28.273 INFO Blocks per extent:512 Total Extents: 188
23416 Sep 22 23:14:28.273 INFO Crucible Version: Crucible Version: 0.0.1
23417 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23418 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23419 rustc: 1.70.0 stable x86_64-unknown-illumos
23420 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23421 Sep 22 23:14:28.273 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23422 Sep 22 23:14:28.273 INFO Using address: 127.0.0.1:42762, task: main
234232023-09-22T23:14:28.273ZINFOcrucible-pantry (dropshot): request completed latency_us = 1397 local_addr = 127.0.0.1:48931 method = POST remote_addr = 127.0.0.1:35367 req_id = de44c06f-8f20-42e9-9aec-536641baa80f response_code = 204 uri = /crucible/pantry/0/volume/78899c23-7806-4736-ba33-b5f432ab026e/bulk_write
23424 Sep 22 23:14:28.274 DEBG Read :1018 deps:[JobId(1008)] res:true
23425 Sep 22 23:14:28.274 INFO Repair listens on 127.0.0.1:0, task: repair
23426 Sep 22 23:14:28.274 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:43667, task: repair
23427 Sep 22 23:14:28.274 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:43667, task: repair
23428 Sep 22 23:14:28.274 DEBG Read :1018 deps:[JobId(1008)] res:true
23429 Sep 22 23:14:28.274 INFO listening, local_addr: 127.0.0.1:43667, task: repair
23430 Sep 22 23:14:28.274 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:43667, task: repair
23431 Sep 22 23:14:28.274 DEBG Read :1018 deps:[JobId(1008)] res:true
23432 Sep 22 23:14:28.274 INFO Using repair address: 127.0.0.1:43667, task: main
23433 Sep 22 23:14:28.274 INFO No SSL acceptor configured, task: main
23434 Sep 22 23:14:28.274 DEBG Write :1001 deps:[] res:true
23435 Sep 22 23:14:28.274 DEBG Write :1001 deps:[] res:true
23436 Sep 22 23:14:28.275 DEBG Write :1001 deps:[] res:true
234372023-09-22T23:14:28.276ZINFOcrucible-pantry (dropshot): request completed latency_us = 3788 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = 2ffb3177-418d-4c22-83a8-4a21e9901ec7 response_code = 200 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_read
234382023-09-22T23:14:28.277ZINFOcrucible-pantry (dropshot): request completed latency_us = 1445 local_addr = 127.0.0.1:48931 method = POST remote_addr = 127.0.0.1:35367 req_id = 8c0f2aa4-9126-4c3f-93d1-2a01202558d6 response_code = 204 uri = /crucible/pantry/0/volume/78899c23-7806-4736-ba33-b5f432ab026e/bulk_write
23439 Sep 22 23:14:28.278 DEBG Write :1002 deps:[] res:true
23440 Sep 22 23:14:28.278 DEBG Write :1002 deps:[] res:true
23441 Sep 22 23:14:28.278 DEBG Read :1019 deps:[JobId(1009)] res:true
23442 Sep 22 23:14:28.278 DEBG Write :1002 deps:[] res:true
23443 Sep 22 23:14:28.278 DEBG Read :1019 deps:[JobId(1009)] res:true
23444 Sep 22 23:14:28.279 DEBG Read :1019 deps:[JobId(1009)] res:true
234452023-09-22T23:14:28.280ZINFOcrucible-pantry (dropshot): request completed latency_us = 1383 local_addr = 127.0.0.1:48931 method = POST remote_addr = 127.0.0.1:35367 req_id = a8d9cb68-a0a0-4197-b8a4-37b3cb438873 response_code = 204 uri = /crucible/pantry/0/volume/78899c23-7806-4736-ba33-b5f432ab026e/bulk_write
234462023-09-22T23:14:28.280ZINFOcrucible-pantry (dropshot): request completed latency_us = 3657 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = a09ab62e-8daf-4a1a-b39a-e816bd60aa31 response_code = 200 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_read
23447 Sep 22 23:14:28.281 DEBG Write :1003 deps:[] res:true
23448 Sep 22 23:14:28.282 DEBG Write :1003 deps:[] res:true
23449 Sep 22 23:14:28.282 DEBG Write :1003 deps:[] res:true
23450 Sep 22 23:14:28.283 DEBG Read :1020 deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
23451 Sep 22 23:14:28.283 DEBG Read :1020 deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
234522023-09-22T23:14:28.284ZINFOcrucible-pantry (dropshot): request completed latency_us = 1304 local_addr = 127.0.0.1:48931 method = POST remote_addr = 127.0.0.1:35367 req_id = d8efcda3-7c48-4e3e-b200-b7b11e9cb39a response_code = 204 uri = /crucible/pantry/0/volume/78899c23-7806-4736-ba33-b5f432ab026e/bulk_write
23453 Sep 22 23:14:28.284 DEBG Read :1020 deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
23454 Sep 22 23:14:28.285 DEBG Write :1004 deps:[] res:true
23455 Sep 22 23:14:28.285 DEBG Write :1004 deps:[] res:true
23456 Sep 22 23:14:28.286 DEBG Write :1004 deps:[] res:true
234572023-09-22T23:14:28.288ZINFOcrucible-pantry (dropshot): request completed latency_us = 1437 local_addr = 127.0.0.1:48931 method = POST remote_addr = 127.0.0.1:35367 req_id = 357853c6-a4d0-4093-acd2-4d1a56b42f8e response_code = 204 uri = /crucible/pantry/0/volume/78899c23-7806-4736-ba33-b5f432ab026e/bulk_write
23458 Sep 22 23:14:28.289 DEBG Write :1005 deps:[] res:true
23459 Sep 22 23:14:28.289 DEBG Write :1005 deps:[] res:true
23460 Sep 22 23:14:28.290 DEBG Write :1005 deps:[] res:true
234612023-09-22T23:14:28.292ZINFOcrucible-pantry (dropshot): request completed latency_us = 1093 local_addr = 127.0.0.1:48931 method = POST remote_addr = 127.0.0.1:35367 req_id = cd37d740-4ea4-43be-90cb-50f002e4d108 response_code = 204 uri = /crucible/pantry/0/volume/78899c23-7806-4736-ba33-b5f432ab026e/bulk_write
23462 Sep 22 23:14:28.292 DEBG Write :1006 deps:[] res:true
23463 Sep 22 23:14:28.292 DEBG Write :1006 deps:[] res:true
23464 Sep 22 23:14:28.293 DEBG Write :1006 deps:[] res:true
234652023-09-22T23:14:28.294ZINFOcrucible-pantry (dropshot): request completed latency_us = 1099 local_addr = 127.0.0.1:48931 method = POST remote_addr = 127.0.0.1:35367 req_id = f8b62fce-60f5-4dc2-b374-de3d17bd5aa5 response_code = 204 uri = /crucible/pantry/0/volume/78899c23-7806-4736-ba33-b5f432ab026e/bulk_write
23466 Sep 22 23:14:28.295 DEBG Write :1007 deps:[] res:true
234672023-09-22T23:14:28.295ZINFOcrucible-pantry (dropshot): request completed latency_us = 13946 local_addr = 127.0.0.1:40823 method = POST remote_addr = 127.0.0.1:53528 req_id = d62a3f4f-b3fb-4434-bb77-20b0a7a8a18d response_code = 200 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40/bulk_read
23468 Sep 22 23:14:28.295 DEBG Write :1007 deps:[] res:true
23469 Sep 22 23:14:28.296 DEBG Write :1007 deps:[] res:true
234702023-09-22T23:14:28.296ZINFOcrucible-pantry (datafile): detach removing entry for volume 6fe510e3-7bac-480b-b94c-14525ec58a40
234712023-09-22T23:14:28.296ZINFOcrucible-pantry (datafile): detaching volume 6fe510e3-7bac-480b-b94c-14525ec58a40
234722023-09-22T23:14:28.297ZINFOcrucible-pantry (dropshot): request completed latency_us = 1041 local_addr = 127.0.0.1:48931 method = POST remote_addr = 127.0.0.1:35367 req_id = 19414047-b377-4324-b1d4-9213ffd1493c response_code = 204 uri = /crucible/pantry/0/volume/78899c23-7806-4736-ba33-b5f432ab026e/bulk_write
23473 Sep 22 23:14:28.298 DEBG Write :1008 deps:[] res:true
23474 Sep 22 23:14:28.298 DEBG Write :1008 deps:[] res:true
23475 Sep 22 23:14:28.298 DEBG Write :1008 deps:[] res:true
23476 Sep 22 23:14:28.299 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
23477 Sep 22 23:14:28.299 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
23478 Sep 22 23:14:28.299 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
234792023-09-22T23:14:28.300ZINFOcrucible-pantry (datafile): Request to deactivate this guest
234802023-09-22T23:14:28.300ZINFOcrucible-pantry (datafile): b85efbb3-3a5a-4a31-b421-0eab80343c65 set deactivating.
234812023-09-22T23:14:28.300ZINFOcrucible-pantry (dropshot): request completed latency_us = 4163 local_addr = 127.0.0.1:40823 method = DELETE remote_addr = 127.0.0.1:53528 req_id = d3793d99-d440-407b-a757-f12e279e16cd response_code = 204 uri = /crucible/pantry/0/volume/6fe510e3-7bac-480b-b94c-14525ec58a40
234822023-09-22T23:14:28.300ZINFOcrucible-pantry (dropshot): request completed latency_us = 1128 local_addr = 127.0.0.1:48931 method = POST remote_addr = 127.0.0.1:35367 req_id = 905ef482-6214-4e25-9c4c-6772b9478fd6 response_code = 204 uri = /crucible/pantry/0/volume/78899c23-7806-4736-ba33-b5f432ab026e/bulk_write
23483 Sep 22 23:14:28.301 DEBG Write :1009 deps:[] res:true
23484 Sep 22 23:14:28.301 DEBG Write :1009 deps:[] res:true
23485 Sep 22 23:14:28.301 DEBG Write :1009 deps:[] res:true
234862023-09-22T23:14:28.302ZINFOcrucible-pantry (datafile): detach removing entry for volume 78899c23-7806-4736-ba33-b5f432ab026e
234872023-09-22T23:14:28.302ZINFOcrucible-pantry (datafile): detaching volume 78899c23-7806-4736-ba33-b5f432ab026e
23488 Sep 22 23:14:28.306 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
23489 Sep 22 23:14:28.306 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
23490 Sep 22 23:14:28.306 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
234912023-09-22T23:14:28.307ZINFOcrucible-pantry (datafile): Request to deactivate this guest
23492 Sep 22 23:14:28.307 INFO listening on 127.0.0.1:0, task: main
234932023-09-22T23:14:28.307ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 set deactivating.
234942023-09-22T23:14:28.307ZINFOcrucible-pantry (dropshot): request completed latency_us = 4992 local_addr = 127.0.0.1:48931 method = DELETE remote_addr = 127.0.0.1:35367 req_id = f9dc307e-8670-4496-8955-2f14a4cc4df3 response_code = 204 uri = /crucible/pantry/0/volume/78899c23-7806-4736-ba33-b5f432ab026e
23495 Sep 22 23:14:28.307 INFO current number of open files limit 65536 is already the maximum
23496 Sep 22 23:14:28.307 INFO Opened existing region file "/tmp/downstairs-SvxEiywd/region.json"
23497 Sep 22 23:14:28.307 INFO Upstairs starts
23498 Sep 22 23:14:28.307 INFO Database read version 1
23499 Sep 22 23:14:28.307 INFO Database write version 1
23500 Sep 22 23:14:28.307 INFO Crucible Version: BuildInfo {
23501 version: "0.0.1",
23502 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
23503 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
23504 git_branch: "main",
23505 rustc_semver: "1.70.0",
23506 rustc_channel: "stable",
23507 rustc_host_triple: "x86_64-unknown-illumos",
23508 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
23509 cargo_triple: "x86_64-unknown-illumos",
23510 debug: true,
23511 opt_level: 0,
23512 }
23513 Sep 22 23:14:28.307 INFO Upstairs <-> Downstairs Message Version: 4
23514 Sep 22 23:14:28.307 INFO Crucible stats registered with UUID: aa7cbd55-9585-444d-917b-34b9d1d5a423
23515 Sep 22 23:14:28.307 INFO Crucible aa7cbd55-9585-444d-917b-34b9d1d5a423 has session id: cc26b89c-1762-43aa-9f1e-89b721c949d3
23516 Sep 22 23:14:28.307 INFO [0] connecting to 127.0.0.1:64713, looper: 0
23517 Sep 22 23:14:28.308 INFO [1] connecting to 127.0.0.1:60775, looper: 1
23518 Sep 22 23:14:28.308 INFO [2] connecting to 127.0.0.1:35797, looper: 2
23519 Sep 22 23:14:28.308 INFO up_listen starts, task: up_listen
23520 Sep 22 23:14:28.308 INFO Wait for all three downstairs to come online
23521 Sep 22 23:14:28.308 INFO Flush timeout: 0.5
23522 Sep 22 23:14:28.308 INFO [0] aa7cbd55-9585-444d-917b-34b9d1d5a423 looper connected, looper: 0
23523 Sep 22 23:14:28.308 INFO [0] Proc runs for 127.0.0.1:64713 in state New
23524 Sep 22 23:14:28.308 INFO [1] aa7cbd55-9585-444d-917b-34b9d1d5a423 looper connected, looper: 1
23525 Sep 22 23:14:28.308 INFO [1] Proc runs for 127.0.0.1:60775 in state New
23526 Sep 22 23:14:28.308 INFO [2] aa7cbd55-9585-444d-917b-34b9d1d5a423 looper connected, looper: 2
23527 test test::test_pantry_bulk_read ... okSep 22 23:14:28.308 INFO [2] Proc runs for 127.0.0.1:35797 in state New
23528 
23529 Sep 22 23:14:28.308 INFO accepted connection from 127.0.0.1:45873, task: main
23530 Sep 22 23:14:28.308 INFO accepted connection from 127.0.0.1:33997, task: main
23531 Sep 22 23:14:28.308 INFO accepted connection from 127.0.0.1:54269, task: main
23532 Sep 22 23:14:28.309 INFO Connection request from aa7cbd55-9585-444d-917b-34b9d1d5a423 with version 4, task: proc
23533 Sep 22 23:14:28.309 INFO current number of open files limit 65536 is already the maximum
23534 Sep 22 23:14:28.309 INFO upstairs UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 } connected, version 4, task: proc
23535 Sep 22 23:14:28.309 INFO Connection request from aa7cbd55-9585-444d-917b-34b9d1d5a423 with version 4, task: proc
23536 Sep 22 23:14:28.309 INFO upstairs UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 } connected, version 4, task: proc
23537 Sep 22 23:14:28.309 INFO Created new region file "/tmp/downstairs-XcMs9S9H/region.json"
23538 Sep 22 23:14:28.309 INFO Connection request from aa7cbd55-9585-444d-917b-34b9d1d5a423 with version 4, task: proc
23539 Sep 22 23:14:28.309 INFO upstairs UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 } connected, version 4, task: proc
23540 Sep 22 23:14:28.309 INFO [0] aa7cbd55-9585-444d-917b-34b9d1d5a423 (76885fcd-caf3-4c24-a3a9-04dbfcef7b58) New New New ds_transition to WaitActive
23541 Sep 22 23:14:28.309 INFO [0] Transition from New to WaitActive
23542 Sep 22 23:14:28.309 INFO [1] aa7cbd55-9585-444d-917b-34b9d1d5a423 (76885fcd-caf3-4c24-a3a9-04dbfcef7b58) WaitActive New New ds_transition to WaitActive
23543 Sep 22 23:14:28.309 INFO [1] Transition from New to WaitActive
23544 Sep 22 23:14:28.309 INFO [2] aa7cbd55-9585-444d-917b-34b9d1d5a423 (76885fcd-caf3-4c24-a3a9-04dbfcef7b58) WaitActive WaitActive New ds_transition to WaitActive
23545 Sep 22 23:14:28.309 INFO [2] Transition from New to WaitActive
23546 The guest has requested activation
23547 Sep 22 23:14:28.309 INFO aa7cbd55-9585-444d-917b-34b9d1d5a423 active request set
23548 Sep 22 23:14:28.309 INFO [0] received activate with gen 2
23549 Sep 22 23:14:28.309 INFO [0] client got ds_active_rx, promote! session 76885fcd-caf3-4c24-a3a9-04dbfcef7b58
23550 Sep 22 23:14:28.309 INFO [1] received activate with gen 2
23551 Sep 22 23:14:28.309 INFO [1] client got ds_active_rx, promote! session 76885fcd-caf3-4c24-a3a9-04dbfcef7b58
23552 Sep 22 23:14:28.310 INFO [2] received activate with gen 2
23553 Sep 22 23:14:28.310 INFO [2] client got ds_active_rx, promote! session 76885fcd-caf3-4c24-a3a9-04dbfcef7b58
23554 Sep 22 23:14:28.310 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 } to UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 }
23555 Sep 22 23:14:28.310 WARN Signaling to UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 } thread that UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 } is being promoted (read-write)
23556 Sep 22 23:14:28.310 INFO UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 } is now active (read-write)
23557 Sep 22 23:14:28.310 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 } to UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 }
23558 Sep 22 23:14:28.310 WARN Signaling to UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 } thread that UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 } is being promoted (read-write)
23559 Sep 22 23:14:28.310 INFO UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 } is now active (read-write)
23560 Sep 22 23:14:28.310 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 } to UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 }
23561 Sep 22 23:14:28.310 WARN Signaling to UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 } thread that UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 } is being promoted (read-write)
23562 Sep 22 23:14:28.310 INFO UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 } is now active (read-write)
23563 Sep 22 23:14:28.310 WARN Another upstairs UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 }, task: main
23564 Sep 22 23:14:28.310 INFO connection (127.0.0.1:59929): all done
23565 Sep 22 23:14:28.310 WARN Another upstairs UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 }, task: main
23566 Sep 22 23:14:28.310 INFO connection (127.0.0.1:32800): all done
23567 Sep 22 23:14:28.310 WARN Another upstairs UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: aa7cbd55-9585-444d-917b-34b9d1d5a423, session_id: ae2b86c4-327d-41e8-b04c-5803e0cf8d60, gen: 1 }, task: main
23568 Sep 22 23:14:28.310 INFO connection (127.0.0.1:55758): all done
235692023-09-22T23:14:28.311ZERROcrucible-pantry (datafile): [0] aa7cbd55-9585-444d-917b-34b9d1d5a423 (ae2b86c4-327d-41e8-b04c-5803e0cf8d60) cmd_loop saw YouAreNoLongerActive aa7cbd55-9585-444d-917b-34b9d1d5a423 76885fcd-caf3-4c24-a3a9-04dbfcef7b58 2
235702023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): [0] aa7cbd55-9585-444d-917b-34b9d1d5a423 (ae2b86c4-327d-41e8-b04c-5803e0cf8d60) Active Active Active ds_transition to Disabled
235712023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): [0] Transition from Active to Disabled
235722023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 set inactive, session ae2b86c4-327d-41e8-b04c-5803e0cf8d60
235732023-09-22T23:14:28.311ZERROcrucible-pantry (datafile): 127.0.0.1:64713: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 0
235742023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): [0] aa7cbd55-9585-444d-917b-34b9d1d5a423 Gone missing, transition from Disabled to Disconnected
235752023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): [0] aa7cbd55-9585-444d-917b-34b9d1d5a423 connection to 127.0.0.1:64713 closed looper = 0
235762023-09-22T23:14:28.311ZERROcrucible-pantry (datafile): [1] aa7cbd55-9585-444d-917b-34b9d1d5a423 (ae2b86c4-327d-41e8-b04c-5803e0cf8d60) cmd_loop saw YouAreNoLongerActive aa7cbd55-9585-444d-917b-34b9d1d5a423 76885fcd-caf3-4c24-a3a9-04dbfcef7b58 2
235772023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): [1] aa7cbd55-9585-444d-917b-34b9d1d5a423 (ae2b86c4-327d-41e8-b04c-5803e0cf8d60) Disconnected Active Active ds_transition to Disabled
235782023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): [1] Transition from Active to Disabled
235792023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 set inactive, session ae2b86c4-327d-41e8-b04c-5803e0cf8d60
235802023-09-22T23:14:28.311ZERROcrucible-pantry (datafile): 127.0.0.1:60775: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 1
235812023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): [1] aa7cbd55-9585-444d-917b-34b9d1d5a423 Gone missing, transition from Disabled to Disconnected
235822023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): [1] aa7cbd55-9585-444d-917b-34b9d1d5a423 connection to 127.0.0.1:60775 closed looper = 1
235832023-09-22T23:14:28.311ZERROcrucible-pantry (datafile): [2] aa7cbd55-9585-444d-917b-34b9d1d5a423 (ae2b86c4-327d-41e8-b04c-5803e0cf8d60) cmd_loop saw YouAreNoLongerActive aa7cbd55-9585-444d-917b-34b9d1d5a423 76885fcd-caf3-4c24-a3a9-04dbfcef7b58 2
235842023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): [2] aa7cbd55-9585-444d-917b-34b9d1d5a423 (ae2b86c4-327d-41e8-b04c-5803e0cf8d60) Disconnected Disconnected Active ds_transition to Disabled
235852023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): [2] Transition from Active to Disabled
23586 Sep 22 23:14:28.311 INFO [0] downstairs client at 127.0.0.1:64713 has UUID 16035766-fcf1-4aff-8be5-4662118fbeba
23587 Sep 22 23:14:28.311 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 16035766-fcf1-4aff-8be5-4662118fbeba, encrypted: true, database_read_version: 1, database_write_version: 1 }
235882023-09-22T23:14:28.311ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 set inactive, session ae2b86c4-327d-41e8-b04c-5803e0cf8d60
23589 Sep 22 23:14:28.311 INFO aa7cbd55-9585-444d-917b-34b9d1d5a423 WaitActive WaitActive WaitActive
235902023-09-22T23:14:28.311ZERROcrucible-pantry (datafile): 127.0.0.1:35797: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 2
235912023-09-22T23:14:28.312ZINFOcrucible-pantry (datafile): [2] aa7cbd55-9585-444d-917b-34b9d1d5a423 Gone missing, transition from Disabled to Disconnected
23592 Sep 22 23:14:28.312 INFO [1] downstairs client at 127.0.0.1:60775 has UUID c1ff2b45-bb0b-4e95-bfb6-6e2ba62da531
23593 Sep 22 23:14:28.312 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: c1ff2b45-bb0b-4e95-bfb6-6e2ba62da531, encrypted: true, database_read_version: 1, database_write_version: 1 }
235942023-09-22T23:14:28.312ZINFOcrucible-pantry (datafile): [2] aa7cbd55-9585-444d-917b-34b9d1d5a423 connection to 127.0.0.1:35797 closed looper = 2
23595 Sep 22 23:14:28.312 INFO aa7cbd55-9585-444d-917b-34b9d1d5a423 WaitActive WaitActive WaitActive
235962023-09-22T23:14:28.312ZWARNcrucible-pantry (datafile): [0] pm_task rx.recv() is None
235972023-09-22T23:14:28.312ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:64713 task reports connection:false
23598 Sep 22 23:14:28.312 INFO [2] downstairs client at 127.0.0.1:35797 has UUID f77cd6ac-8353-429b-8c45-d7fd309de24d
23599 Sep 22 23:14:28.312 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f77cd6ac-8353-429b-8c45-d7fd309de24d, encrypted: true, database_read_version: 1, database_write_version: 1 }
236002023-09-22T23:14:28.312ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 Disconnected Disconnected Disconnected
23601 Sep 22 23:14:28.312 INFO aa7cbd55-9585-444d-917b-34b9d1d5a423 WaitActive WaitActive WaitActive
236022023-09-22T23:14:28.312ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:64713 task reports offline
236032023-09-22T23:14:28.312ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:60775 task reports connection:false
236042023-09-22T23:14:28.312ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 Disconnected Disconnected Disconnected
23605 Sep 22 23:14:28.312 INFO Current flush_numbers [0..12]: [1, 1]
236062023-09-22T23:14:28.312ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:60775 task reports offline
236072023-09-22T23:14:28.312ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:35797 task reports connection:false
236082023-09-22T23:14:28.312ZINFOcrucible-pantry (datafile): aa7cbd55-9585-444d-917b-34b9d1d5a423 Disconnected Disconnected Disconnected
236092023-09-22T23:14:28.312ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:35797 task reports offline
236102023-09-22T23:14:28.312ZWARNcrucible-pantry (datafile): [1] pm_task rx.recv() is None
236112023-09-22T23:14:28.312ZWARNcrucible-pantry (datafile): [2] pm_task rx.recv() is None
23612 Sep 22 23:14:28.313 INFO Downstairs has completed Negotiation, task: proc
23613 Sep 22 23:14:28.313 INFO current number of open files limit 65536 is already the maximum
23614 Sep 22 23:14:28.313 INFO Opened existing region file "/tmp/downstairs-XcMs9S9H/region.json"
23615 Sep 22 23:14:28.313 INFO Database read version 1
23616 Sep 22 23:14:28.313 INFO Database write version 1
23617 Sep 22 23:14:28.313 INFO Current flush_numbers [0..12]: [1, 1]
23618 Sep 22 23:14:28.313 INFO Downstairs has completed Negotiation, task: proc
23619 Sep 22 23:14:28.313 INFO current number of open files limit 65536 is already the maximum
23620 Sep 22 23:14:28.313 INFO Current flush_numbers [0..12]: [1, 1]
23621 Sep 22 23:14:28.313 INFO Created new region file "/tmp/downstairs-kApwRwyr/region.json"
23622 Sep 22 23:14:28.314 INFO UUID: 46e0ee2f-0f49-4348-a954-78960194fdcd
23623 Sep 22 23:14:28.314 INFO Blocks per extent:5 Total Extents: 2
23624 Sep 22 23:14:28.314 INFO Downstairs has completed Negotiation, task: proc
23625 Sep 22 23:14:28.314 INFO Crucible Version: Crucible Version: 0.0.1
23626 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23627 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23628 rustc: 1.70.0 stable x86_64-unknown-illumos
23629 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23630 Sep 22 23:14:28.314 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23631 Sep 22 23:14:28.314 INFO Using address: 127.0.0.1:60580, task: main
23632 Sep 22 23:14:28.314 INFO [0] aa7cbd55-9585-444d-917b-34b9d1d5a423 (76885fcd-caf3-4c24-a3a9-04dbfcef7b58) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
23633 Sep 22 23:14:28.314 INFO [0] Transition from WaitActive to WaitQuorum
23634 Sep 22 23:14:28.314 WARN [0] new RM replaced this: None
23635 Sep 22 23:14:28.314 INFO [0] Starts reconcile loop
23636 Sep 22 23:14:28.314 INFO Repair listens on 127.0.0.1:0, task: repair
23637 Sep 22 23:14:28.314 INFO [1] aa7cbd55-9585-444d-917b-34b9d1d5a423 (76885fcd-caf3-4c24-a3a9-04dbfcef7b58) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
23638 Sep 22 23:14:28.314 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52587, task: repair
23639 Sep 22 23:14:28.314 INFO [1] Transition from WaitActive to WaitQuorum
23640 Sep 22 23:14:28.314 WARN [1] new RM replaced this: None
23641 Sep 22 23:14:28.314 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52587, task: repair
23642 Sep 22 23:14:28.314 INFO [1] Starts reconcile loop
23643 Sep 22 23:14:28.314 INFO listening, local_addr: 127.0.0.1:52587, task: repair
23644 Sep 22 23:14:28.314 INFO [2] aa7cbd55-9585-444d-917b-34b9d1d5a423 (76885fcd-caf3-4c24-a3a9-04dbfcef7b58) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
23645 Sep 22 23:14:28.314 INFO [2] Transition from WaitActive to WaitQuorum
23646 Sep 22 23:14:28.314 WARN [2] new RM replaced this: None
23647 Sep 22 23:14:28.314 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52587, task: repair
23648 Sep 22 23:14:28.315 INFO [2] Starts reconcile loop
23649 Sep 22 23:14:28.315 INFO Using repair address: 127.0.0.1:52587, task: main
23650 Sep 22 23:14:28.315 INFO No SSL acceptor configured, task: main
23651 Sep 22 23:14:28.315 INFO [0] 127.0.0.1:64713 task reports connection:true
23652 Sep 22 23:14:28.315 INFO aa7cbd55-9585-444d-917b-34b9d1d5a423 WaitQuorum WaitQuorum WaitQuorum
23653 Sep 22 23:14:28.315 INFO [0]R flush_numbers: [1, 1]
23654 Sep 22 23:14:28.315 INFO [0]R generation: [1, 1]
23655 Sep 22 23:14:28.315 INFO [0]R dirty: [false, false]
23656 Sep 22 23:14:28.315 INFO [1]R flush_numbers: [1, 1]
23657 Sep 22 23:14:28.315 INFO [1]R generation: [1, 1]
23658 Sep 22 23:14:28.315 INFO [1]R dirty: [false, false]
23659 Sep 22 23:14:28.315 INFO [2]R flush_numbers: [1, 1]
23660 Sep 22 23:14:28.315 INFO [2]R generation: [1, 1]
23661 Sep 22 23:14:28.315 INFO [2]R dirty: [false, false]
23662 Sep 22 23:14:28.315 INFO Max found gen is 2
23663 Sep 22 23:14:28.315 INFO Generation requested: 2 >= found:2
23664 Sep 22 23:14:28.315 INFO Next flush: 2
23665 Sep 22 23:14:28.315 INFO All extents match
23666 Sep 22 23:14:28.315 INFO No downstairs repair required
23667 Sep 22 23:14:28.315 INFO No initial repair work was required
23668 Sep 22 23:14:28.315 INFO Set Downstairs and Upstairs active
23669 Sep 22 23:14:28.315 INFO current number of open files limit 65536 is already the maximum
23670 Sep 22 23:14:28.315 INFO aa7cbd55-9585-444d-917b-34b9d1d5a423 is now active with session: 76885fcd-caf3-4c24-a3a9-04dbfcef7b58
23671 Sep 22 23:14:28.315 INFO aa7cbd55-9585-444d-917b-34b9d1d5a423 Set Active after no repair
23672 Sep 22 23:14:28.315 INFO Notify all downstairs, region set compare is done.
23673 Sep 22 23:14:28.315 INFO Created new region file "/tmp/downstairs-1NlQYu1c/region.json"
23674 Sep 22 23:14:28.315 INFO Set check for repair
23675 Sep 22 23:14:28.315 INFO [1] 127.0.0.1:60775 task reports connection:true
23676 Sep 22 23:14:28.315 INFO aa7cbd55-9585-444d-917b-34b9d1d5a423 Active Active Active
23677 Sep 22 23:14:28.315 INFO Set check for repair
23678 Sep 22 23:14:28.315 INFO [2] 127.0.0.1:35797 task reports connection:true
23679 Sep 22 23:14:28.315 INFO aa7cbd55-9585-444d-917b-34b9d1d5a423 Active Active Active
23680 Sep 22 23:14:28.315 INFO Set check for repair
23681 Sep 22 23:14:28.315 INFO [0] received reconcile message
23682 Sep 22 23:14:28.315 INFO [0] All repairs completed, exit
23683 Sep 22 23:14:28.315 INFO [0] Starts cmd_loop
23684 Sep 22 23:14:28.315 INFO [1] received reconcile message
23685 Sep 22 23:14:28.316 INFO [1] All repairs completed, exit
23686 Sep 22 23:14:28.316 INFO [1] Starts cmd_loop
23687 Sep 22 23:14:28.316 INFO [2] received reconcile message
23688 Sep 22 23:14:28.316 INFO [2] All repairs completed, exit
23689 Sep 22 23:14:28.316 INFO [2] Starts cmd_loop
23690 The guest has finished waiting for activation
23691 Sep 22 23:14:28.316 DEBG IO Read 1000 has deps []
23692 Sep 22 23:14:28.317 DEBG Read :1000 deps:[] res:true
23693 Sep 22 23:14:28.318 DEBG Read :1000 deps:[] res:true
23694 Sep 22 23:14:28.319 DEBG Read :1000 deps:[] res:true
23695 Sep 22 23:14:28.319 INFO current number of open files limit 65536 is already the maximum
23696 Sep 22 23:14:28.319 INFO Opened existing region file "/tmp/downstairs-1NlQYu1c/region.json"
23697 Sep 22 23:14:28.319 INFO Database read version 1
23698 Sep 22 23:14:28.319 INFO Database write version 1
23699 Sep 22 23:14:28.320 INFO UUID: 3b4e7e40-734f-4eaa-bb4c-32e1ad5c2936
23700 Sep 22 23:14:28.320 INFO Blocks per extent:5 Total Extents: 2
23701 Sep 22 23:14:28.321 INFO Crucible Version: Crucible Version: 0.0.1
23702 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23703 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23704 rustc: 1.70.0 stable x86_64-unknown-illumos
23705 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23706 Sep 22 23:14:28.321 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23707 Sep 22 23:14:28.321 INFO Using address: 127.0.0.1:59657, task: main
23708 Sep 22 23:14:28.321 INFO Repair listens on 127.0.0.1:0, task: repair
23709 Sep 22 23:14:28.321 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:47357, task: repair
23710 Sep 22 23:14:28.321 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:47357, task: repair
23711 Sep 22 23:14:28.321 INFO listening, local_addr: 127.0.0.1:47357, task: repair
23712 Sep 22 23:14:28.321 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:47357, task: repair
23713 Sep 22 23:14:28.321 INFO Using repair address: 127.0.0.1:47357, task: main
23714 Sep 22 23:14:28.321 INFO No SSL acceptor configured, task: main
23715 Sep 22 23:14:28.321 INFO current number of open files limit 65536 is already the maximum
23716 Sep 22 23:14:28.321 INFO Created new region file "/tmp/downstairs-pDAr1mVj/region.json"
23717 Sep 22 23:14:28.323 DEBG [0] Read AckReady 1000, : downstairs
23718 Sep 22 23:14:28.325 INFO current number of open files limit 65536 is already the maximum
23719 Sep 22 23:14:28.325 INFO Opened existing region file "/tmp/downstairs-pDAr1mVj/region.json"
23720 Sep 22 23:14:28.325 INFO Database read version 1
23721 Sep 22 23:14:28.325 INFO Database write version 1
23722 Sep 22 23:14:28.325 DEBG [1] Read already AckReady 1000, : downstairs
23723 Sep 22 23:14:28.328 INFO UUID: 9706eaea-4b57-4d6f-860a-37f6ab1693f0
23724 Sep 22 23:14:28.328 INFO Blocks per extent:5 Total Extents: 2
23725 Sep 22 23:14:28.328 INFO Crucible Version: Crucible Version: 0.0.1
23726 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
23727 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
23728 rustc: 1.70.0 stable x86_64-unknown-illumos
23729 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
23730 Sep 22 23:14:28.328 INFO Upstairs <-> Downstairs Message Version: 4, task: main
23731 Sep 22 23:14:28.328 INFO Using address: 127.0.0.1:60352, task: main
23732 Sep 22 23:14:28.328 DEBG [2] Read already AckReady 1000, : downstairs
23733 Sep 22 23:14:28.328 DEBG up_ds_listen was notified
23734 Sep 22 23:14:28.328 DEBG up_ds_listen process 1000
23735 Sep 22 23:14:28.328 DEBG [A] ack job 1000:1, : downstairs
23736 Sep 22 23:14:28.328 INFO Repair listens on 127.0.0.1:0, task: repair
23737 Sep 22 23:14:28.328 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:37633, task: repair
23738 Sep 22 23:14:28.328 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:37633, task: repair
23739 Sep 22 23:14:28.328 INFO listening, local_addr: 127.0.0.1:37633, task: repair
23740 Sep 22 23:14:28.329 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:37633, task: repair
23741 Sep 22 23:14:28.329 INFO Using repair address: 127.0.0.1:37633, task: main
23742 Sep 22 23:14:28.329 INFO No SSL acceptor configured, task: main
23743 Sep 22 23:14:28.329 DEBG up_ds_listen checked 1 jobs, back to waiting
23744 Sep 22 23:14:28.329 INFO Upstairs starts
23745 Sep 22 23:14:28.329 INFO Crucible Version: BuildInfo {
23746 version: "0.0.1",
23747 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
23748 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
23749 git_branch: "main",
23750 rustc_semver: "1.70.0",
23751 rustc_channel: "stable",
23752 rustc_host_triple: "x86_64-unknown-illumos",
23753 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
23754 cargo_triple: "x86_64-unknown-illumos",
23755 debug: true,
23756 opt_level: 0,
23757 }
23758 Sep 22 23:14:28.329 INFO Upstairs <-> Downstairs Message Version: 4
23759 Sep 22 23:14:28.329 INFO Crucible stats registered with UUID: 4aa9f481-1957-4d0f-b88c-338224283496
23760 Sep 22 23:14:28.329 INFO Crucible 4aa9f481-1957-4d0f-b88c-338224283496 has session id: 4055688a-4d6b-46d4-b6db-8d303b02a8c6
23761 Sep 22 23:14:28.329 INFO listening on 127.0.0.1:0, task: main
23762 Sep 22 23:14:28.329 INFO listening on 127.0.0.1:0, task: main
23763 Sep 22 23:14:28.329 INFO listening on 127.0.0.1:0, task: main
23764 Sep 22 23:14:28.329 INFO [0] connecting to 127.0.0.1:60580, looper: 0
23765 Sep 22 23:14:28.329 INFO [1] connecting to 127.0.0.1:59657, looper: 1
23766 Sep 22 23:14:28.329 INFO [2] connecting to 127.0.0.1:60352, looper: 2
23767 Sep 22 23:14:28.330 INFO up_listen starts, task: up_listen
23768 Sep 22 23:14:28.330 INFO Wait for all three downstairs to come online
23769 Sep 22 23:14:28.330 INFO Flush timeout: 0.5
23770 Sep 22 23:14:28.330 INFO accepted connection from 127.0.0.1:57962, task: main
23771 Sep 22 23:14:28.330 INFO accepted connection from 127.0.0.1:48077, task: main
23772 Sep 22 23:14:28.330 INFO accepted connection from 127.0.0.1:51233, task: main
23773 Sep 22 23:14:28.330 INFO [0] 4aa9f481-1957-4d0f-b88c-338224283496 looper connected, looper: 0
23774 Sep 22 23:14:28.330 INFO [0] Proc runs for 127.0.0.1:60580 in state New
23775 Sep 22 23:14:28.330 INFO [1] 4aa9f481-1957-4d0f-b88c-338224283496 looper connected, looper: 1
23776 Sep 22 23:14:28.330 INFO [1] Proc runs for 127.0.0.1:59657 in state New
23777 Sep 22 23:14:28.330 INFO [2] 4aa9f481-1957-4d0f-b88c-338224283496 looper connected, looper: 2
23778 Sep 22 23:14:28.330 INFO [2] Proc runs for 127.0.0.1:60352 in state New
23779 Sep 22 23:14:28.330 INFO Connection request from 4aa9f481-1957-4d0f-b88c-338224283496 with version 4, task: proc
23780 Sep 22 23:14:28.330 INFO upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 601739f4-d570-4d2f-845d-5eff25c7a668, gen: 1 } connected, version 4, task: proc
23781 Sep 22 23:14:28.331 INFO Connection request from 4aa9f481-1957-4d0f-b88c-338224283496 with version 4, task: proc
23782 Sep 22 23:14:28.331 INFO upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 601739f4-d570-4d2f-845d-5eff25c7a668, gen: 1 } connected, version 4, task: proc
23783 Sep 22 23:14:28.331 INFO Connection request from 4aa9f481-1957-4d0f-b88c-338224283496 with version 4, task: proc
23784 Sep 22 23:14:28.331 INFO upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 601739f4-d570-4d2f-845d-5eff25c7a668, gen: 1 } connected, version 4, task: proc
23785 Sep 22 23:14:28.331 INFO [0] 4aa9f481-1957-4d0f-b88c-338224283496 (601739f4-d570-4d2f-845d-5eff25c7a668) New New New ds_transition to WaitActive
23786 Sep 22 23:14:28.331 INFO [0] Transition from New to WaitActive
23787 Sep 22 23:14:28.331 INFO [1] 4aa9f481-1957-4d0f-b88c-338224283496 (601739f4-d570-4d2f-845d-5eff25c7a668) WaitActive New New ds_transition to WaitActive
23788 Sep 22 23:14:28.331 INFO [1] Transition from New to WaitActive
23789 Sep 22 23:14:28.331 INFO [2] 4aa9f481-1957-4d0f-b88c-338224283496 (601739f4-d570-4d2f-845d-5eff25c7a668) WaitActive WaitActive New ds_transition to WaitActive
23790 Sep 22 23:14:28.331 INFO [2] Transition from New to WaitActive
23791 The guest has requested activation
23792 Sep 22 23:14:28.331 INFO 4aa9f481-1957-4d0f-b88c-338224283496 active request set
23793 Sep 22 23:14:28.331 INFO [0] received activate with gen 1
23794 Sep 22 23:14:28.331 INFO [0] client got ds_active_rx, promote! session 601739f4-d570-4d2f-845d-5eff25c7a668
23795 Sep 22 23:14:28.331 INFO [1] received activate with gen 1
23796 Sep 22 23:14:28.331 INFO [1] client got ds_active_rx, promote! session 601739f4-d570-4d2f-845d-5eff25c7a668
23797 Sep 22 23:14:28.331 INFO [2] received activate with gen 1
23798 Sep 22 23:14:28.331 INFO [2] client got ds_active_rx, promote! session 601739f4-d570-4d2f-845d-5eff25c7a668
23799 Sep 22 23:14:28.331 INFO UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 601739f4-d570-4d2f-845d-5eff25c7a668, gen: 1 } is now active (read-write)
23800 Sep 22 23:14:28.332 INFO UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 601739f4-d570-4d2f-845d-5eff25c7a668, gen: 1 } is now active (read-write)
23801 Sep 22 23:14:28.332 INFO UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 601739f4-d570-4d2f-845d-5eff25c7a668, gen: 1 } is now active (read-write)
23802 Sep 22 23:14:28.332 INFO [0] downstairs client at 127.0.0.1:60580 has UUID 46e0ee2f-0f49-4348-a954-78960194fdcd
23803 Sep 22 23:14:28.332 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 46e0ee2f-0f49-4348-a954-78960194fdcd, encrypted: true, database_read_version: 1, database_write_version: 1 }
23804 Sep 22 23:14:28.332 INFO 4aa9f481-1957-4d0f-b88c-338224283496 WaitActive WaitActive WaitActive
23805 Sep 22 23:14:28.332 INFO [1] downstairs client at 127.0.0.1:59657 has UUID 3b4e7e40-734f-4eaa-bb4c-32e1ad5c2936
23806 Sep 22 23:14:28.332 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 3b4e7e40-734f-4eaa-bb4c-32e1ad5c2936, encrypted: true, database_read_version: 1, database_write_version: 1 }
23807 Sep 22 23:14:28.332 INFO 4aa9f481-1957-4d0f-b88c-338224283496 WaitActive WaitActive WaitActive
23808 Sep 22 23:14:28.332 INFO [2] downstairs client at 127.0.0.1:60352 has UUID 9706eaea-4b57-4d6f-860a-37f6ab1693f0
23809 Sep 22 23:14:28.332 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9706eaea-4b57-4d6f-860a-37f6ab1693f0, encrypted: true, database_read_version: 1, database_write_version: 1 }
23810 Sep 22 23:14:28.332 INFO 4aa9f481-1957-4d0f-b88c-338224283496 WaitActive WaitActive WaitActive
23811 Sep 22 23:14:28.333 INFO Current flush_numbers [0..12]: [0, 0]
23812 Sep 22 23:14:28.333 INFO Downstairs has completed Negotiation, task: proc
23813 Sep 22 23:14:28.334 INFO Current flush_numbers [0..12]: [0, 0]
23814 Sep 22 23:14:28.335 INFO Downstairs has completed Negotiation, task: proc
23815 Sep 22 23:14:28.335 INFO Current flush_numbers [0..12]: [0, 0]
23816 Sep 22 23:14:28.335 INFO Downstairs has completed Negotiation, task: proc
23817 Sep 22 23:14:28.335 INFO [0] 4aa9f481-1957-4d0f-b88c-338224283496 (601739f4-d570-4d2f-845d-5eff25c7a668) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
23818 Sep 22 23:14:28.335 INFO [0] Transition from WaitActive to WaitQuorum
23819 Sep 22 23:14:28.335 WARN [0] new RM replaced this: None
23820 Sep 22 23:14:28.335 INFO [0] Starts reconcile loop
23821 Sep 22 23:14:28.335 INFO [1] 4aa9f481-1957-4d0f-b88c-338224283496 (601739f4-d570-4d2f-845d-5eff25c7a668) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
23822 Sep 22 23:14:28.336 INFO [1] Transition from WaitActive to WaitQuorum
23823 test test::test_pantry_bulk_write ... Sep 22 23:14:28.336 WARN [1] new RM replaced this: None
23824 ok
23825 Sep 22 23:14:28.336 INFO [1] Starts reconcile loop
23826 Sep 22 23:14:28.336 INFO [2] 4aa9f481-1957-4d0f-b88c-338224283496 (601739f4-d570-4d2f-845d-5eff25c7a668) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
23827 Sep 22 23:14:28.336 INFO [2] Transition from WaitActive to WaitQuorum
23828 Sep 22 23:14:28.336 WARN [2] new RM replaced this: None
23829 Sep 22 23:14:28.336 INFO [2] Starts reconcile loop
23830 Sep 22 23:14:28.336 INFO current number of open files limit 65536 is already the maximum
23831 Sep 22 23:14:28.336 INFO [0] 127.0.0.1:60580 task reports connection:true
23832 Sep 22 23:14:28.336 INFO 4aa9f481-1957-4d0f-b88c-338224283496 WaitQuorum WaitQuorum WaitQuorum
23833 Sep 22 23:14:28.336 INFO Created new region file "/tmp/downstairs-cOOWrUaJ/region.json"
23834 Sep 22 23:14:28.336 INFO [0]R flush_numbers: [0, 0]
23835 Sep 22 23:14:28.336 INFO [0]R generation: [0, 0]
23836 Sep 22 23:14:28.336 INFO [0]R dirty: [false, false]
23837 Sep 22 23:14:28.336 INFO [1]R flush_numbers: [0, 0]
23838 Sep 22 23:14:28.336 INFO [1]R generation: [0, 0]
23839 Sep 22 23:14:28.336 INFO [1]R dirty: [false, false]
23840 Sep 22 23:14:28.336 INFO [2]R flush_numbers: [0, 0]
23841 Sep 22 23:14:28.336 INFO [2]R generation: [0, 0]
23842 Sep 22 23:14:28.336 INFO [2]R dirty: [false, false]
23843 Sep 22 23:14:28.336 INFO Max found gen is 1
23844 Sep 22 23:14:28.336 INFO Generation requested: 1 >= found:1
23845 Sep 22 23:14:28.336 INFO Next flush: 1
23846 Sep 22 23:14:28.336 INFO All extents match
23847 Sep 22 23:14:28.336 INFO No downstairs repair required
23848 Sep 22 23:14:28.336 INFO No initial repair work was required
23849 Sep 22 23:14:28.336 INFO Set Downstairs and Upstairs active
23850 Sep 22 23:14:28.336 INFO 4aa9f481-1957-4d0f-b88c-338224283496 is now active with session: 601739f4-d570-4d2f-845d-5eff25c7a668
23851 Sep 22 23:14:28.336 INFO 4aa9f481-1957-4d0f-b88c-338224283496 Set Active after no repair
23852 Sep 22 23:14:28.336 INFO Notify all downstairs, region set compare is done.
23853 Sep 22 23:14:28.336 INFO Set check for repair
23854 Sep 22 23:14:28.336 INFO [1] 127.0.0.1:59657 task reports connection:true
23855 Sep 22 23:14:28.336 INFO 4aa9f481-1957-4d0f-b88c-338224283496 Active Active Active
23856 Sep 22 23:14:28.336 INFO Set check for repair
23857 Sep 22 23:14:28.336 INFO [2] 127.0.0.1:60352 task reports connection:true
23858 Sep 22 23:14:28.336 INFO 4aa9f481-1957-4d0f-b88c-338224283496 Active Active Active
23859 Sep 22 23:14:28.336 INFO Set check for repair
23860 Sep 22 23:14:28.336 INFO [0] received reconcile message
23861 Sep 22 23:14:28.336 INFO [0] All repairs completed, exit
23862 Sep 22 23:14:28.336 INFO [0] Starts cmd_loop
23863 Sep 22 23:14:28.336 INFO [1] received reconcile message
23864 Sep 22 23:14:28.336 INFO [1] All repairs completed, exit
23865 Sep 22 23:14:28.336 INFO [1] Starts cmd_loop
23866 Sep 22 23:14:28.337 INFO [2] received reconcile message
23867 Sep 22 23:14:28.337 INFO [2] All repairs completed, exit
23868 Sep 22 23:14:28.337 INFO [2] Starts cmd_loop
23869 The guest has finished waiting for activation
23870 Sep 22 23:14:28.337 DEBG IO Read 1000 has deps []
23871 Sep 22 23:14:28.338 DEBG Read :1000 deps:[] res:true
23872 Sep 22 23:14:28.339 DEBG Read :1000 deps:[] res:true
23873 Sep 22 23:14:28.339 DEBG Read :1000 deps:[] res:true
23874 Sep 22 23:14:28.340 DEBG [0] Read AckReady 1000, : downstairs
23875 Sep 22 23:14:28.340 DEBG [1] Read already AckReady 1000, : downstairs
23876 Sep 22 23:14:28.340 DEBG [2] Read already AckReady 1000, : downstairs
23877 Sep 22 23:14:28.340 DEBG up_ds_listen was notified
23878 Sep 22 23:14:28.340 DEBG up_ds_listen process 1000
23879 Sep 22 23:14:28.340 DEBG [A] ack job 1000:1, : downstairs
23880 Sep 22 23:14:28.341 DEBG up_ds_listen checked 1 jobs, back to waiting
23881 Sep 22 23:14:28.341 INFO Request to deactivate this guest
23882 Sep 22 23:14:28.341 INFO 4aa9f481-1957-4d0f-b88c-338224283496 set deactivating.
23883 Sep 22 23:14:28.341 DEBG IO Flush 1001 has deps [JobId(1000)]
23884 Sep 22 23:14:28.341 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
23885 Sep 22 23:14:28.341 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
23886 Sep 22 23:14:28.341 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
23887 Sep 22 23:14:28.341 INFO [0] check deactivate YES
23888 Sep 22 23:14:28.342 INFO [0] 4aa9f481-1957-4d0f-b88c-338224283496 (601739f4-d570-4d2f-845d-5eff25c7a668) Active Active Active ds_transition to Deactivated
23889 Sep 22 23:14:28.342 INFO [0] Transition from Active to Deactivated
23890 Sep 22 23:14:28.342 INFO [1] check deactivate YES
23891 Sep 22 23:14:28.342 INFO [1] 4aa9f481-1957-4d0f-b88c-338224283496 (601739f4-d570-4d2f-845d-5eff25c7a668) Deactivated Active Active ds_transition to Deactivated
23892 Sep 22 23:14:28.342 INFO [1] Transition from Active to Deactivated
23893 Sep 22 23:14:28.342 DEBG [2] deactivate flush 1001 done, : downstairs
23894 Sep 22 23:14:28.342 INFO [2] check deactivate YES
23895 Sep 22 23:14:28.342 INFO [2] 4aa9f481-1957-4d0f-b88c-338224283496 (601739f4-d570-4d2f-845d-5eff25c7a668) Deactivated Deactivated Active ds_transition to Deactivated
23896 Sep 22 23:14:28.342 INFO [2] Transition from Active to Deactivated
23897 Sep 22 23:14:28.342 ERRO 127.0.0.1:60580: proc: [0] client work task ended, Ok(Err([0] exits after deactivation)), so we end too, looper: 0
23898 Sep 22 23:14:28.342 INFO [0] 4aa9f481-1957-4d0f-b88c-338224283496 Gone missing, transition from Deactivated to New
23899 Sep 22 23:14:28.342 INFO deactivate transition checking...
23900 Sep 22 23:14:28.342 INFO deactivate_transition New Maybe
23901 Sep 22 23:14:28.342 INFO deactivate_transition Deactivated NO
23902 Sep 22 23:14:28.342 INFO deactivate_transition Deactivated NO
23903 Sep 22 23:14:28.342 INFO [0] 4aa9f481-1957-4d0f-b88c-338224283496 connection to 127.0.0.1:60580 closed, looper: 0
23904 Sep 22 23:14:28.342 ERRO 127.0.0.1:59657: proc: [1] client work task ended, Ok(Err([1] exits after deactivation)), so we end too, looper: 1
23905 Sep 22 23:14:28.342 INFO [1] 4aa9f481-1957-4d0f-b88c-338224283496 Gone missing, transition from Deactivated to New
23906 Sep 22 23:14:28.342 INFO deactivate transition checking...
23907 Sep 22 23:14:28.342 INFO deactivate_transition New Maybe
23908 Sep 22 23:14:28.342 INFO deactivate_transition New Maybe
23909 Sep 22 23:14:28.342 INFO deactivate_transition Deactivated NO
23910 Sep 22 23:14:28.342 INFO [1] 4aa9f481-1957-4d0f-b88c-338224283496 connection to 127.0.0.1:59657 closed, looper: 1
23911 Sep 22 23:14:28.342 DEBG up_ds_listen was notified
23912 Sep 22 23:14:28.342 DEBG up_ds_listen process 1001
23913 Sep 22 23:14:28.342 DEBG [A] ack job 1001:2, : downstairs
23914 Sep 22 23:14:28.342 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
23915 Sep 22 23:14:28.342 DEBG up_ds_listen checked 1 jobs, back to waiting
23916 Sep 22 23:14:28.342 ERRO 127.0.0.1:60352: proc: [2] client work task ended, Ok(Err([2] exits after deactivation)), so we end too, looper: 2
23917 Sep 22 23:14:28.342 INFO [2] 4aa9f481-1957-4d0f-b88c-338224283496 Gone missing, transition from Deactivated to New
23918 Sep 22 23:14:28.342 INFO deactivate transition checking...
23919 Sep 22 23:14:28.342 INFO deactivate_transition New Maybe
23920 Sep 22 23:14:28.342 INFO deactivate_transition New Maybe
23921 Sep 22 23:14:28.342 INFO deactivate_transition New Maybe
23922 Sep 22 23:14:28.342 INFO All DS in the proper state! -> INIT
23923 Sep 22 23:14:28.342 INFO [2] 4aa9f481-1957-4d0f-b88c-338224283496 connection to 127.0.0.1:60352 closed, looper: 2
23924 Sep 22 23:14:28.342 INFO [0] 127.0.0.1:60580 task reports connection:false
23925 Sep 22 23:14:28.342 INFO 4aa9f481-1957-4d0f-b88c-338224283496 New New New
23926 Sep 22 23:14:28.342 INFO [0] 127.0.0.1:60580 task reports offline
23927 Sep 22 23:14:28.342 INFO [1] 127.0.0.1:59657 task reports connection:false
23928 Sep 22 23:14:28.342 INFO 4aa9f481-1957-4d0f-b88c-338224283496 New New New
23929 Sep 22 23:14:28.342 INFO [1] 127.0.0.1:59657 task reports offline
23930 Sep 22 23:14:28.342 INFO [2] 127.0.0.1:60352 task reports connection:false
23931 Sep 22 23:14:28.342 INFO 4aa9f481-1957-4d0f-b88c-338224283496 New New New
23932 Sep 22 23:14:28.342 INFO [2] 127.0.0.1:60352 task reports offline
23933 note: configured to log to "/dev/stdout"
239342023-09-22T23:14:28.345ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:52411
239352023-09-22T23:14:28.345ZINFOcrucible-pantry: listen IP: 127.0.0.1:52411
23936 Sep 22 23:14:28.368 DEBG IO Flush 1001 has deps [JobId(1000)]
23937 Sep 22 23:14:28.370 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
23938 Sep 22 23:14:28.370 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
23939 Sep 22 23:14:28.370 DEBG up_ds_listen was notified
23940 Sep 22 23:14:28.370 DEBG up_ds_listen process 1001
23941 Sep 22 23:14:28.370 DEBG [A] ack job 1001:2, : downstairs
23942 Sep 22 23:14:28.371 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
23943 Sep 22 23:14:28.371 DEBG up_ds_listen checked 1 jobs, back to waiting
23944 Sep 22 23:14:28.391 INFO current number of open files limit 65536 is already the maximum
23945 Sep 22 23:14:28.391 INFO Opened existing region file "/tmp/downstairs-6IEHRjPM/region.json"
23946 Sep 22 23:14:28.391 INFO Database read version 1
23947 Sep 22 23:14:28.391 INFO Database write version 1
23948 Sep 22 23:14:28.425 WARN upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 601739f4-d570-4d2f-845d-5eff25c7a668, gen: 1 } disconnected, 0 jobs left, task: main
23949 Sep 22 23:14:28.425 WARN upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 601739f4-d570-4d2f-845d-5eff25c7a668, gen: 1 } was previously active, clearing, task: main
23950 Sep 22 23:14:28.425 INFO connection (127.0.0.1:57962): all done
23951 Sep 22 23:14:28.425 WARN upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 601739f4-d570-4d2f-845d-5eff25c7a668, gen: 1 } disconnected, 0 jobs left, task: main
23952 Sep 22 23:14:28.426 WARN upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 601739f4-d570-4d2f-845d-5eff25c7a668, gen: 1 } was previously active, clearing, task: main
23953 Sep 22 23:14:28.426 INFO connection (127.0.0.1:51233): all done
23954 Sep 22 23:14:28.426 WARN upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 601739f4-d570-4d2f-845d-5eff25c7a668, gen: 1 } disconnected, 0 jobs left, task: main
23955 Sep 22 23:14:28.426 WARN upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 601739f4-d570-4d2f-845d-5eff25c7a668, gen: 1 } was previously active, clearing, task: main
23956 Sep 22 23:14:28.426 INFO connection (127.0.0.1:48077): all done
239572023-09-22T23:14:28.426ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:52411 remote_addr = 127.0.0.1:52249
239582023-09-22T23:14:28.427ZINFOcrucible-pantry (datafile): no entry exists for volume 7d24406e-7e52-4892-b53c-b8c4894a13ca, constructing...
239592023-09-22T23:14:28.427ZINFOcrucible-pantry (datafile): Upstairs starts
239602023-09-22T23:14:28.427ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
239612023-09-22T23:14:28.427ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
239622023-09-22T23:14:28.427ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 4aa9f481-1957-4d0f-b88c-338224283496
239632023-09-22T23:14:28.427ZINFOcrucible-pantry (datafile): Crucible 4aa9f481-1957-4d0f-b88c-338224283496 has session id: 3adadc24-c69c-4056-8ffa-454bbc56b96f
239642023-09-22T23:14:28.427ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:60580 looper = 0
239652023-09-22T23:14:28.427ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:59657 looper = 1
239662023-09-22T23:14:28.428ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:60352 looper = 2
239672023-09-22T23:14:28.428ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
239682023-09-22T23:14:28.428ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
239692023-09-22T23:14:28.428ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
239702023-09-22T23:14:28.428ZINFOcrucible-pantry (datafile): volume 7d24406e-7e52-4892-b53c-b8c4894a13ca constructed ok
23971 The guest has requested activation
239722023-09-22T23:14:28.428ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 active request set
23973 Sep 22 23:14:28.428 INFO accepted connection from 127.0.0.1:63586, task: main
23974 Sep 22 23:14:28.428 INFO accepted connection from 127.0.0.1:59585, task: main
23975 Sep 22 23:14:28.428 INFO accepted connection from 127.0.0.1:35200, task: main
239762023-09-22T23:14:28.429ZINFOcrucible-pantry (datafile): [0] 4aa9f481-1957-4d0f-b88c-338224283496 looper connected looper = 0
239772023-09-22T23:14:28.429ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:60580 in state New
239782023-09-22T23:14:28.429ZINFOcrucible-pantry (datafile): [1] 4aa9f481-1957-4d0f-b88c-338224283496 looper connected looper = 1
239792023-09-22T23:14:28.429ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:59657 in state New
239802023-09-22T23:14:28.429ZINFOcrucible-pantry (datafile): [2] 4aa9f481-1957-4d0f-b88c-338224283496 looper connected looper = 2
239812023-09-22T23:14:28.429ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:60352 in state New
23982 Sep 22 23:14:28.429 INFO Connection request from 4aa9f481-1957-4d0f-b88c-338224283496 with version 4, task: proc
23983 Sep 22 23:14:28.429 INFO upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 } connected, version 4, task: proc
23984 Sep 22 23:14:28.429 INFO Connection request from 4aa9f481-1957-4d0f-b88c-338224283496 with version 4, task: proc
23985 Sep 22 23:14:28.429 INFO upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 } connected, version 4, task: proc
23986 Sep 22 23:14:28.429 INFO Connection request from 4aa9f481-1957-4d0f-b88c-338224283496 with version 4, task: proc
23987 Sep 22 23:14:28.429 INFO upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 } connected, version 4, task: proc
239882023-09-22T23:14:28.429ZINFOcrucible-pantry (datafile): [0] 4aa9f481-1957-4d0f-b88c-338224283496 (7f994951-798d-4ca9-84d4-c9557433fc1b) New New New ds_transition to WaitActive
239892023-09-22T23:14:28.429ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
239902023-09-22T23:14:28.429ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 7f994951-798d-4ca9-84d4-c9557433fc1b
239912023-09-22T23:14:28.429ZINFOcrucible-pantry (datafile): [1] 4aa9f481-1957-4d0f-b88c-338224283496 (7f994951-798d-4ca9-84d4-c9557433fc1b) WaitActive New New ds_transition to WaitActive
239922023-09-22T23:14:28.430ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
239932023-09-22T23:14:28.430ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 7f994951-798d-4ca9-84d4-c9557433fc1b
239942023-09-22T23:14:28.430ZINFOcrucible-pantry (datafile): [2] 4aa9f481-1957-4d0f-b88c-338224283496 (7f994951-798d-4ca9-84d4-c9557433fc1b) WaitActive WaitActive New ds_transition to WaitActive
239952023-09-22T23:14:28.430ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
239962023-09-22T23:14:28.430ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 7f994951-798d-4ca9-84d4-c9557433fc1b
23997 Sep 22 23:14:28.430 INFO UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 } is now active (read-write)
23998 Sep 22 23:14:28.430 INFO UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 } is now active (read-write)
23999 Sep 22 23:14:28.430 INFO UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 } is now active (read-write)
240002023-09-22T23:14:28.430ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:60580 has UUID 46e0ee2f-0f49-4348-a954-78960194fdcd
240012023-09-22T23:14:28.430ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 46e0ee2f-0f49-4348-a954-78960194fdcd, encrypted: true, database_read_version: 1, database_write_version: 1 }
240022023-09-22T23:14:28.430ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 WaitActive WaitActive WaitActive
240032023-09-22T23:14:28.431ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:59657 has UUID 3b4e7e40-734f-4eaa-bb4c-32e1ad5c2936
240042023-09-22T23:14:28.431ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 3b4e7e40-734f-4eaa-bb4c-32e1ad5c2936, encrypted: true, database_read_version: 1, database_write_version: 1 }
240052023-09-22T23:14:28.431ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 WaitActive WaitActive WaitActive
240062023-09-22T23:14:28.431ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:60352 has UUID 9706eaea-4b57-4d6f-860a-37f6ab1693f0
240072023-09-22T23:14:28.431ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9706eaea-4b57-4d6f-860a-37f6ab1693f0, encrypted: true, database_read_version: 1, database_write_version: 1 }
240082023-09-22T23:14:28.431ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 WaitActive WaitActive WaitActive
24009 Sep 22 23:14:28.431 INFO Current flush_numbers [0..12]: [0, 0]
24010 Sep 22 23:14:28.431 INFO Downstairs has completed Negotiation, task: proc
24011 Sep 22 23:14:28.431 INFO Current flush_numbers [0..12]: [0, 0]
24012 Sep 22 23:14:28.431 INFO Downstairs has completed Negotiation, task: proc
24013 Sep 22 23:14:28.432 INFO Current flush_numbers [0..12]: [0, 0]
24014 Sep 22 23:14:28.432 INFO Downstairs has completed Negotiation, task: proc
240152023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [0] 4aa9f481-1957-4d0f-b88c-338224283496 (7f994951-798d-4ca9-84d4-c9557433fc1b) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
240162023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
240172023-09-22T23:14:28.432ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
240182023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
240192023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [1] 4aa9f481-1957-4d0f-b88c-338224283496 (7f994951-798d-4ca9-84d4-c9557433fc1b) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
240202023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
240212023-09-22T23:14:28.432ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
240222023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
240232023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [2] 4aa9f481-1957-4d0f-b88c-338224283496 (7f994951-798d-4ca9-84d4-c9557433fc1b) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
240242023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
240252023-09-22T23:14:28.432ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
240262023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
240272023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:60580 task reports connection:true
240282023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 WaitQuorum WaitQuorum WaitQuorum
240292023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
240302023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
240312023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
240322023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
24033 The guest has finished waiting for activation
240342023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
240352023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
240362023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
240372023-09-22T23:14:28.432ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
240382023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
240392023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): Max found gen is 1
240402023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
240412023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): Next flush: 1
240422023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): All extents match
240432023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): No downstairs repair required
240442023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): No initial repair work was required
240452023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
240462023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 is now active with session: 7f994951-798d-4ca9-84d4-c9557433fc1b
240472023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 Set Active after no repair
240482023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
240492023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): Set check for repair
240502023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:59657 task reports connection:true
240512023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 Active Active Active
240522023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): Set check for repair
240532023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:60352 task reports connection:true
240542023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 Active Active Active
240552023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): Set check for repair
240562023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): [0] received reconcile message
240572023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
240582023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
240592023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): [1] received reconcile message
240602023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
240612023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
240622023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): [2] received reconcile message
240632023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
240642023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
240652023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): volume 7d24406e-7e52-4892-b53c-b8c4894a13ca activated ok
240662023-09-22T23:14:28.433ZINFOcrucible-pantry (datafile): volume 7d24406e-7e52-4892-b53c-b8c4894a13ca constructed and inserted ok
240672023-09-22T23:14:28.433ZINFOcrucible-pantry (dropshot): request completed latency_us = 6202 local_addr = 127.0.0.1:52411 method = POST remote_addr = 127.0.0.1:52249 req_id = ccc9200f-cdfc-4eec-bdd1-d6849c77d682 response_code = 200 uri = /crucible/pantry/0/volume/7d24406e-7e52-4892-b53c-b8c4894a13ca
240682023-09-22T23:14:28.434ZINFOcrucible-pantry (dropshot): request completed latency_us = 396 local_addr = 127.0.0.1:52411 method = POST remote_addr = 127.0.0.1:52249 req_id = 6f9c82fb-719d-49de-9513-7be45e883cf5 response_code = 200 uri = /crucible/pantry/0/volume/7d24406e-7e52-4892-b53c-b8c4894a13ca/import_from_url
24069 Sep 22 23:14:28.453 INFO current number of open files limit 65536 is already the maximum
24070 Sep 22 23:14:28.453 INFO Opened existing region file "/tmp/downstairs-57IWH8PC/region.json"
24071 Sep 22 23:14:28.453 INFO Database read version 1
24072 Sep 22 23:14:28.453 INFO Database write version 1
24073 Sep 22 23:14:28.532 DEBG Write :1000 deps:[] res:true
24074 Sep 22 23:14:28.533 DEBG Write :1000 deps:[] res:true
24075 Sep 22 23:14:28.534 DEBG Write :1000 deps:[] res:true
24076 Sep 22 23:14:28.536 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
24077 Sep 22 23:14:28.536 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
24078 Sep 22 23:14:28.536 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
240792023-09-22T23:14:28.538ZINFOcrucible-pantry (dropshot): request completed latency_us = 13619 local_addr = 127.0.0.1:52411 method = GET remote_addr = 127.0.0.1:52249 req_id = b8b0a52f-d38c-4a65-94b8-cda1910c7ba1 response_code = 200 uri = /crucible/pantry/0/job/23606df9-c011-4c56-9be8-e905fe1ea6c9/ok
240802023-09-22T23:14:28.539ZINFOcrucible-pantry (datafile): detach removing entry for volume 7d24406e-7e52-4892-b53c-b8c4894a13ca
240812023-09-22T23:14:28.539ZINFOcrucible-pantry (datafile): detaching volume 7d24406e-7e52-4892-b53c-b8c4894a13ca
24082 Sep 22 23:14:28.539 DEBG Flush :1002 extent_limit None deps:[] res:true f:2 g:1
24083 Sep 22 23:14:28.539 DEBG Flush :1002 extent_limit None deps:[] res:true f:2 g:1
24084 Sep 22 23:14:28.539 DEBG Flush :1002 extent_limit None deps:[] res:true f:2 g:1
240852023-09-22T23:14:28.540ZINFOcrucible-pantry (datafile): Request to deactivate this guest
240862023-09-22T23:14:28.540ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 set deactivating.
240872023-09-22T23:14:28.540ZINFOcrucible-pantry (dropshot): request completed latency_us = 1882 local_addr = 127.0.0.1:52411 method = DELETE remote_addr = 127.0.0.1:52249 req_id = cc521b2c-c243-46d4-b701-ffef569983d0 response_code = 204 uri = /crucible/pantry/0/volume/7d24406e-7e52-4892-b53c-b8c4894a13ca
24088 Sep 22 23:14:28.541 INFO Upstairs starts
24089 Sep 22 23:14:28.541 INFO Crucible Version: BuildInfo {
24090 version: "0.0.1",
24091 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
24092 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
24093 git_branch: "main",
24094 rustc_semver: "1.70.0",
24095 rustc_channel: "stable",
24096 rustc_host_triple: "x86_64-unknown-illumos",
24097 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
24098 cargo_triple: "x86_64-unknown-illumos",
24099 debug: true,
24100 opt_level: 0,
24101 }
24102 Sep 22 23:14:28.541 INFO Upstairs <-> Downstairs Message Version: 4
24103 Sep 22 23:14:28.541 INFO Crucible stats registered with UUID: 4aa9f481-1957-4d0f-b88c-338224283496
24104 Sep 22 23:14:28.541 INFO Crucible 4aa9f481-1957-4d0f-b88c-338224283496 has session id: 5347d05f-d3e8-4591-83a5-f28549ba8ac1
24105 Sep 22 23:14:28.541 INFO [0] connecting to 127.0.0.1:60580, looper: 0
24106 Sep 22 23:14:28.541 INFO [1] connecting to 127.0.0.1:59657, looper: 1
24107 Sep 22 23:14:28.541 INFO [2] connecting to 127.0.0.1:60352, looper: 2
24108 Sep 22 23:14:28.541 INFO up_listen starts, task: up_listen
24109 Sep 22 23:14:28.541 INFO Wait for all three downstairs to come online
24110 Sep 22 23:14:28.541 INFO Flush timeout: 0.5
24111 Sep 22 23:14:28.541 INFO accepted connection from 127.0.0.1:55344, task: main
24112 Sep 22 23:14:28.542 INFO accepted connection from 127.0.0.1:57999, task: main
24113 Sep 22 23:14:28.542 INFO accepted connection from 127.0.0.1:46059, task: main
24114 Sep 22 23:14:28.542 INFO [0] 4aa9f481-1957-4d0f-b88c-338224283496 looper connected, looper: 0
24115 Sep 22 23:14:28.542 INFO [0] Proc runs for 127.0.0.1:60580 in state New
24116 Sep 22 23:14:28.542 INFO [1] 4aa9f481-1957-4d0f-b88c-338224283496 looper connected, looper: 1
24117 Sep 22 23:14:28.542 INFO [1] Proc runs for 127.0.0.1:59657 in state New
24118 Sep 22 23:14:28.542 INFO [2] 4aa9f481-1957-4d0f-b88c-338224283496 looper connected, looper: 2
24119 Sep 22 23:14:28.542 INFO [2] Proc runs for 127.0.0.1:60352 in state New
24120 Sep 22 23:14:28.542 INFO Connection request from 4aa9f481-1957-4d0f-b88c-338224283496 with version 4, task: proc
24121 Sep 22 23:14:28.542 INFO upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 } connected, version 4, task: proc
24122 Sep 22 23:14:28.542 INFO Connection request from 4aa9f481-1957-4d0f-b88c-338224283496 with version 4, task: proc
24123 Sep 22 23:14:28.542 INFO upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 } connected, version 4, task: proc
24124 Sep 22 23:14:28.543 INFO Connection request from 4aa9f481-1957-4d0f-b88c-338224283496 with version 4, task: proc
24125 Sep 22 23:14:28.543 INFO upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 } connected, version 4, task: proc
24126 Sep 22 23:14:28.543 INFO [0] 4aa9f481-1957-4d0f-b88c-338224283496 (e1a9869f-30e8-4dcc-a2a4-de17a7736d6e) New New New ds_transition to WaitActive
24127 Sep 22 23:14:28.543 INFO [0] Transition from New to WaitActive
24128 Sep 22 23:14:28.543 INFO [1] 4aa9f481-1957-4d0f-b88c-338224283496 (e1a9869f-30e8-4dcc-a2a4-de17a7736d6e) WaitActive New New ds_transition to WaitActive
24129 Sep 22 23:14:28.543 INFO [1] Transition from New to WaitActive
24130 Sep 22 23:14:28.543 INFO [2] 4aa9f481-1957-4d0f-b88c-338224283496 (e1a9869f-30e8-4dcc-a2a4-de17a7736d6e) WaitActive WaitActive New ds_transition to WaitActive
24131 Sep 22 23:14:28.543 INFO [2] Transition from New to WaitActive
24132 The guest has requested activation
24133 Sep 22 23:14:28.543 INFO 4aa9f481-1957-4d0f-b88c-338224283496 active request set
24134 Sep 22 23:14:28.543 INFO [0] received activate with gen 3
24135 Sep 22 23:14:28.543 INFO [0] client got ds_active_rx, promote! session e1a9869f-30e8-4dcc-a2a4-de17a7736d6e
24136 Sep 22 23:14:28.543 INFO [1] received activate with gen 3
24137 Sep 22 23:14:28.543 INFO [1] client got ds_active_rx, promote! session e1a9869f-30e8-4dcc-a2a4-de17a7736d6e
24138 Sep 22 23:14:28.543 INFO [2] received activate with gen 3
24139 Sep 22 23:14:28.543 INFO [2] client got ds_active_rx, promote! session e1a9869f-30e8-4dcc-a2a4-de17a7736d6e
24140 Sep 22 23:14:28.544 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 } to UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 }
24141 Sep 22 23:14:28.544 WARN Signaling to UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 } thread that UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 } is being promoted (read-write)
24142 Sep 22 23:14:28.544 INFO UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 } is now active (read-write)
24143 Sep 22 23:14:28.544 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 } to UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 }
24144 Sep 22 23:14:28.544 WARN Signaling to UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 } thread that UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 } is being promoted (read-write)
24145 Sep 22 23:14:28.544 INFO UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 } is now active (read-write)
24146 Sep 22 23:14:28.544 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 } to UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 }
24147 Sep 22 23:14:28.544 WARN Signaling to UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 } thread that UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 } is being promoted (read-write)
24148 Sep 22 23:14:28.544 INFO UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 } is now active (read-write)
24149 Sep 22 23:14:28.544 WARN Another upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 }, task: main
24150 Sep 22 23:14:28.544 INFO connection (127.0.0.1:63586): all done
24151 Sep 22 23:14:28.544 WARN Another upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 }, task: main
24152 Sep 22 23:14:28.544 INFO connection (127.0.0.1:59585): all done
24153 Sep 22 23:14:28.544 WARN Another upstairs UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 4aa9f481-1957-4d0f-b88c-338224283496, session_id: 7f994951-798d-4ca9-84d4-c9557433fc1b, gen: 1 }, task: main
24154 Sep 22 23:14:28.544 INFO connection (127.0.0.1:35200): all done
241552023-09-22T23:14:28.545ZERROcrucible-pantry (datafile): [0] 4aa9f481-1957-4d0f-b88c-338224283496 (7f994951-798d-4ca9-84d4-c9557433fc1b) cmd_loop saw YouAreNoLongerActive 4aa9f481-1957-4d0f-b88c-338224283496 e1a9869f-30e8-4dcc-a2a4-de17a7736d6e 3
241562023-09-22T23:14:28.545ZINFOcrucible-pantry (datafile): [0] 4aa9f481-1957-4d0f-b88c-338224283496 (7f994951-798d-4ca9-84d4-c9557433fc1b) Active Active Active ds_transition to Disabled
241572023-09-22T23:14:28.545ZINFOcrucible-pantry (datafile): [0] Transition from Active to Disabled
241582023-09-22T23:14:28.545ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 set inactive, session 7f994951-798d-4ca9-84d4-c9557433fc1b
241592023-09-22T23:14:28.545ZERROcrucible-pantry (datafile): 127.0.0.1:60580: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 1 looper = 0
241602023-09-22T23:14:28.545ZINFOcrucible-pantry (datafile): [0] 4aa9f481-1957-4d0f-b88c-338224283496 Gone missing, transition from Disabled to Disconnected
241612023-09-22T23:14:28.545ZINFOcrucible-pantry (datafile): [0] 4aa9f481-1957-4d0f-b88c-338224283496 connection to 127.0.0.1:60580 closed looper = 0
241622023-09-22T23:14:28.545ZERROcrucible-pantry (datafile): [1] 4aa9f481-1957-4d0f-b88c-338224283496 (7f994951-798d-4ca9-84d4-c9557433fc1b) cmd_loop saw YouAreNoLongerActive 4aa9f481-1957-4d0f-b88c-338224283496 e1a9869f-30e8-4dcc-a2a4-de17a7736d6e 3
241632023-09-22T23:14:28.545ZINFOcrucible-pantry (datafile): [1] 4aa9f481-1957-4d0f-b88c-338224283496 (7f994951-798d-4ca9-84d4-c9557433fc1b) Disconnected Active Active ds_transition to Disabled
241642023-09-22T23:14:28.545ZINFOcrucible-pantry (datafile): [1] Transition from Active to Disabled
241652023-09-22T23:14:28.545ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 set inactive, session 7f994951-798d-4ca9-84d4-c9557433fc1b
241662023-09-22T23:14:28.545ZERROcrucible-pantry (datafile): 127.0.0.1:59657: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 1 looper = 1
241672023-09-22T23:14:28.545ZINFOcrucible-pantry (datafile): [1] 4aa9f481-1957-4d0f-b88c-338224283496 Gone missing, transition from Disabled to Disconnected
241682023-09-22T23:14:28.545ZINFOcrucible-pantry (datafile): [1] 4aa9f481-1957-4d0f-b88c-338224283496 connection to 127.0.0.1:59657 closed looper = 1
241692023-09-22T23:14:28.545ZERROcrucible-pantry (datafile): [2] 4aa9f481-1957-4d0f-b88c-338224283496 (7f994951-798d-4ca9-84d4-c9557433fc1b) cmd_loop saw YouAreNoLongerActive 4aa9f481-1957-4d0f-b88c-338224283496 e1a9869f-30e8-4dcc-a2a4-de17a7736d6e 3
241702023-09-22T23:14:28.545ZINFOcrucible-pantry (datafile): [2] 4aa9f481-1957-4d0f-b88c-338224283496 (7f994951-798d-4ca9-84d4-c9557433fc1b) Disconnected Disconnected Active ds_transition to Disabled
241712023-09-22T23:14:28.545ZINFOcrucible-pantry (datafile): [2] Transition from Active to Disabled
241722023-09-22T23:14:28.546ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 set inactive, session 7f994951-798d-4ca9-84d4-c9557433fc1b
241732023-09-22T23:14:28.546ZERROcrucible-pantry (datafile): 127.0.0.1:60352: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 1 looper = 2
241742023-09-22T23:14:28.546ZINFOcrucible-pantry (datafile): [2] 4aa9f481-1957-4d0f-b88c-338224283496 Gone missing, transition from Disabled to Disconnected
241752023-09-22T23:14:28.546ZINFOcrucible-pantry (datafile): [2] 4aa9f481-1957-4d0f-b88c-338224283496 connection to 127.0.0.1:60352 closed looper = 2
241762023-09-22T23:14:28.546ZWARNcrucible-pantry (datafile): [0] pm_task rx.recv() is None
241772023-09-22T23:14:28.546ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:60580 task reports connection:false
241782023-09-22T23:14:28.546ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 Disconnected Disconnected Disconnected
241792023-09-22T23:14:28.546ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:60580 task reports offline
241802023-09-22T23:14:28.546ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:59657 task reports connection:false
24181 Sep 22 23:14:28.546 INFO [0] downstairs client at 127.0.0.1:60580 has UUID 46e0ee2f-0f49-4348-a954-78960194fdcd
241822023-09-22T23:14:28.546ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 Disconnected Disconnected Disconnected
24183 Sep 22 23:14:28.546 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 46e0ee2f-0f49-4348-a954-78960194fdcd, encrypted: true, database_read_version: 1, database_write_version: 1 }
241842023-09-22T23:14:28.546ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:59657 task reports offline
24185 Sep 22 23:14:28.546 INFO 4aa9f481-1957-4d0f-b88c-338224283496 WaitActive WaitActive WaitActive
241862023-09-22T23:14:28.546ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:60352 task reports connection:false
241872023-09-22T23:14:28.546ZINFOcrucible-pantry (datafile): 4aa9f481-1957-4d0f-b88c-338224283496 Disconnected Disconnected Disconnected
24188 Sep 22 23:14:28.546 INFO [1] downstairs client at 127.0.0.1:59657 has UUID 3b4e7e40-734f-4eaa-bb4c-32e1ad5c2936
241892023-09-22T23:14:28.546ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:60352 task reports offline
24190 Sep 22 23:14:28.546 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 3b4e7e40-734f-4eaa-bb4c-32e1ad5c2936, encrypted: true, database_read_version: 1, database_write_version: 1 }
241912023-09-22T23:14:28.546ZWARNcrucible-pantry (datafile): [1] pm_task rx.recv() is None
24192 Sep 22 23:14:28.546 INFO 4aa9f481-1957-4d0f-b88c-338224283496 WaitActive WaitActive WaitActive
241932023-09-22T23:14:28.546ZWARNcrucible-pantry (datafile): [2] pm_task rx.recv() is None
24194 Sep 22 23:14:28.546 INFO [2] downstairs client at 127.0.0.1:60352 has UUID 9706eaea-4b57-4d6f-860a-37f6ab1693f0
24195 Sep 22 23:14:28.546 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 9706eaea-4b57-4d6f-860a-37f6ab1693f0, encrypted: true, database_read_version: 1, database_write_version: 1 }
24196 Sep 22 23:14:28.546 INFO 4aa9f481-1957-4d0f-b88c-338224283496 WaitActive WaitActive WaitActive
24197 Sep 22 23:14:28.546 INFO Current flush_numbers [0..12]: [1, 1]
24198 Sep 22 23:14:28.547 INFO Downstairs has completed Negotiation, task: proc
24199 Sep 22 23:14:28.547 INFO Current flush_numbers [0..12]: [1, 1]
24200 Sep 22 23:14:28.548 INFO Downstairs has completed Negotiation, task: proc
24201 Sep 22 23:14:28.548 INFO Current flush_numbers [0..12]: [1, 1]
24202 Sep 22 23:14:28.548 INFO Downstairs has completed Negotiation, task: proc
24203 Sep 22 23:14:28.548 INFO [0] 4aa9f481-1957-4d0f-b88c-338224283496 (e1a9869f-30e8-4dcc-a2a4-de17a7736d6e) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
24204 Sep 22 23:14:28.548 INFO [0] Transition from WaitActive to WaitQuorum
24205 Sep 22 23:14:28.548 WARN [0] new RM replaced this: None
24206 Sep 22 23:14:28.548 INFO [0] Starts reconcile loop
24207 Sep 22 23:14:28.548 INFO [1] 4aa9f481-1957-4d0f-b88c-338224283496 (e1a9869f-30e8-4dcc-a2a4-de17a7736d6e) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
24208 Sep 22 23:14:28.548 INFO [1] Transition from WaitActive to WaitQuorum
24209 Sep 22 23:14:28.548 WARN [1] new RM replaced this: None
24210 Sep 22 23:14:28.548 INFO [1] Starts reconcile loop
24211 Sep 22 23:14:28.548 INFO [2] 4aa9f481-1957-4d0f-b88c-338224283496 (e1a9869f-30e8-4dcc-a2a4-de17a7736d6e) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
24212 Sep 22 23:14:28.548 INFO [2] Transition from WaitActive to WaitQuorum
24213 Sep 22 23:14:28.548 WARN [2] new RM replaced this: None
24214 Sep 22 23:14:28.548 INFO [2] Starts reconcile loop
24215 Sep 22 23:14:28.548 INFO [0] 127.0.0.1:60580 task reports connection:true
24216 Sep 22 23:14:28.548 INFO 4aa9f481-1957-4d0f-b88c-338224283496 WaitQuorum WaitQuorum WaitQuorum
24217 Sep 22 23:14:28.548 INFO [0]R flush_numbers: [1, 1]
24218 Sep 22 23:14:28.548 INFO [0]R generation: [1, 1]
24219 Sep 22 23:14:28.548 INFO [0]R dirty: [false, false]
24220 Sep 22 23:14:28.549 INFO [1]R flush_numbers: [1, 1]
24221 Sep 22 23:14:28.549 INFO [1]R generation: [1, 1]
24222 Sep 22 23:14:28.549 INFO [1]R dirty: [false, false]
24223 Sep 22 23:14:28.549 INFO [2]R flush_numbers: [1, 1]
24224 Sep 22 23:14:28.549 INFO [2]R generation: [1, 1]
24225 Sep 22 23:14:28.549 INFO [2]R dirty: [false, false]
24226 Sep 22 23:14:28.549 INFO Max found gen is 2
24227 Sep 22 23:14:28.549 INFO Generation requested: 3 >= found:2
24228 Sep 22 23:14:28.549 INFO Next flush: 2
24229 Sep 22 23:14:28.549 INFO All extents match
24230 Sep 22 23:14:28.549 INFO No downstairs repair required
24231 Sep 22 23:14:28.549 INFO No initial repair work was required
24232 Sep 22 23:14:28.549 INFO Set Downstairs and Upstairs active
24233 Sep 22 23:14:28.549 INFO 4aa9f481-1957-4d0f-b88c-338224283496 is now active with session: e1a9869f-30e8-4dcc-a2a4-de17a7736d6e
24234 Sep 22 23:14:28.549 INFO 4aa9f481-1957-4d0f-b88c-338224283496 Set Active after no repair
24235 Sep 22 23:14:28.549 INFO Notify all downstairs, region set compare is done.
24236 Sep 22 23:14:28.549 INFO Set check for repair
24237 Sep 22 23:14:28.549 INFO [1] 127.0.0.1:59657 task reports connection:true
24238 Sep 22 23:14:28.549 INFO 4aa9f481-1957-4d0f-b88c-338224283496 Active Active Active
24239 Sep 22 23:14:28.549 INFO Set check for repair
24240 Sep 22 23:14:28.549 INFO [2] 127.0.0.1:60352 task reports connection:true
24241 Sep 22 23:14:28.549 INFO 4aa9f481-1957-4d0f-b88c-338224283496 Active Active Active
24242 Sep 22 23:14:28.549 INFO Set check for repair
24243 Sep 22 23:14:28.549 INFO [0] received reconcile message
24244 Sep 22 23:14:28.549 INFO [0] All repairs completed, exit
24245 Sep 22 23:14:28.549 INFO [0] Starts cmd_loop
24246 Sep 22 23:14:28.549 INFO [1] received reconcile message
24247 Sep 22 23:14:28.549 INFO [1] All repairs completed, exit
24248 Sep 22 23:14:28.549 INFO [1] Starts cmd_loop
24249 Sep 22 23:14:28.549 INFO [2] received reconcile message
24250 Sep 22 23:14:28.549 INFO [2] All repairs completed, exit
24251 Sep 22 23:14:28.549 INFO [2] Starts cmd_loop
24252 The guest has finished waiting for activation
24253 Sep 22 23:14:28.550 DEBG IO Read 1000 has deps []
24254 Sep 22 23:14:28.551 DEBG Read :1000 deps:[] res:true
24255 Sep 22 23:14:28.551 DEBG Read :1000 deps:[] res:true
24256 Sep 22 23:14:28.551 DEBG Read :1000 deps:[] res:true
24257 Sep 22 23:14:28.555 DEBG [0] Read AckReady 1000, : downstairs
24258 Sep 22 23:14:28.558 DEBG [1] Read already AckReady 1000, : downstairs
24259 Sep 22 23:14:28.560 DEBG [2] Read already AckReady 1000, : downstairs
24260 Sep 22 23:14:28.560 DEBG up_ds_listen was notified
24261 Sep 22 23:14:28.560 DEBG up_ds_listen process 1000
24262 Sep 22 23:14:28.561 DEBG [A] ack job 1000:1, : downstairs
24263 Sep 22 23:14:28.561 DEBG up_ds_listen checked 1 jobs, back to waiting
24264 test test::test_pantry_import_from_local_server ... ok
24265 Sep 22 23:14:28.572 INFO current number of open files limit 65536 is already the maximum
24266 Sep 22 23:14:28.572 INFO Created new region file "/tmp/downstairs-Xs2g0Vs2/region.json"
24267 Sep 22 23:14:28.617 INFO Checking if live repair is needed
24268 Sep 22 23:14:28.617 INFO No Live Repair required at this time
24269 Sep 22 23:14:28.636 INFO accepted connection from 127.0.0.1:56661, task: main
24270 Sep 22 23:14:28.636 INFO [0] a50fc3fb-3de2-4743-9c50-cc80cfba77db looper connected, looper: 0
24271 Sep 22 23:14:28.636 INFO [0] Proc runs for 127.0.0.1:62845 in state Replaced
24272 Sep 22 23:14:28.637 INFO Connection request from a50fc3fb-3de2-4743-9c50-cc80cfba77db with version 4, task: proc
24273 Sep 22 23:14:28.637 INFO upstairs UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } connected, version 4, task: proc
24274 Sep 22 23:14:28.637 INFO [0] upstairs guest_io_ready=TRUE, promote! session 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8
24275 Sep 22 23:14:28.637 INFO UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } is now active (read-write)
24276 Sep 22 23:14:28.637 INFO [0] downstairs client at 127.0.0.1:62845 has UUID ae376576-6754-4185-9b1d-d23a94866419
24277 Sep 22 23:14:28.637 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: ae376576-6754-4185-9b1d-d23a94866419, encrypted: true, database_read_version: 1, database_write_version: 1 }
24278 Sep 22 23:14:28.637 WARN [0] replace downstairs uuid:122e249d-ed33-49c6-bb2e-b79a59ebf9a6 with ae376576-6754-4185-9b1d-d23a94866419
24279 Sep 22 23:14:28.637 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db Replaced Active Active
24280 Sep 22 23:14:28.637 INFO Current flush_numbers [0..12]: [0, 0]
24281 Sep 22 23:14:28.638 INFO Downstairs has completed Negotiation, task: proc
24282 Sep 22 23:14:28.638 INFO [0] a50fc3fb-3de2-4743-9c50-cc80cfba77db (2b698136-bbd2-4fcb-ab7c-5f19b2267eb8) Replaced Active Active ds_transition to LiveRepairReady
24283 Sep 22 23:14:28.638 INFO [0] Transition from Replaced to LiveRepairReady
24284 Sep 22 23:14:28.638 WARN [0] new RM replaced this: None
24285 Sep 22 23:14:28.638 WARN [0] a50fc3fb-3de2-4743-9c50-cc80cfba77db Enter Ready for LiveRepair mode
24286 Sep 22 23:14:28.638 INFO [0] Starts cmd_loop
24287 Sep 22 23:14:28.638 INFO [0] 127.0.0.1:62845 task reports connection:true
24288 Sep 22 23:14:28.638 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db LiveRepairReady Active Active
24289 Sep 22 23:14:28.638 INFO Set check for repair
24290 Sep 22 23:14:28.681 INFO UUID: d2a767c4-8e80-4f99-8f17-2410bd865538
24291 Sep 22 23:14:28.681 INFO Blocks per extent:512 Total Extents: 188
24292 Sep 22 23:14:28.681 INFO Crucible Version: Crucible Version: 0.0.1
24293 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24294 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24295 rustc: 1.70.0 stable x86_64-unknown-illumos
24296 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24297 Sep 22 23:14:28.681 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24298 Sep 22 23:14:28.681 INFO Using address: 127.0.0.1:59093, task: main
24299 Sep 22 23:14:28.682 INFO Repair listens on 127.0.0.1:0, task: repair
24300 Sep 22 23:14:28.682 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36653, task: repair
24301 Sep 22 23:14:28.682 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36653, task: repair
24302 Sep 22 23:14:28.682 INFO listening, local_addr: 127.0.0.1:36653, task: repair
24303 Sep 22 23:14:28.682 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36653, task: repair
24304 Sep 22 23:14:28.682 INFO Using repair address: 127.0.0.1:36653, task: main
24305 Sep 22 23:14:28.682 INFO No SSL acceptor configured, task: main
24306 Sep 22 23:14:28.688 INFO listening on 127.0.0.1:0, task: main
24307 Sep 22 23:14:28.688 INFO current number of open files limit 65536 is already the maximum
24308 Sep 22 23:14:28.688 INFO Opened existing region file "/tmp/downstairs-kApwRwyr/region.json"
24309 Sep 22 23:14:28.688 INFO Database read version 1
24310 Sep 22 23:14:28.688 INFO Database write version 1
24311 Sep 22 23:14:28.711 INFO current number of open files limit 65536 is already the maximum
24312 Sep 22 23:14:28.711 INFO Opened existing region file "/tmp/downstairs-cOOWrUaJ/region.json"
24313 Sep 22 23:14:28.711 INFO Database read version 1
24314 Sep 22 23:14:28.711 INFO Database write version 1
24315 Sep 22 23:14:28.756 INFO current number of open files limit 65536 is already the maximum
24316 Sep 22 23:14:28.756 INFO Created new region file "/tmp/downstairs-i41xVTeA/region.json"
24317 Sep 22 23:14:28.756 INFO UUID: 10109090-7e8f-489a-9e4a-65ce6167d00b
24318 Sep 22 23:14:28.756 INFO Blocks per extent:512 Total Extents: 188
24319 Sep 22 23:14:28.756 INFO Crucible Version: Crucible Version: 0.0.1
24320 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24321 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24322 rustc: 1.70.0 stable x86_64-unknown-illumos
24323 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24324 Sep 22 23:14:28.756 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24325 Sep 22 23:14:28.756 INFO Using address: 127.0.0.1:62776, task: main
24326 Sep 22 23:14:28.757 INFO Repair listens on 127.0.0.1:0, task: repair
24327 Sep 22 23:14:28.757 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:61515, task: repair
24328 Sep 22 23:14:28.757 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:61515, task: repair
24329 Sep 22 23:14:28.757 INFO listening, local_addr: 127.0.0.1:61515, task: repair
24330 Sep 22 23:14:28.757 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:61515, task: repair
24331 Sep 22 23:14:28.757 INFO Using repair address: 127.0.0.1:61515, task: main
24332 Sep 22 23:14:28.757 INFO No SSL acceptor configured, task: main
24333 Sep 22 23:14:28.804 INFO UUID: bec31c6c-c0ce-43db-8467-8d94b73527a2
24334 Sep 22 23:14:28.805 INFO Blocks per extent:512 Total Extents: 188
24335 Sep 22 23:14:28.805 INFO Crucible Version: Crucible Version: 0.0.1
24336 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24337 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24338 rustc: 1.70.0 stable x86_64-unknown-illumos
24339 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24340 Sep 22 23:14:28.805 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24341 Sep 22 23:14:28.805 INFO Using address: 127.0.0.1:62296, task: main
24342 Sep 22 23:14:28.805 INFO Repair listens on 127.0.0.1:0, task: repair
24343 Sep 22 23:14:28.805 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51430, task: repair
24344 Sep 22 23:14:28.805 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51430, task: repair
24345 Sep 22 23:14:28.805 INFO listening, local_addr: 127.0.0.1:51430, task: repair
24346 Sep 22 23:14:28.805 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51430, task: repair
24347 Sep 22 23:14:28.805 INFO Using repair address: 127.0.0.1:51430, task: main
24348 Sep 22 23:14:28.805 INFO No SSL acceptor configured, task: main
24349 Sep 22 23:14:28.821 INFO current number of open files limit 65536 is already the maximum
24350 Sep 22 23:14:28.821 INFO Created new region file "/tmp/downstairs-OiIt4Fiz/region.json"
24351 Sep 22 23:14:28.857 INFO current number of open files limit 65536 is already the maximum
24352 Sep 22 23:14:28.857 INFO Opened existing region file "/tmp/downstairs-Xs2g0Vs2/region.json"
24353 Sep 22 23:14:28.857 INFO Database read version 1
24354 Sep 22 23:14:28.857 INFO Database write version 1
24355 Sep 22 23:14:28.866 INFO current number of open files limit 65536 is already the maximum
24356 Sep 22 23:14:28.866 INFO Created new region file "/tmp/downstairs-H2YCEjAN/region.json"
24357 Sep 22 23:14:28.874 INFO Checking if live repair is needed
24358 Sep 22 23:14:28.874 INFO No Live Repair required at this time
24359 Sep 22 23:14:28.890 INFO accepted connection from 127.0.0.1:35498, task: main
24360 Sep 22 23:14:28.890 INFO [0] e94af85e-3796-4fab-91a1-f12add9c3020 looper connected, looper: 0
24361 Sep 22 23:14:28.890 INFO [0] Proc runs for 127.0.0.1:56689 in state Replaced
24362 Sep 22 23:14:28.890 INFO Connection request from e94af85e-3796-4fab-91a1-f12add9c3020 with version 4, task: proc
24363 Sep 22 23:14:28.890 INFO upstairs UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } connected, version 4, task: proc
24364 Sep 22 23:14:28.890 INFO [0] upstairs guest_io_ready=TRUE, promote! session a86ae319-11ad-4b2d-a80b-330efc92ebef
24365 Sep 22 23:14:28.890 INFO UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } is now active (read-write)
24366 Sep 22 23:14:28.891 INFO [0] downstairs client at 127.0.0.1:56689 has UUID b7c0d6d3-333f-4423-864b-ebd0befef183
24367 Sep 22 23:14:28.891 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b7c0d6d3-333f-4423-864b-ebd0befef183, encrypted: true, database_read_version: 1, database_write_version: 1 }
24368 Sep 22 23:14:28.891 WARN [0] replace downstairs uuid:30596aa6-8959-4f8c-98b8-ac91fdc30fc1 with b7c0d6d3-333f-4423-864b-ebd0befef183
24369 Sep 22 23:14:28.891 INFO e94af85e-3796-4fab-91a1-f12add9c3020 Replaced Active Active
24370 Sep 22 23:14:28.891 INFO Current flush_numbers [0..12]: [0, 0]
24371 Sep 22 23:14:28.891 INFO Downstairs has completed Negotiation, task: proc
24372 Sep 22 23:14:28.891 INFO [0] e94af85e-3796-4fab-91a1-f12add9c3020 (a86ae319-11ad-4b2d-a80b-330efc92ebef) Replaced Active Active ds_transition to LiveRepairReady
24373 Sep 22 23:14:28.891 INFO [0] Transition from Replaced to LiveRepairReady
24374 Sep 22 23:14:28.891 WARN [0] new RM replaced this: None
24375 Sep 22 23:14:28.891 WARN [0] e94af85e-3796-4fab-91a1-f12add9c3020 Enter Ready for LiveRepair mode
24376 Sep 22 23:14:28.891 INFO [0] Starts cmd_loop
24377 Sep 22 23:14:28.892 INFO [0] 127.0.0.1:56689 task reports connection:true
24378 Sep 22 23:14:28.892 INFO e94af85e-3796-4fab-91a1-f12add9c3020 LiveRepairReady Active Active
24379 Sep 22 23:14:28.892 INFO Set check for repair
24380 Sep 22 23:14:28.989 INFO UUID: df366b3b-ce8f-4a81-b993-9efbc4225b81
24381 Sep 22 23:14:28.989 INFO Blocks per extent:512 Total Extents: 188
24382 Sep 22 23:14:28.989 INFO Crucible Version: Crucible Version: 0.0.1
24383 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24384 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24385 rustc: 1.70.0 stable x86_64-unknown-illumos
24386 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24387 Sep 22 23:14:28.989 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24388 Sep 22 23:14:28.989 INFO Using address: 127.0.0.1:64149, task: main
24389 Sep 22 23:14:28.989 INFO Repair listens on 127.0.0.1:0, task: repair
24390 Sep 22 23:14:28.989 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:46213, task: repair
24391 Sep 22 23:14:28.989 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:46213, task: repair
24392 Sep 22 23:14:28.989 INFO listening, local_addr: 127.0.0.1:46213, task: repair
24393 Sep 22 23:14:28.990 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:46213, task: repair
24394 Sep 22 23:14:28.990 INFO Using repair address: 127.0.0.1:46213, task: main
24395 Sep 22 23:14:28.990 INFO No SSL acceptor configured, task: main
24396 Sep 22 23:14:29.015 INFO UUID: b38dafac-2da7-4558-aae1-959c564193b2
24397 Sep 22 23:14:29.016 INFO Blocks per extent:512 Total Extents: 188
24398 Sep 22 23:14:29.016 INFO Crucible Version: Crucible Version: 0.0.1
24399 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24400 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24401 rustc: 1.70.0 stable x86_64-unknown-illumos
24402 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24403 Sep 22 23:14:29.016 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24404 Sep 22 23:14:29.016 INFO Using address: 127.0.0.1:38920, task: main
24405 Sep 22 23:14:29.016 INFO Repair listens on 127.0.0.1:0, task: repair
24406 Sep 22 23:14:29.016 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:49456, task: repair
24407 Sep 22 23:14:29.016 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:49456, task: repair
24408 Sep 22 23:14:29.016 INFO listening, local_addr: 127.0.0.1:49456, task: repair
24409 Sep 22 23:14:29.016 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:49456, task: repair
24410 Sep 22 23:14:29.016 INFO Using repair address: 127.0.0.1:49456, task: main
24411 Sep 22 23:14:29.016 INFO No SSL acceptor configured, task: main
24412 Sep 22 23:14:29.058 INFO listening on 127.0.0.1:0, task: main
24413 Sep 22 23:14:29.059 INFO current number of open files limit 65536 is already the maximum
24414 Sep 22 23:14:29.059 INFO Opened existing region file "/tmp/downstairs-i41xVTeA/region.json"
24415 Sep 22 23:14:29.059 INFO Database read version 1
24416 Sep 22 23:14:29.059 INFO Database write version 1
24417 Sep 22 23:14:29.062 INFO current number of open files limit 65536 is already the maximum
24418 Sep 22 23:14:29.062 INFO Created new region file "/tmp/downstairs-gGpp9x1g/region.json"
24419 Sep 22 23:14:29.076 INFO current number of open files limit 65536 is already the maximum
24420 Sep 22 23:14:29.076 INFO Created new region file "/tmp/downstairs-uiRa834G/region.json"
24421 Sep 22 23:14:29.128 INFO listening on 127.0.0.1:0, task: main
24422 Sep 22 23:14:29.129 INFO current number of open files limit 65536 is already the maximum
24423 Sep 22 23:14:29.129 INFO Opened existing region file "/tmp/downstairs-OiIt4Fiz/region.json"
24424 Sep 22 23:14:29.129 INFO Database read version 1
24425 Sep 22 23:14:29.129 INFO Database write version 1
24426 Sep 22 23:14:29.207 INFO listening on 127.0.0.1:0, task: main
24427 Sep 22 23:14:29.207 INFO current number of open files limit 65536 is already the maximum
24428 Sep 22 23:14:29.207 INFO Opened existing region file "/tmp/downstairs-H2YCEjAN/region.json"
24429 Sep 22 23:14:29.207 INFO Database read version 1
24430 Sep 22 23:14:29.207 INFO Database write version 1
24431 Sep 22 23:14:29.240 INFO UUID: ed5caa6d-3818-4a41-afcf-7deb29720fa0
24432 Sep 22 23:14:29.240 INFO Blocks per extent:512 Total Extents: 188
24433 Sep 22 23:14:29.240 INFO Crucible Version: Crucible Version: 0.0.1
24434 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24435 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24436 rustc: 1.70.0 stable x86_64-unknown-illumos
24437 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24438 Sep 22 23:14:29.240 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24439 Sep 22 23:14:29.240 INFO Using address: 127.0.0.1:46850, task: main
24440 Sep 22 23:14:29.240 INFO Repair listens on 127.0.0.1:0, task: repair
24441 Sep 22 23:14:29.240 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:45771, task: repair
24442 Sep 22 23:14:29.240 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:45771, task: repair
24443 Sep 22 23:14:29.241 INFO listening, local_addr: 127.0.0.1:45771, task: repair
24444 Sep 22 23:14:29.241 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:45771, task: repair
24445 Sep 22 23:14:29.241 INFO Using repair address: 127.0.0.1:45771, task: main
24446 Sep 22 23:14:29.241 INFO No SSL acceptor configured, task: main
24447 Sep 22 23:14:29.330 INFO current number of open files limit 65536 is already the maximum
24448 Sep 22 23:14:29.331 INFO Created new region file "/tmp/downstairs-XicHDLp1/region.json"
24449 Sep 22 23:14:29.416 INFO listening on 127.0.0.1:0, task: main
24450 Sep 22 23:14:29.417 INFO current number of open files limit 65536 is already the maximum
24451 Sep 22 23:14:29.417 INFO Opened existing region file "/tmp/downstairs-gGpp9x1g/region.json"
24452 Sep 22 23:14:29.417 INFO Database read version 1
24453 Sep 22 23:14:29.417 INFO Database write version 1
24454 Sep 22 23:14:29.452 INFO listening on 127.0.0.1:0, task: main
24455 Sep 22 23:14:29.453 INFO current number of open files limit 65536 is already the maximum
24456 Sep 22 23:14:29.453 INFO Opened existing region file "/tmp/downstairs-uiRa834G/region.json"
24457 Sep 22 23:14:29.453 INFO Database read version 1
24458 Sep 22 23:14:29.453 INFO Database write version 1
24459 Sep 22 23:14:29.462 INFO UUID: a0cbf067-d00b-4cf2-ba16-d1cbb9ac9209
24460 Sep 22 23:14:29.462 INFO Blocks per extent:512 Total Extents: 188
24461 Sep 22 23:14:29.462 INFO Crucible Version: Crucible Version: 0.0.1
24462 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24463 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24464 rustc: 1.70.0 stable x86_64-unknown-illumos
24465 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24466 Sep 22 23:14:29.462 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24467 Sep 22 23:14:29.462 INFO Using address: 127.0.0.1:46586, task: main
24468 Sep 22 23:14:29.462 INFO Repair listens on 127.0.0.1:0, task: repair
24469 Sep 22 23:14:29.462 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:56696, task: repair
24470 Sep 22 23:14:29.462 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:56696, task: repair
24471 Sep 22 23:14:29.462 INFO listening, local_addr: 127.0.0.1:56696, task: repair
24472 Sep 22 23:14:29.463 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:56696, task: repair
24473 Sep 22 23:14:29.463 INFO Using repair address: 127.0.0.1:56696, task: main
24474 Sep 22 23:14:29.463 INFO No SSL acceptor configured, task: main
24475 Sep 22 23:14:29.517 INFO UUID: 02d6cbbb-c0cd-42b6-b416-45900a2ec4e0
24476 Sep 22 23:14:29.517 INFO Blocks per extent:512 Total Extents: 188
24477 Sep 22 23:14:29.518 INFO Crucible Version: Crucible Version: 0.0.1
24478 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24479 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24480 rustc: 1.70.0 stable x86_64-unknown-illumos
24481 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24482 Sep 22 23:14:29.518 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24483 Sep 22 23:14:29.518 INFO Using address: 127.0.0.1:60728, task: main
24484 Sep 22 23:14:29.518 INFO Repair listens on 127.0.0.1:0, task: repair
24485 Sep 22 23:14:29.518 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:59468, task: repair
24486 Sep 22 23:14:29.518 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:59468, task: repair
24487 Sep 22 23:14:29.518 INFO listening, local_addr: 127.0.0.1:59468, task: repair
24488 Sep 22 23:14:29.518 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:59468, task: repair
24489 Sep 22 23:14:29.518 INFO Using repair address: 127.0.0.1:59468, task: main
24490 Sep 22 23:14:29.518 INFO No SSL acceptor configured, task: main
24491 Sep 22 23:14:29.548 INFO Upstairs starts
24492 Sep 22 23:14:29.548 INFO Crucible Version: BuildInfo {
24493 version: "0.0.1",
24494 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
24495 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
24496 git_branch: "main",
24497 rustc_semver: "1.70.0",
24498 rustc_channel: "stable",
24499 rustc_host_triple: "x86_64-unknown-illumos",
24500 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
24501 cargo_triple: "x86_64-unknown-illumos",
24502 debug: true,
24503 opt_level: 0,
24504 }
24505 Sep 22 23:14:29.548 INFO Upstairs <-> Downstairs Message Version: 4
24506 Sep 22 23:14:29.548 INFO Crucible stats registered with UUID: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6
24507 Sep 22 23:14:29.548 INFO Crucible 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 has session id: 6e6def71-e8d5-48fa-90f4-5751ac48d9ee
24508 Sep 22 23:14:29.549 INFO listening on 127.0.0.1:0, task: main
24509 Sep 22 23:14:29.549 INFO [0] connecting to 127.0.0.1:53298, looper: 0
24510 Sep 22 23:14:29.549 INFO [1] connecting to 127.0.0.1:59093, looper: 1
24511 Sep 22 23:14:29.549 INFO [2] connecting to 127.0.0.1:46586, looper: 2
24512 Sep 22 23:14:29.549 INFO up_listen starts, task: up_listen
24513 Sep 22 23:14:29.549 INFO Wait for all three downstairs to come online
24514 Sep 22 23:14:29.549 INFO Flush timeout: 0.5
24515 Sep 22 23:14:29.549 INFO accepted connection from 127.0.0.1:43836, task: main
24516 Sep 22 23:14:29.550 INFO [0] 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 looper connected, looper: 0
24517 Sep 22 23:14:29.550 INFO [0] Proc runs for 127.0.0.1:53298 in state New
24518 Sep 22 23:14:29.550 INFO [2] 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 looper connected, looper: 2
24519 Sep 22 23:14:29.550 INFO [2] Proc runs for 127.0.0.1:46586 in state New
24520 Sep 22 23:14:29.550 INFO accepted connection from 127.0.0.1:46522, task: main
24521 Sep 22 23:14:29.550 INFO [1] 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 looper connected, looper: 1
24522 Sep 22 23:14:29.550 INFO [1] Proc runs for 127.0.0.1:59093 in state New
24523 Sep 22 23:14:29.550 INFO accepted connection from 127.0.0.1:49426, task: main
24524 Sep 22 23:14:29.550 INFO Connection request from 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 with version 4, task: proc
24525 Sep 22 23:14:29.550 INFO upstairs UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } connected, version 4, task: proc
24526 Sep 22 23:14:29.550 INFO Connection request from 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 with version 4, task: proc
24527 Sep 22 23:14:29.550 INFO upstairs UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } connected, version 4, task: proc
24528 Sep 22 23:14:29.551 INFO Connection request from 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 with version 4, task: proc
24529 Sep 22 23:14:29.551 INFO upstairs UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } connected, version 4, task: proc
24530 The guest has requested activation
24531 Sep 22 23:14:29.551 INFO [0] 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 (61abb1c6-aab0-4918-867b-523054aca8f6) New New New ds_transition to WaitActive
24532 Sep 22 23:14:29.551 INFO [0] Transition from New to WaitActive
24533 Sep 22 23:14:29.551 INFO [2] 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 (61abb1c6-aab0-4918-867b-523054aca8f6) WaitActive New New ds_transition to WaitActive
24534 Sep 22 23:14:29.551 INFO [2] Transition from New to WaitActive
24535 Sep 22 23:14:29.551 INFO [1] 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 (61abb1c6-aab0-4918-867b-523054aca8f6) WaitActive New WaitActive ds_transition to WaitActive
24536 Sep 22 23:14:29.551 INFO [1] Transition from New to WaitActive
24537 Sep 22 23:14:29.551 INFO 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 active request set
24538 Sep 22 23:14:29.551 INFO [0] received activate with gen 1
24539 Sep 22 23:14:29.551 INFO [0] client got ds_active_rx, promote! session 61abb1c6-aab0-4918-867b-523054aca8f6
24540 Sep 22 23:14:29.551 INFO [1] received activate with gen 1
24541 Sep 22 23:14:29.551 INFO [1] client got ds_active_rx, promote! session 61abb1c6-aab0-4918-867b-523054aca8f6
24542 Sep 22 23:14:29.551 INFO [2] received activate with gen 1
24543 Sep 22 23:14:29.551 INFO [2] client got ds_active_rx, promote! session 61abb1c6-aab0-4918-867b-523054aca8f6
24544 Sep 22 23:14:29.552 INFO UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } is now active (read-write)
24545 Sep 22 23:14:29.552 INFO UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } is now active (read-write)
24546 Sep 22 23:14:29.552 INFO UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } is now active (read-write)
24547 Sep 22 23:14:29.553 INFO [0] downstairs client at 127.0.0.1:53298 has UUID bbec14f8-e0c9-4fdd-8394-c7675df3174b
24548 Sep 22 23:14:29.553 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: bbec14f8-e0c9-4fdd-8394-c7675df3174b, encrypted: true, database_read_version: 1, database_write_version: 1 }
24549 Sep 22 23:14:29.553 INFO 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 WaitActive WaitActive WaitActive
24550 Sep 22 23:14:29.553 INFO [2] downstairs client at 127.0.0.1:46586 has UUID a0cbf067-d00b-4cf2-ba16-d1cbb9ac9209
24551 Sep 22 23:14:29.553 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: a0cbf067-d00b-4cf2-ba16-d1cbb9ac9209, encrypted: true, database_read_version: 1, database_write_version: 1 }
24552 Sep 22 23:14:29.553 INFO 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 WaitActive WaitActive WaitActive
24553 Sep 22 23:14:29.553 INFO [1] downstairs client at 127.0.0.1:59093 has UUID d2a767c4-8e80-4f99-8f17-2410bd865538
24554 Sep 22 23:14:29.553 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: d2a767c4-8e80-4f99-8f17-2410bd865538, encrypted: true, database_read_version: 1, database_write_version: 1 }
24555 Sep 22 23:14:29.553 INFO 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 WaitActive WaitActive WaitActive
24556 Sep 22 23:14:29.559 INFO UUID: ede072e9-d3f2-4884-bb86-eea53898f9d9
24557 Sep 22 23:14:29.559 INFO Blocks per extent:512 Total Extents: 188
24558 Sep 22 23:14:29.559 INFO Crucible Version: Crucible Version: 0.0.1
24559 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24560 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24561 rustc: 1.70.0 stable x86_64-unknown-illumos
24562 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24563 Sep 22 23:14:29.559 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24564 Sep 22 23:14:29.559 INFO Using address: 127.0.0.1:62299, task: main
24565 Sep 22 23:14:29.559 INFO Repair listens on 127.0.0.1:0, task: repair
24566 Sep 22 23:14:29.559 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:63792, task: repair
24567 Sep 22 23:14:29.559 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:63792, task: repair
24568 Sep 22 23:14:29.559 INFO listening, local_addr: 127.0.0.1:63792, task: repair
24569 Sep 22 23:14:29.560 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:63792, task: repair
24570 Sep 22 23:14:29.560 INFO Using repair address: 127.0.0.1:63792, task: main
24571 Sep 22 23:14:29.560 INFO No SSL acceptor configured, task: main
24572 Sep 22 23:14:29.573 INFO current number of open files limit 65536 is already the maximum
24573 Sep 22 23:14:29.573 INFO Created new region file "/tmp/downstairs-ybEDH4hM/region.json"
24574 Sep 22 23:14:29.605 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24575 Sep 22 23:14:29.610 INFO listening on 127.0.0.1:0, task: main
24576 Sep 22 23:14:29.610 INFO current number of open files limit 65536 is already the maximum
24577 Sep 22 23:14:29.610 INFO Opened existing region file "/tmp/downstairs-XicHDLp1/region.json"
24578 Sep 22 23:14:29.610 INFO Database read version 1
24579 Sep 22 23:14:29.610 INFO Database write version 1
24580 Sep 22 23:14:29.612 INFO current number of open files limit 65536 is already the maximum
24581 Sep 22 23:14:29.612 INFO Created new region file "/tmp/downstairs-GUu8sV6m/region.json"
24582 Sep 22 23:14:29.614 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24583 Sep 22 23:14:29.624 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24584 Sep 22 23:14:29.636 WARN a50fc3fb-3de2-4743-9c50-cc80cfba77db request to replace downstairs 127.0.0.1:42970 with 127.0.0.1:62845
24585 Sep 22 23:14:29.636 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db found new target: 127.0.0.1:62845 at 0
24586 Sep 22 23:14:29.636 INFO Waiting for replacement to finish
24587 Sep 22 23:14:29.639 INFO Checking if live repair is needed
24588 Sep 22 23:14:29.639 INFO [0] a50fc3fb-3de2-4743-9c50-cc80cfba77db (2b698136-bbd2-4fcb-ab7c-5f19b2267eb8) LiveRepairReady Active Active ds_transition to LiveRepair
24589 Sep 22 23:14:29.639 INFO [0] Transition from LiveRepairReady to LiveRepair
24590 Sep 22 23:14:29.639 INFO Live Repair started
24591 Sep 22 23:14:29.640 WARN Live Repair main task begins., task: repair
24592 Sep 22 23:14:29.640 INFO Start Live Repair of extents 0 to 2, task: repair
24593 Sep 22 23:14:29.640 INFO Start extent 0 repair, task: repair
24594 Sep 22 23:14:29.640 DEBG RE:0 Repair extent begins
24595 Sep 22 23:14:29.640 DEBG Create new job ids for 0, : downstairs
24596 Sep 22 23:14:29.640 INFO RE:0 repair extent with ids 1002,1003,1004,1005 deps:[]
24597 Sep 22 23:14:29.640 DEBG Enqueue repair job 1005, : downstairs
24598 Sep 22 23:14:29.640 DEBG Enqueue repair job 1002, : downstairs
24599 Sep 22 23:14:29.640 INFO RE:0 close id:1002 queued, notify DS
24600 Sep 22 23:14:29.640 INFO RE:0 Wait for result from close command 1002:3
24601 Sep 22 23:14:29.640 DEBG [0] 1002 Remove check skipped:{JobId(1001)} from deps:[], : downstairs
24602 Sep 22 23:14:29.640 DEBG [0] 1002 Remove check < min repaired:1002 from deps:[], : downstairs
24603 Sep 22 23:14:29.640 INFO [0] 1002 final dependency list [], : downstairs
24604 Sep 22 23:14:29.640 DEBG [0] 1005 Remove check skipped:{JobId(1001)} from deps:[JobId(1002), JobId(1003), JobId(1004)], : downstairs
24605 Sep 22 23:14:29.640 DEBG [0] 1005 Remove check < min repaired:1002 from deps:[JobId(1002), JobId(1003), JobId(1004)], : downstairs
24606 Sep 22 23:14:29.640 INFO [0] 1005 final dependency list [JobId(1002), JobId(1003), JobId(1004)], : downstairs
24607 Sep 22 23:14:29.641 DEBG Flush just extent 0 with f:2 and g:1
24608 Sep 22 23:14:29.642 DEBG FlushClose:1002 extent 0 deps:[] res:true f:2 g:1
24609 Sep 22 23:14:29.642 DEBG Flush just extent 0 with f:2 and g:1
24610 Sep 22 23:14:29.643 DEBG FlushClose:1002 extent 0 deps:[] res:true f:2 g:1
24611 Sep 22 23:14:29.645 DEBG JustClose :1002 extent 0 deps:[] res:true
24612 Sep 22 23:14:29.645 DEBG [1] ELC got g:1 f:1 d:false
24613 Sep 22 23:14:29.645 DEBG [2] ELC got g:1 f:1 d:false
24614 Sep 22 23:14:29.645 DEBG [0] ELC got g:0 f:0 d:false
24615 Sep 22 23:14:29.645 DEBG [0] ExtentFlushClose 1002 AckReady, : downstairs
24616 Sep 22 23:14:29.645 DEBG up_ds_listen was notified
24617 Sep 22 23:14:29.645 DEBG up_ds_listen process 1002
24618 Sep 22 23:14:29.645 DEBG [A] ack job 1002:3, : downstairs
24619 Sep 22 23:14:29.645 DEBG up_ds_listen checked 1 jobs, back to waiting
24620 Sep 22 23:14:29.645 DEBG Extent 0 id:1002 Done
24621 Sep 22 23:14:29.645 DEBG Get repair info for 2 source, : downstairs
24622 Sep 22 23:14:29.645 DEBG Get repair info for 0 bad, : downstairs
24623 Sep 22 23:14:29.645 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
24624 Sep 22 23:14:29.645 DEBG Enqueue repair job 1003, : downstairs
24625 Sep 22 23:14:29.645 INFO RE:0 Wait for result from repair command 1003:4
24626 Sep 22 23:14:29.645 DEBG [0] 1003 Remove check skipped:{JobId(1001)} from deps:[JobId(1002)], : downstairs
24627 Sep 22 23:14:29.645 DEBG [0] 1003 Remove check < min repaired:1002 from deps:[JobId(1002)], : downstairs
24628 Sep 22 23:14:29.645 INFO [0] 1003 final dependency list [JobId(1002)], : downstairs
24629 Sep 22 23:14:29.646 DEBG Received NoOP 1003
24630 Sep 22 23:14:29.646 DEBG Received NoOP 1003
24631 Sep 22 23:14:29.646 DEBG Received ExtentLiveRepair 1003
24632 Sep 22 23:14:29.646 DEBG Work of: LiveNoOp 1003
24633 Sep 22 23:14:29.646 DEBG LiveNoOp :1003 deps:[JobId(1002)] res:true
24634 Sep 22 23:14:29.646 WARN 1005 job ELiveReopen for connection UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } waiting on 1 deps, role: work
24635 Sep 22 23:14:29.646 DEBG Work of: LiveNoOp 1003
24636 Sep 22 23:14:29.646 DEBG LiveNoOp :1003 deps:[JobId(1002)] res:true
24637 Sep 22 23:14:29.646 WARN 1005 job ELiveReopen for connection UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } waiting on 1 deps, role: work
24638 Sep 22 23:14:29.646 DEBG ExtentLiveRepair: extent 0 sra:127.0.0.1:44564
24639 Sep 22 23:14:29.646 INFO Created copy dir "/tmp/downstairs-zdyp8uYh/00/000/000.copy"
24640 Sep 22 23:14:29.692 INFO Downstairs has completed Negotiation, task: proc
24641 Sep 22 23:14:29.696 INFO Downstairs has completed Negotiation, task: proc
24642 Sep 22 23:14:29.698 INFO Downstairs has completed Negotiation, task: proc
24643 Sep 22 23:14:29.699 INFO [0] 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 (61abb1c6-aab0-4918-867b-523054aca8f6) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
24644 Sep 22 23:14:29.699 INFO [0] Transition from WaitActive to WaitQuorum
24645 Sep 22 23:14:29.699 WARN [0] new RM replaced this: None
24646 Sep 22 23:14:29.699 INFO [0] Starts reconcile loop
24647 Sep 22 23:14:29.699 INFO [2] 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 (61abb1c6-aab0-4918-867b-523054aca8f6) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
24648 Sep 22 23:14:29.699 INFO [2] Transition from WaitActive to WaitQuorum
24649 Sep 22 23:14:29.699 WARN [2] new RM replaced this: None
24650 Sep 22 23:14:29.699 INFO [2] Starts reconcile loop
24651 Sep 22 23:14:29.700 INFO [1] 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 (61abb1c6-aab0-4918-867b-523054aca8f6) WaitQuorum WaitActive WaitQuorum ds_transition to WaitQuorum
24652 Sep 22 23:14:29.700 INFO [1] Transition from WaitActive to WaitQuorum
24653 Sep 22 23:14:29.700 WARN [1] new RM replaced this: None
24654 Sep 22 23:14:29.700 INFO [1] Starts reconcile loop
24655 Sep 22 23:14:29.700 INFO [0] 127.0.0.1:53298 task reports connection:true
24656 Sep 22 23:14:29.700 INFO 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 WaitQuorum WaitQuorum WaitQuorum
24657 Sep 22 23:14:29.700 INFO [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24658 Sep 22 23:14:29.700 INFO [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24659 Sep 22 23:14:29.700 INFO [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
24660 Sep 22 23:14:29.700 INFO [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24661 Sep 22 23:14:29.700 INFO [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24662 Sep 22 23:14:29.700 INFO [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
24663 Sep 22 23:14:29.700 INFO [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24664 Sep 22 23:14:29.700 INFO [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24665 Sep 22 23:14:29.700 INFO [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
24666 Sep 22 23:14:29.700 INFO Max found gen is 1
24667 Sep 22 23:14:29.700 INFO Generation requested: 1 >= found:1
24668 Sep 22 23:14:29.700 INFO Next flush: 1
24669 Sep 22 23:14:29.700 INFO All extents match
24670 Sep 22 23:14:29.700 INFO No downstairs repair required
24671 Sep 22 23:14:29.700 INFO No initial repair work was required
24672 Sep 22 23:14:29.700 INFO Set Downstairs and Upstairs active
24673 Sep 22 23:14:29.700 INFO 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 is now active with session: 61abb1c6-aab0-4918-867b-523054aca8f6
24674 Sep 22 23:14:29.700 INFO 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 Set Active after no repair
24675 Sep 22 23:14:29.700 INFO Notify all downstairs, region set compare is done.
24676 Sep 22 23:14:29.700 INFO Set check for repair
24677 Sep 22 23:14:29.700 INFO [2] 127.0.0.1:46586 task reports connection:true
24678 Sep 22 23:14:29.700 INFO 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 Active Active Active
24679 Sep 22 23:14:29.700 INFO Set check for repair
24680 Sep 22 23:14:29.700 INFO [1] 127.0.0.1:59093 task reports connection:true
24681 Sep 22 23:14:29.701 INFO 4cf66ffa-a12c-4555-b256-f1d6116cf5e6 Active Active Active
24682 Sep 22 23:14:29.701 INFO Set check for repair
24683 Sep 22 23:14:29.701 INFO [0] received reconcile message
24684 Sep 22 23:14:29.701 INFO [0] All repairs completed, exit
24685 Sep 22 23:14:29.701 INFO [0] Starts cmd_loop
24686 Sep 22 23:14:29.701 INFO [1] received reconcile message
24687 Sep 22 23:14:29.701 INFO [1] All repairs completed, exit
24688 Sep 22 23:14:29.701 INFO [1] Starts cmd_loop
24689 Sep 22 23:14:29.701 INFO [2] received reconcile message
24690 Sep 22 23:14:29.701 INFO [2] All repairs completed, exit
24691 Sep 22 23:14:29.701 INFO [2] Starts cmd_loop
24692 The guest has finished waiting for activation
24693 Sep 22 23:14:29.709 INFO UUID: 1361b8e1-8898-44ce-892b-e72b28f2a9df
24694 Sep 22 23:14:29.709 INFO Blocks per extent:512 Total Extents: 188
24695 Sep 22 23:14:29.709 INFO Crucible Version: Crucible Version: 0.0.1
24696 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24697 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24698 rustc: 1.70.0 stable x86_64-unknown-illumos
24699 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24700 Sep 22 23:14:29.709 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24701 Sep 22 23:14:29.709 INFO Using address: 127.0.0.1:58182, task: main
24702 Sep 22 23:14:29.710 INFO Repair listens on 127.0.0.1:0, task: repair
24703 Sep 22 23:14:29.710 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:45179, task: repair
24704 Sep 22 23:14:29.710 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:45179, task: repair
24705 Sep 22 23:14:29.710 INFO listening, local_addr: 127.0.0.1:45179, task: repair
24706 Sep 22 23:14:29.710 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:45179, task: repair
24707 Sep 22 23:14:29.710 INFO Using repair address: 127.0.0.1:45179, task: main
24708 Sep 22 23:14:29.710 INFO No SSL acceptor configured, task: main
24709 Sep 22 23:14:29.723 INFO UUID: 14399b3b-6e6e-4995-afe0-74c1441f0833
24710 Sep 22 23:14:29.723 INFO Blocks per extent:512 Total Extents: 188
24711 Sep 22 23:14:29.723 INFO Crucible Version: Crucible Version: 0.0.1
24712 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
24713 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
24714 rustc: 1.70.0 stable x86_64-unknown-illumos
24715 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
24716 Sep 22 23:14:29.723 INFO Upstairs <-> Downstairs Message Version: 4, task: main
24717 Sep 22 23:14:29.723 INFO Using address: 127.0.0.1:44435, task: main
24718 Sep 22 23:14:29.723 INFO Repair listens on 127.0.0.1:0, task: repair
24719 Sep 22 23:14:29.723 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36093, task: repair
24720 Sep 22 23:14:29.723 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36093, task: repair
24721 Sep 22 23:14:29.724 INFO listening, local_addr: 127.0.0.1:36093, task: repair
24722 Sep 22 23:14:29.724 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36093, task: repair
24723 Sep 22 23:14:29.724 INFO Using repair address: 127.0.0.1:36093, task: main
24724 Sep 22 23:14:29.724 INFO No SSL acceptor configured, task: main
24725 Sep 22 23:14:29.742 INFO accepted connection, remote_addr: 127.0.0.1:49176, local_addr: 127.0.0.1:44564, task: repair
24726 Sep 22 23:14:29.743 TRCE incoming request, uri: /extent/0/files, method: GET, req_id: 88e31efa-976c-4991-8564-1f82328c288c, remote_addr: 127.0.0.1:49176, local_addr: 127.0.0.1:44564, task: repair
24727 Sep 22 23:14:29.743 INFO request completed, latency_us: 396, response_code: 200, uri: /extent/0/files, method: GET, req_id: 88e31efa-976c-4991-8564-1f82328c288c, remote_addr: 127.0.0.1:49176, local_addr: 127.0.0.1:44564, task: repair
24728 Sep 22 23:14:29.743 INFO eid:0 Found repair files: ["000", "000.db"]
24729 Sep 22 23:14:29.744 TRCE incoming request, uri: /newextent/0/data, method: GET, req_id: 5f899b13-0308-44ae-b02b-57bb9a351324, remote_addr: 127.0.0.1:49176, local_addr: 127.0.0.1:44564, task: repair
24730 Sep 22 23:14:29.744 INFO request completed, latency_us: 615, response_code: 200, uri: /newextent/0/data, method: GET, req_id: 5f899b13-0308-44ae-b02b-57bb9a351324, remote_addr: 127.0.0.1:49176, local_addr: 127.0.0.1:44564, task: repair
24731 Sep 22 23:14:29.746 TRCE incoming request, uri: /newextent/0/db, method: GET, req_id: f8869a74-2fd7-4e08-bf49-7420fd09c6da, remote_addr: 127.0.0.1:49176, local_addr: 127.0.0.1:44564, task: repair
24732 Sep 22 23:14:29.746 INFO request completed, latency_us: 370, response_code: 200, uri: /newextent/0/db, method: GET, req_id: f8869a74-2fd7-4e08-bf49-7420fd09c6da, remote_addr: 127.0.0.1:49176, local_addr: 127.0.0.1:44564, task: repair
24733 Sep 22 23:14:29.747 INFO Repair files downloaded, move directory "/tmp/downstairs-zdyp8uYh/00/000/000.copy" to "/tmp/downstairs-zdyp8uYh/00/000/000.replace"
24734 Sep 22 23:14:29.747 DEBG fsync completed for: "/tmp/downstairs-zdyp8uYh/00/000"
24735 Sep 22 23:14:29.748 INFO Copy files from "/tmp/downstairs-zdyp8uYh/00/000/000.replace" in "/tmp/downstairs-zdyp8uYh/00/000"
24736 Sep 22 23:14:29.748 DEBG fsync completed for: "/tmp/downstairs-zdyp8uYh/00/000/000"
24737 Sep 22 23:14:29.748 DEBG fsync completed for: "/tmp/downstairs-zdyp8uYh/00/000/000.db"
24738 Sep 22 23:14:29.748 DEBG fsync completed for: "/tmp/downstairs-zdyp8uYh/00/000"
24739 Sep 22 23:14:29.749 INFO Move directory "/tmp/downstairs-zdyp8uYh/00/000/000.replace" to "/tmp/downstairs-zdyp8uYh/00/000/000.completed"
24740 Sep 22 23:14:29.749 DEBG fsync completed for: "/tmp/downstairs-zdyp8uYh/00/000"
24741 Sep 22 23:14:29.749 DEBG fsync completed for: "/tmp/downstairs-zdyp8uYh/00/000"
24742 Sep 22 23:14:29.749 DEBG LiveRepair:1003 extent 0 deps:[JobId(1002)] res:true
24743 Sep 22 23:14:29.749 WARN 1005 job ELiveReopen for connection UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } waiting on 1 deps, role: work
24744 Sep 22 23:14:29.749 DEBG [0] ExtentLiveRepair AckReady 1003, : downstairs
24745 Sep 22 23:14:29.749 DEBG up_ds_listen was notified
24746 Sep 22 23:14:29.749 DEBG up_ds_listen process 1003
24747 Sep 22 23:14:29.749 DEBG [A] ack job 1003:4, : downstairs
24748 Sep 22 23:14:29.749 DEBG up_ds_listen checked 1 jobs, back to waiting
24749 Sep 22 23:14:29.749 DEBG Extent 0 id:1003 Done
24750 Sep 22 23:14:29.750 DEBG Enqueue repair job 1004, : downstairs
24751 Sep 22 23:14:29.750 INFO RE:0 Wait for result from NoOp command 1004:5
24752 Sep 22 23:14:29.750 DEBG [0] 1004 Remove check skipped:{JobId(1001)} from deps:[JobId(1002), JobId(1003)], : downstairs
24753 Sep 22 23:14:29.750 DEBG [0] 1004 Remove check < min repaired:1002 from deps:[JobId(1002), JobId(1003)], : downstairs
24754 Sep 22 23:14:29.750 INFO [0] 1004 final dependency list [JobId(1002), JobId(1003)], : downstairs
24755 Sep 22 23:14:29.750 DEBG Received NoOP 1004
24756 Sep 22 23:14:29.750 DEBG Received NoOP 1004
24757 Sep 22 23:14:29.750 DEBG Received NoOP 1004
24758 Sep 22 23:14:29.750 DEBG Work of: LiveNoOp 1004
24759 Sep 22 23:14:29.750 DEBG LiveNoOp :1004 deps:[JobId(1002), JobId(1003)] res:true
24760 Sep 22 23:14:29.753 DEBG LiveReopen:1005 extent 0 deps:[JobId(1002), JobId(1003), JobId(1004)] res:true
24761 Sep 22 23:14:29.753 DEBG Work of: LiveNoOp 1004
24762 Sep 22 23:14:29.753 DEBG LiveNoOp :1004 deps:[JobId(1002), JobId(1003)] res:true
24763 Sep 22 23:14:29.754 DEBG LiveReopen:1005 extent 0 deps:[JobId(1002), JobId(1003), JobId(1004)] res:true
24764 Sep 22 23:14:29.755 DEBG Work of: LiveNoOp 1004
24765 Sep 22 23:14:29.755 DEBG LiveNoOp :1004 deps:[JobId(1002), JobId(1003)] res:true
24766 Sep 22 23:14:29.756 DEBG LiveReopen:1005 extent 0 deps:[JobId(1002), JobId(1003), JobId(1004)] res:true
24767 Sep 22 23:14:29.756 DEBG [0] ExtentLiveNoOp AckReady 1004, : downstairs
24768 Sep 22 23:14:29.756 DEBG [0] ExtentLiveReopen AckReady 1005, : downstairs
24769 Sep 22 23:14:29.756 DEBG up_ds_listen was notified
24770 Sep 22 23:14:29.756 DEBG up_ds_listen process 1004
24771 Sep 22 23:14:29.756 DEBG [A] ack job 1004:5, : downstairs
24772 Sep 22 23:14:29.756 DEBG up_ds_listen process 1005
24773 Sep 22 23:14:29.756 DEBG [A] ack job 1005:6, : downstairs
24774 Sep 22 23:14:29.756 DEBG up_ds_listen checked 2 jobs, back to waiting
24775 Sep 22 23:14:29.756 DEBG up_ds_listen was notified
24776 Sep 22 23:14:29.756 DEBG up_ds_listen checked 0 jobs, back to waiting
24777 Sep 22 23:14:29.756 DEBG Extent 0 id:1004 Done
24778 Sep 22 23:14:29.756 INFO RE:0 Wait for result from reopen command 1005:6
24779 Sep 22 23:14:29.756 DEBG Extent 0 id:1005 Done
24780 Sep 22 23:14:29.756 INFO Start extent 1 repair, task: repair
24781 Sep 22 23:14:29.756 DEBG RE:1 Repair extent begins
24782 Sep 22 23:14:29.756 DEBG Create new job ids for 1, : downstairs
24783 Sep 22 23:14:29.756 INFO RE:1 repair extent with ids 1006,1007,1008,1009 deps:[]
24784 Sep 22 23:14:29.756 DEBG Enqueue repair job 1009, : downstairs
24785 Sep 22 23:14:29.756 DEBG Enqueue repair job 1006, : downstairs
24786 Sep 22 23:14:29.756 INFO RE:1 close id:1006 queued, notify DS
24787 Sep 22 23:14:29.756 INFO RE:1 Wait for result from close command 1006:7
24788 Sep 22 23:14:29.756 DEBG [0] 1006 Remove check skipped:{JobId(1001)} from deps:[], : downstairs
24789 Sep 22 23:14:29.757 DEBG [0] 1006 Remove check < min repaired:1002 from deps:[], : downstairs
24790 Sep 22 23:14:29.757 INFO [0] 1006 final dependency list [], : downstairs
24791 Sep 22 23:14:29.757 DEBG [0] 1009 Remove check skipped:{JobId(1001)} from deps:[JobId(1006), JobId(1007), JobId(1008)], : downstairs
24792 Sep 22 23:14:29.757 DEBG [0] 1009 Remove check < min repaired:1002 from deps:[JobId(1006), JobId(1007), JobId(1008)], : downstairs
24793 Sep 22 23:14:29.757 INFO [0] 1009 final dependency list [JobId(1006), JobId(1007), JobId(1008)], : downstairs
24794 Sep 22 23:14:29.757 DEBG Flush just extent 1 with f:3 and g:1
24795 Sep 22 23:14:29.758 DEBG FlushClose:1006 extent 1 deps:[] res:true f:3 g:1
24796 Sep 22 23:14:29.758 DEBG Flush just extent 1 with f:3 and g:1
24797 Sep 22 23:14:29.759 DEBG FlushClose:1006 extent 1 deps:[] res:true f:3 g:1
24798 Sep 22 23:14:29.760 DEBG JustClose :1006 extent 1 deps:[] res:true
24799 Sep 22 23:14:29.761 DEBG [1] ELC got g:1 f:1 d:false
24800 Sep 22 23:14:29.761 DEBG [2] ELC got g:1 f:1 d:false
24801 Sep 22 23:14:29.761 DEBG [0] ELC got g:0 f:0 d:false
24802 Sep 22 23:14:29.761 DEBG [0] ExtentFlushClose 1006 AckReady, : downstairs
24803 Sep 22 23:14:29.761 DEBG up_ds_listen was notified
24804 Sep 22 23:14:29.761 DEBG up_ds_listen process 1006
24805 Sep 22 23:14:29.761 DEBG [A] ack job 1006:7, : downstairs
24806 Sep 22 23:14:29.761 DEBG up_ds_listen checked 1 jobs, back to waiting
24807 Sep 22 23:14:29.761 DEBG Extent 1 id:1006 Done
24808 Sep 22 23:14:29.761 DEBG Get repair info for 2 source, : downstairs
24809 Sep 22 23:14:29.761 DEBG Get repair info for 0 bad, : downstairs
24810 Sep 22 23:14:29.761 INFO Repair for extent 1 s:2 d:[ClientId(0)], : downstairs
24811 Sep 22 23:14:29.761 DEBG Enqueue repair job 1007, : downstairs
24812 Sep 22 23:14:29.761 INFO RE:1 Wait for result from repair command 1007:8
24813 Sep 22 23:14:29.761 DEBG [0] 1007 Remove check skipped:{JobId(1001)} from deps:[JobId(1006)], : downstairs
24814 Sep 22 23:14:29.761 DEBG [0] 1007 Remove check < min repaired:1002 from deps:[JobId(1006)], : downstairs
24815 Sep 22 23:14:29.761 INFO [0] 1007 final dependency list [JobId(1006)], : downstairs
24816 Sep 22 23:14:29.762 DEBG Received NoOP 1007
24817 Sep 22 23:14:29.762 DEBG Received NoOP 1007
24818 Sep 22 23:14:29.762 DEBG Received ExtentLiveRepair 1007
24819 Sep 22 23:14:29.762 DEBG Work of: LiveNoOp 1007
24820 Sep 22 23:14:29.762 DEBG LiveNoOp :1007 deps:[JobId(1006)] res:true
24821 Sep 22 23:14:29.762 WARN 1009 job ELiveReopen for connection UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } waiting on 1 deps, role: work
24822 Sep 22 23:14:29.762 DEBG Work of: LiveNoOp 1007
24823 Sep 22 23:14:29.762 DEBG LiveNoOp :1007 deps:[JobId(1006)] res:true
24824 Sep 22 23:14:29.762 WARN 1009 job ELiveReopen for connection UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } waiting on 1 deps, role: work
24825 Sep 22 23:14:29.762 DEBG ExtentLiveRepair: extent 1 sra:127.0.0.1:44564
24826 Sep 22 23:14:29.762 INFO Created copy dir "/tmp/downstairs-zdyp8uYh/00/000/001.copy"
24827 Sep 22 23:14:29.783 INFO Upstairs starts
24828 Sep 22 23:14:29.783 INFO Crucible Version: BuildInfo {
24829 version: "0.0.1",
24830 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
24831 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
24832 git_branch: "main",
24833 rustc_semver: "1.70.0",
24834 rustc_channel: "stable",
24835 rustc_host_triple: "x86_64-unknown-illumos",
24836 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
24837 cargo_triple: "x86_64-unknown-illumos",
24838 debug: true,
24839 opt_level: 0,
24840 }
24841 Sep 22 23:14:29.783 INFO Upstairs <-> Downstairs Message Version: 4
24842 Sep 22 23:14:29.783 INFO Crucible stats registered with UUID: 6e5c0f70-fd56-4280-9d20-71288e488216
24843 Sep 22 23:14:29.783 INFO Crucible 6e5c0f70-fd56-4280-9d20-71288e488216 has session id: 5375fd1f-f6a1-4405-99f6-d882ff308715
24844 Sep 22 23:14:29.784 INFO listening on 127.0.0.1:0, task: main
24845 Sep 22 23:14:29.784 INFO [0] connecting to 127.0.0.1:42762, looper: 0
24846 Sep 22 23:14:29.784 INFO [1] connecting to 127.0.0.1:64149, looper: 1
24847 Sep 22 23:14:29.784 INFO [2] connecting to 127.0.0.1:58182, looper: 2
24848 Sep 22 23:14:29.784 INFO up_listen starts, task: up_listen
24849 Sep 22 23:14:29.784 INFO Wait for all three downstairs to come online
24850 Sep 22 23:14:29.784 INFO Flush timeout: 0.5
24851 Sep 22 23:14:29.784 INFO accepted connection from 127.0.0.1:49772, task: main
24852 Sep 22 23:14:29.784 INFO accepted connection from 127.0.0.1:48323, task: main
24853 Sep 22 23:14:29.784 INFO [0] 6e5c0f70-fd56-4280-9d20-71288e488216 looper connected, looper: 0
24854 Sep 22 23:14:29.784 INFO [0] Proc runs for 127.0.0.1:42762 in state New
24855 Sep 22 23:14:29.785 INFO [1] 6e5c0f70-fd56-4280-9d20-71288e488216 looper connected, looper: 1
24856 Sep 22 23:14:29.785 INFO [1] Proc runs for 127.0.0.1:64149 in state New
24857 Sep 22 23:14:29.785 INFO [2] 6e5c0f70-fd56-4280-9d20-71288e488216 looper connected, looper: 2
24858 Sep 22 23:14:29.785 INFO [2] Proc runs for 127.0.0.1:58182 in state New
24859 Sep 22 23:14:29.785 INFO accepted connection from 127.0.0.1:59274, task: main
24860 Sep 22 23:14:29.785 INFO Connection request from 6e5c0f70-fd56-4280-9d20-71288e488216 with version 4, task: proc
24861 Sep 22 23:14:29.785 INFO upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } connected, version 4, task: proc
24862 Sep 22 23:14:29.785 INFO Connection request from 6e5c0f70-fd56-4280-9d20-71288e488216 with version 4, task: proc
24863 Sep 22 23:14:29.785 INFO upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } connected, version 4, task: proc
24864 Sep 22 23:14:29.785 INFO Connection request from 6e5c0f70-fd56-4280-9d20-71288e488216 with version 4, task: proc
24865 Sep 22 23:14:29.785 INFO upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } connected, version 4, task: proc
24866 The guest has requested activation
24867 Sep 22 23:14:29.785 INFO [0] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) New New New ds_transition to WaitActive
24868 Sep 22 23:14:29.785 INFO [0] Transition from New to WaitActive
24869 Sep 22 23:14:29.786 INFO [1] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) WaitActive New New ds_transition to WaitActive
24870 Sep 22 23:14:29.786 INFO [1] Transition from New to WaitActive
24871 Sep 22 23:14:29.786 INFO [2] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) WaitActive WaitActive New ds_transition to WaitActive
24872 Sep 22 23:14:29.786 INFO [2] Transition from New to WaitActive
24873 Sep 22 23:14:29.786 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 active request set
24874 Sep 22 23:14:29.786 INFO [0] received activate with gen 1
24875 Sep 22 23:14:29.786 INFO [0] client got ds_active_rx, promote! session f334c95e-b851-4a8a-a731-3fb69e42e934
24876 Sep 22 23:14:29.786 INFO [1] received activate with gen 1
24877 Sep 22 23:14:29.786 INFO [1] client got ds_active_rx, promote! session f334c95e-b851-4a8a-a731-3fb69e42e934
24878 Sep 22 23:14:29.786 INFO [2] received activate with gen 1
24879 Sep 22 23:14:29.786 INFO [2] client got ds_active_rx, promote! session f334c95e-b851-4a8a-a731-3fb69e42e934
24880 Sep 22 23:14:29.786 INFO UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } is now active (read-write)
24881 Sep 22 23:14:29.787 INFO UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } is now active (read-write)
24882 Sep 22 23:14:29.787 INFO UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } is now active (read-write)
24883 Sep 22 23:14:29.787 INFO [0] downstairs client at 127.0.0.1:42762 has UUID 353b066e-6f4d-4802-9336-0f9c4c2ea130
24884 Sep 22 23:14:29.787 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 353b066e-6f4d-4802-9336-0f9c4c2ea130, encrypted: true, database_read_version: 1, database_write_version: 1 }
24885 Sep 22 23:14:29.787 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 WaitActive WaitActive WaitActive
24886 Sep 22 23:14:29.787 INFO [1] downstairs client at 127.0.0.1:64149 has UUID df366b3b-ce8f-4a81-b993-9efbc4225b81
24887 Sep 22 23:14:29.787 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: df366b3b-ce8f-4a81-b993-9efbc4225b81, encrypted: true, database_read_version: 1, database_write_version: 1 }
24888 Sep 22 23:14:29.787 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 WaitActive WaitActive WaitActive
24889 Sep 22 23:14:29.787 INFO [2] downstairs client at 127.0.0.1:58182 has UUID 1361b8e1-8898-44ce-892b-e72b28f2a9df
24890 Sep 22 23:14:29.787 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 1361b8e1-8898-44ce-892b-e72b28f2a9df, encrypted: true, database_read_version: 1, database_write_version: 1 }
24891 Sep 22 23:14:29.787 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 WaitActive WaitActive WaitActive
24892 Sep 22 23:14:29.798 INFO current number of open files limit 65536 is already the maximum
24893 Sep 22 23:14:29.798 INFO Created new region file "/tmp/downstairs-fjJjawz1/region.json"
24894 Sep 22 23:14:29.835 INFO listening on 127.0.0.1:0, task: main
24895 Sep 22 23:14:29.835 INFO current number of open files limit 65536 is already the maximum
24896 Sep 22 23:14:29.835 INFO Opened existing region file "/tmp/downstairs-ybEDH4hM/region.json"
24897 Sep 22 23:14:29.835 INFO Database read version 1
24898 Sep 22 23:14:29.835 INFO Database write version 1
24899 Sep 22 23:14:29.842 INFO accepted connection, remote_addr: 127.0.0.1:65045, local_addr: 127.0.0.1:44564, task: repair
24900 Sep 22 23:14:29.843 TRCE incoming request, uri: /extent/1/files, method: GET, req_id: 20b49939-e2a8-4c0d-99f2-91d72757c7d7, remote_addr: 127.0.0.1:65045, local_addr: 127.0.0.1:44564, task: repair
24901 Sep 22 23:14:29.843 INFO request completed, latency_us: 233, response_code: 200, uri: /extent/1/files, method: GET, req_id: 20b49939-e2a8-4c0d-99f2-91d72757c7d7, remote_addr: 127.0.0.1:65045, local_addr: 127.0.0.1:44564, task: repair
24902 Sep 22 23:14:29.843 INFO eid:1 Found repair files: ["001", "001.db"]
24903 Sep 22 23:14:29.844 TRCE incoming request, uri: /newextent/1/data, method: GET, req_id: 9d9eb051-b271-4896-81c3-281dff2a10de, remote_addr: 127.0.0.1:65045, local_addr: 127.0.0.1:44564, task: repair
24904 Sep 22 23:14:29.844 INFO request completed, latency_us: 380, response_code: 200, uri: /newextent/1/data, method: GET, req_id: 9d9eb051-b271-4896-81c3-281dff2a10de, remote_addr: 127.0.0.1:65045, local_addr: 127.0.0.1:44564, task: repair
24905 Sep 22 23:14:29.845 TRCE incoming request, uri: /newextent/1/db, method: GET, req_id: 3953507f-608f-44c8-a979-2bf7485083a7, remote_addr: 127.0.0.1:65045, local_addr: 127.0.0.1:44564, task: repair
24906 Sep 22 23:14:29.845 INFO request completed, latency_us: 335, response_code: 200, uri: /newextent/1/db, method: GET, req_id: 3953507f-608f-44c8-a979-2bf7485083a7, remote_addr: 127.0.0.1:65045, local_addr: 127.0.0.1:44564, task: repair
24907 Sep 22 23:14:29.846 INFO Repair files downloaded, move directory "/tmp/downstairs-zdyp8uYh/00/000/001.copy" to "/tmp/downstairs-zdyp8uYh/00/000/001.replace"
24908 Sep 22 23:14:29.846 DEBG fsync completed for: "/tmp/downstairs-zdyp8uYh/00/000"
24909 Sep 22 23:14:29.847 INFO Copy files from "/tmp/downstairs-zdyp8uYh/00/000/001.replace" in "/tmp/downstairs-zdyp8uYh/00/000"
24910 Sep 22 23:14:29.847 DEBG fsync completed for: "/tmp/downstairs-zdyp8uYh/00/000/001"
24911 Sep 22 23:14:29.847 DEBG fsync completed for: "/tmp/downstairs-zdyp8uYh/00/000/001.db"
24912 Sep 22 23:14:29.847 DEBG fsync completed for: "/tmp/downstairs-zdyp8uYh/00/000"
24913 Sep 22 23:14:29.847 INFO Move directory "/tmp/downstairs-zdyp8uYh/00/000/001.replace" to "/tmp/downstairs-zdyp8uYh/00/000/001.completed"
24914 Sep 22 23:14:29.847 DEBG fsync completed for: "/tmp/downstairs-zdyp8uYh/00/000"
24915 Sep 22 23:14:29.847 DEBG fsync completed for: "/tmp/downstairs-zdyp8uYh/00/000"
24916 Sep 22 23:14:29.847 DEBG LiveRepair:1007 extent 1 deps:[JobId(1006)] res:true
24917 Sep 22 23:14:29.847 WARN 1009 job ELiveReopen for connection UpstairsConnection { upstairs_id: a50fc3fb-3de2-4743-9c50-cc80cfba77db, session_id: 2b698136-bbd2-4fcb-ab7c-5f19b2267eb8, gen: 1 } waiting on 1 deps, role: work
24918 Sep 22 23:14:29.848 DEBG [0] ExtentLiveRepair AckReady 1007, : downstairs
24919 Sep 22 23:14:29.848 DEBG up_ds_listen was notified
24920 Sep 22 23:14:29.848 DEBG up_ds_listen process 1007
24921 Sep 22 23:14:29.848 DEBG [A] ack job 1007:8, : downstairs
24922 Sep 22 23:14:29.848 DEBG up_ds_listen checked 1 jobs, back to waiting
24923 Sep 22 23:14:29.848 DEBG Extent 1 id:1007 Done
24924 Sep 22 23:14:29.848 DEBG Enqueue repair job 1008, : downstairs
24925 Sep 22 23:14:29.848 INFO RE:1 Wait for result from NoOp command 1008:9
24926 Sep 22 23:14:29.848 DEBG [0] 1008 Remove check skipped:{JobId(1001)} from deps:[JobId(1006), JobId(1007)], : downstairs
24927 Sep 22 23:14:29.848 DEBG [0] 1008 Remove check < min repaired:1002 from deps:[JobId(1006), JobId(1007)], : downstairs
24928 Sep 22 23:14:29.848 INFO [0] 1008 final dependency list [JobId(1006), JobId(1007)], : downstairs
24929 Sep 22 23:14:29.848 DEBG Received NoOP 1008
24930 Sep 22 23:14:29.848 DEBG Received NoOP 1008
24931 Sep 22 23:14:29.848 DEBG Received NoOP 1008
24932 Sep 22 23:14:29.849 DEBG Work of: LiveNoOp 1008
24933 Sep 22 23:14:29.849 DEBG LiveNoOp :1008 deps:[JobId(1006), JobId(1007)] res:true
24934 Sep 22 23:14:29.850 DEBG LiveReopen:1009 extent 1 deps:[JobId(1006), JobId(1007), JobId(1008)] res:true
24935 Sep 22 23:14:29.850 DEBG Work of: LiveNoOp 1008
24936 Sep 22 23:14:29.850 DEBG LiveNoOp :1008 deps:[JobId(1006), JobId(1007)] res:true
24937 Sep 22 23:14:29.852 DEBG LiveReopen:1009 extent 1 deps:[JobId(1006), JobId(1007), JobId(1008)] res:true
24938 Sep 22 23:14:29.852 DEBG Work of: LiveNoOp 1008
24939 Sep 22 23:14:29.852 DEBG LiveNoOp :1008 deps:[JobId(1006), JobId(1007)] res:true
24940 Sep 22 23:14:29.853 DEBG LiveReopen:1009 extent 1 deps:[JobId(1006), JobId(1007), JobId(1008)] res:true
24941 Sep 22 23:14:29.854 DEBG [0] ExtentLiveNoOp AckReady 1008, : downstairs
24942 Sep 22 23:14:29.854 DEBG [0] ExtentLiveReopen AckReady 1009, : downstairs
24943 Sep 22 23:14:29.854 DEBG up_ds_listen was notified
24944 Sep 22 23:14:29.854 DEBG up_ds_listen process 1008
24945 Sep 22 23:14:29.854 DEBG [A] ack job 1008:9, : downstairs
24946 Sep 22 23:14:29.854 DEBG up_ds_listen process 1009
24947 Sep 22 23:14:29.854 DEBG [A] ack job 1009:10, : downstairs
24948 Sep 22 23:14:29.854 DEBG up_ds_listen checked 2 jobs, back to waiting
24949 Sep 22 23:14:29.854 DEBG up_ds_listen was notified
24950 Sep 22 23:14:29.854 DEBG up_ds_listen checked 0 jobs, back to waiting
24951 Sep 22 23:14:29.854 DEBG Extent 1 id:1008 Done
24952 Sep 22 23:14:29.854 INFO RE:1 Wait for result from reopen command 1009:10
24953 Sep 22 23:14:29.854 DEBG Extent 1 id:1009 Done
24954 Sep 22 23:14:29.854 DEBG IO Flush 1010 has deps [JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)]
24955 Sep 22 23:14:29.854 INFO LiveRepair final flush submitted
24956 Sep 22 23:14:29.854 DEBG [0] 1010 Remove check skipped:{JobId(1001)} from deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)], : downstairs
24957 Sep 22 23:14:29.854 DEBG [0] 1010 Remove check < min repaired:1002 from deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)], : downstairs
24958 Sep 22 23:14:29.854 INFO [0] 1010 final dependency list [JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)], : downstairs
24959 Sep 22 23:14:29.854 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)] res:true f:4 g:1
24960 Sep 22 23:14:29.855 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)] res:true f:4 g:1
24961 Sep 22 23:14:29.855 DEBG Flush :1010 extent_limit Some(1) deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)] res:true f:4 g:1
24962 Sep 22 23:14:29.855 DEBG up_ds_listen was notified
24963 Sep 22 23:14:29.855 DEBG up_ds_listen process 1010
24964 Sep 22 23:14:29.855 DEBG [A] ack job 1010:11, : downstairs
24965 Sep 22 23:14:29.855 DEBG [rc] retire 1010 clears [JobId(1002), JobId(1003), JobId(1004), JobId(1005), JobId(1006), JobId(1007), JobId(1008), JobId(1009), JobId(1010)], : downstairs
24966 Sep 22 23:14:29.855 DEBG up_ds_listen checked 1 jobs, back to waiting
24967 Sep 22 23:14:29.855 INFO LiveRepair final flush completed
24968 Sep 22 23:14:29.855 INFO [0] a50fc3fb-3de2-4743-9c50-cc80cfba77db (2b698136-bbd2-4fcb-ab7c-5f19b2267eb8) LiveRepair Active Active ds_transition to Active
24969 Sep 22 23:14:29.855 INFO [0] Transition from LiveRepair to Active
24970 Sep 22 23:14:29.855 WARN Live Repair returns Ok(())
24971 Sep 22 23:14:29.873 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24972 Sep 22 23:14:29.876 INFO listening on 127.0.0.1:0, task: main
24973 Sep 22 23:14:29.876 INFO current number of open files limit 65536 is already the maximum
24974 Sep 22 23:14:29.876 INFO Opened existing region file "/tmp/downstairs-GUu8sV6m/region.json"
24975 Sep 22 23:14:29.876 INFO Database read version 1
24976 Sep 22 23:14:29.876 INFO Database write version 1
24977 Sep 22 23:14:29.878 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24978 Sep 22 23:14:29.883 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
24979 Sep 22 23:14:29.889 WARN e94af85e-3796-4fab-91a1-f12add9c3020 request to replace downstairs 127.0.0.1:52905 with 127.0.0.1:56689
24980 Sep 22 23:14:29.889 INFO e94af85e-3796-4fab-91a1-f12add9c3020 found new target: 127.0.0.1:56689 at 0
24981 Waiting for replacement to finish
24982 Sep 22 23:14:29.893 INFO Checking if live repair is needed
24983 Sep 22 23:14:29.893 INFO [0] e94af85e-3796-4fab-91a1-f12add9c3020 (a86ae319-11ad-4b2d-a80b-330efc92ebef) LiveRepairReady Active Active ds_transition to LiveRepair
24984 Sep 22 23:14:29.893 INFO [0] Transition from LiveRepairReady to LiveRepair
24985 Sep 22 23:14:29.893 INFO Live Repair started
24986 Sep 22 23:14:29.893 WARN Live Repair main task begins., task: repair
24987 Sep 22 23:14:29.893 INFO Start Live Repair of extents 0 to 2, task: repair
24988 Sep 22 23:14:29.893 INFO Start extent 0 repair, task: repair
24989 Sep 22 23:14:29.893 DEBG RE:0 Repair extent begins
24990 Sep 22 23:14:29.893 DEBG Create new job ids for 0, : downstairs
24991 Sep 22 23:14:29.893 INFO RE:0 repair extent with ids 1002,1003,1004,1005 deps:[]
24992 Sep 22 23:14:29.893 DEBG Enqueue repair job 1005, : downstairs
24993 Sep 22 23:14:29.893 DEBG Enqueue repair job 1002, : downstairs
24994 Sep 22 23:14:29.893 INFO RE:0 close id:1002 queued, notify DS
24995 Sep 22 23:14:29.893 INFO RE:0 Wait for result from close command 1002:3
24996 Sep 22 23:14:29.893 DEBG [0] 1002 Remove check skipped:{JobId(1001)} from deps:[], : downstairs
24997 Sep 22 23:14:29.893 DEBG [0] 1002 Remove check < min repaired:1002 from deps:[], : downstairs
24998 Sep 22 23:14:29.893 INFO [0] 1002 final dependency list [], : downstairs
24999 Sep 22 23:14:29.894 DEBG [0] 1005 Remove check skipped:{JobId(1001)} from deps:[JobId(1002), JobId(1003), JobId(1004)], : downstairs
25000 Sep 22 23:14:29.894 DEBG [0] 1005 Remove check < min repaired:1002 from deps:[JobId(1002), JobId(1003), JobId(1004)], : downstairs
25001 Sep 22 23:14:29.894 INFO [0] 1005 final dependency list [JobId(1002), JobId(1003), JobId(1004)], : downstairs
25002 Sep 22 23:14:29.894 DEBG Flush just extent 0 with f:2 and g:1
25003 Sep 22 23:14:29.895 DEBG FlushClose:1002 extent 0 deps:[] res:true f:2 g:1
25004 Sep 22 23:14:29.895 DEBG Flush just extent 0 with f:2 and g:1
25005 Sep 22 23:14:29.896 DEBG FlushClose:1002 extent 0 deps:[] res:true f:2 g:1
25006 Sep 22 23:14:29.897 DEBG JustClose :1002 extent 0 deps:[] res:true
25007 Sep 22 23:14:29.897 DEBG [1] ELC got g:1 f:1 d:false
25008 Sep 22 23:14:29.897 DEBG [2] ELC got g:1 f:1 d:false
25009 Sep 22 23:14:29.897 DEBG [0] ELC got g:0 f:0 d:false
25010 Sep 22 23:14:29.897 DEBG [0] ExtentFlushClose 1002 AckReady, : downstairs
25011 Sep 22 23:14:29.897 DEBG up_ds_listen was notified
25012 Sep 22 23:14:29.897 DEBG up_ds_listen process 1002
25013 Sep 22 23:14:29.897 DEBG [A] ack job 1002:3, : downstairs
25014 Sep 22 23:14:29.897 DEBG up_ds_listen checked 1 jobs, back to waiting
25015 Sep 22 23:14:29.897 DEBG Extent 0 id:1002 Done
25016 Sep 22 23:14:29.897 DEBG Get repair info for 2 source, : downstairs
25017 Sep 22 23:14:29.897 DEBG Get repair info for 0 bad, : downstairs
25018 Sep 22 23:14:29.898 INFO Repair for extent 0 s:2 d:[ClientId(0)], : downstairs
25019 Sep 22 23:14:29.898 DEBG Enqueue repair job 1003, : downstairs
25020 Sep 22 23:14:29.898 INFO RE:0 Wait for result from repair command 1003:4
25021 Sep 22 23:14:29.898 DEBG [0] 1003 Remove check skipped:{JobId(1001)} from deps:[JobId(1002)], : downstairs
25022 Sep 22 23:14:29.898 DEBG [0] 1003 Remove check < min repaired:1002 from deps:[JobId(1002)], : downstairs
25023 Sep 22 23:14:29.898 INFO [0] 1003 final dependency list [JobId(1002)], : downstairs
25024 Sep 22 23:14:29.898 DEBG Received NoOP 1003
25025 Sep 22 23:14:29.898 DEBG Received NoOP 1003
25026 Sep 22 23:14:29.898 DEBG Received ExtentLiveRepair 1003
25027 Sep 22 23:14:29.898 DEBG Work of: LiveNoOp 1003
25028 Sep 22 23:14:29.898 DEBG LiveNoOp :1003 deps:[JobId(1002)] res:true
25029 Sep 22 23:14:29.898 WARN 1005 job ELiveReopen for connection UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } waiting on 1 deps, role: work
25030 Sep 22 23:14:29.898 DEBG Work of: LiveNoOp 1003
25031 Sep 22 23:14:29.898 DEBG LiveNoOp :1003 deps:[JobId(1002)] res:true
25032 Sep 22 23:14:29.898 WARN 1005 job ELiveReopen for connection UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } waiting on 1 deps, role: work
25033 Sep 22 23:14:29.898 DEBG ExtentLiveRepair: extent 0 sra:127.0.0.1:54607
25034 Sep 22 23:14:29.898 INFO Created copy dir "/tmp/downstairs-08law0EE/00/000/000.copy"
25035 Sep 22 23:14:29.928 INFO Downstairs has completed Negotiation, task: proc
25036 Sep 22 23:14:29.930 INFO UUID: 2b517ebe-5910-495c-8a3d-f94697b298ea
25037 Sep 22 23:14:29.930 INFO Blocks per extent:512 Total Extents: 188
25038 Sep 22 23:14:29.930 INFO Downstairs has completed Negotiation, task: proc
25039 Sep 22 23:14:29.930 INFO Crucible Version: Crucible Version: 0.0.1
25040 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
25041 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
25042 rustc: 1.70.0 stable x86_64-unknown-illumos
25043 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
25044 Sep 22 23:14:29.930 INFO Upstairs <-> Downstairs Message Version: 4, task: main
25045 Sep 22 23:14:29.930 INFO Using address: 127.0.0.1:50214, task: main
25046 Sep 22 23:14:29.931 INFO Repair listens on 127.0.0.1:0, task: repair
25047 Sep 22 23:14:29.931 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:55632, task: repair
25048 Sep 22 23:14:29.931 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:55632, task: repair
25049 Sep 22 23:14:29.931 INFO listening, local_addr: 127.0.0.1:55632, task: repair
25050 Sep 22 23:14:29.931 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:55632, task: repair
25051 Sep 22 23:14:29.932 INFO Using repair address: 127.0.0.1:55632, task: main
25052 Sep 22 23:14:29.932 INFO No SSL acceptor configured, task: main
25053 Sep 22 23:14:29.932 INFO Downstairs has completed Negotiation, task: proc
25054 Sep 22 23:14:29.933 INFO [0] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
25055 Sep 22 23:14:29.933 INFO [0] Transition from WaitActive to WaitQuorum
25056 Sep 22 23:14:29.933 WARN [0] new RM replaced this: None
25057 Sep 22 23:14:29.933 INFO [0] Starts reconcile loop
25058 Sep 22 23:14:29.933 INFO [1] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
25059 Sep 22 23:14:29.933 INFO [1] Transition from WaitActive to WaitQuorum
25060 Sep 22 23:14:29.933 WARN [1] new RM replaced this: None
25061 Sep 22 23:14:29.933 INFO [1] Starts reconcile loop
25062 Sep 22 23:14:29.933 INFO [2] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
25063 Sep 22 23:14:29.933 INFO [2] Transition from WaitActive to WaitQuorum
25064 Sep 22 23:14:29.933 WARN [2] new RM replaced this: None
25065 Sep 22 23:14:29.934 INFO [2] Starts reconcile loop
25066 Sep 22 23:14:29.934 INFO [0] 127.0.0.1:42762 task reports connection:true
25067 Sep 22 23:14:29.934 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 WaitQuorum WaitQuorum WaitQuorum
25068 Sep 22 23:14:29.934 INFO [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25069 Sep 22 23:14:29.934 INFO [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25070 Sep 22 23:14:29.934 INFO [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
25071 Sep 22 23:14:29.934 INFO [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25072 Sep 22 23:14:29.934 INFO [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25073 Sep 22 23:14:29.934 INFO [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
25074 Sep 22 23:14:29.934 INFO [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25075 Sep 22 23:14:29.934 INFO [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25076 Sep 22 23:14:29.934 INFO [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
25077 Sep 22 23:14:29.934 INFO Max found gen is 1
25078 Sep 22 23:14:29.934 INFO Generation requested: 1 >= found:1
25079 Sep 22 23:14:29.934 INFO Next flush: 1
25080 Sep 22 23:14:29.934 INFO All extents match
25081 Sep 22 23:14:29.934 INFO No downstairs repair required
25082 Sep 22 23:14:29.934 INFO No initial repair work was required
25083 Sep 22 23:14:29.934 INFO Set Downstairs and Upstairs active
25084 Sep 22 23:14:29.934 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 is now active with session: f334c95e-b851-4a8a-a731-3fb69e42e934
25085 Sep 22 23:14:29.934 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 Set Active after no repair
25086 Sep 22 23:14:29.934 INFO Notify all downstairs, region set compare is done.
25087 Sep 22 23:14:29.934 INFO Set check for repair
25088 Sep 22 23:14:29.934 INFO [1] 127.0.0.1:64149 task reports connection:true
25089 Sep 22 23:14:29.934 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 Active Active Active
25090 Sep 22 23:14:29.934 INFO Set check for repair
25091 Sep 22 23:14:29.934 INFO [2] 127.0.0.1:58182 task reports connection:true
25092 Sep 22 23:14:29.934 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 Active Active Active
25093 Sep 22 23:14:29.934 INFO Set check for repair
25094 Sep 22 23:14:29.934 INFO [0] received reconcile message
25095 Sep 22 23:14:29.934 INFO [0] All repairs completed, exit
25096 Sep 22 23:14:29.935 INFO [0] Starts cmd_loop
25097 Sep 22 23:14:29.935 INFO [1] received reconcile message
25098 Sep 22 23:14:29.935 INFO [1] All repairs completed, exit
25099 Sep 22 23:14:29.935 INFO [1] Starts cmd_loop
25100 Sep 22 23:14:29.935 INFO [2] received reconcile message
25101 Sep 22 23:14:29.935 INFO [2] All repairs completed, exit
25102 Sep 22 23:14:29.935 INFO [2] Starts cmd_loop
25103 The guest has finished waiting for activation
25104 Sep 22 23:14:29.972 INFO current number of open files limit 65536 is already the maximum
25105 Sep 22 23:14:29.973 INFO Created new region file "/tmp/downstairs-1HhZJsN0/region.json"
25106 Sep 22 23:14:29.987 INFO accepted connection, remote_addr: 127.0.0.1:40413, local_addr: 127.0.0.1:54607, task: repair
25107 Sep 22 23:14:29.987 TRCE incoming request, uri: /extent/0/files, method: GET, req_id: ec26dd66-293b-4bdf-9cf0-321bcfe2059a, remote_addr: 127.0.0.1:40413, local_addr: 127.0.0.1:54607, task: repair
25108 Sep 22 23:14:29.987 INFO request completed, latency_us: 243, response_code: 200, uri: /extent/0/files, method: GET, req_id: ec26dd66-293b-4bdf-9cf0-321bcfe2059a, remote_addr: 127.0.0.1:40413, local_addr: 127.0.0.1:54607, task: repair
25109 Sep 22 23:14:29.988 INFO eid:0 Found repair files: ["000", "000.db"]
25110 Sep 22 23:14:29.988 TRCE incoming request, uri: /newextent/0/data, method: GET, req_id: ba561005-233a-402f-8ab5-b4db21257bd6, remote_addr: 127.0.0.1:40413, local_addr: 127.0.0.1:54607, task: repair
25111 Sep 22 23:14:29.988 INFO request completed, latency_us: 443, response_code: 200, uri: /newextent/0/data, method: GET, req_id: ba561005-233a-402f-8ab5-b4db21257bd6, remote_addr: 127.0.0.1:40413, local_addr: 127.0.0.1:54607, task: repair
25112 Sep 22 23:14:29.989 TRCE incoming request, uri: /newextent/0/db, method: GET, req_id: c37d79fe-c802-4dc0-ae49-a16967640313, remote_addr: 127.0.0.1:40413, local_addr: 127.0.0.1:54607, task: repair
25113 Sep 22 23:14:29.990 INFO request completed, latency_us: 367, response_code: 200, uri: /newextent/0/db, method: GET, req_id: c37d79fe-c802-4dc0-ae49-a16967640313, remote_addr: 127.0.0.1:40413, local_addr: 127.0.0.1:54607, task: repair
25114 Sep 22 23:14:29.991 INFO Repair files downloaded, move directory "/tmp/downstairs-08law0EE/00/000/000.copy" to "/tmp/downstairs-08law0EE/00/000/000.replace"
25115 Sep 22 23:14:29.991 DEBG fsync completed for: "/tmp/downstairs-08law0EE/00/000"
25116 Sep 22 23:14:29.992 INFO Copy files from "/tmp/downstairs-08law0EE/00/000/000.replace" in "/tmp/downstairs-08law0EE/00/000"
25117 Sep 22 23:14:29.992 DEBG fsync completed for: "/tmp/downstairs-08law0EE/00/000/000"
25118 Sep 22 23:14:29.992 DEBG fsync completed for: "/tmp/downstairs-08law0EE/00/000/000.db"
25119 Sep 22 23:14:29.992 DEBG fsync completed for: "/tmp/downstairs-08law0EE/00/000"
25120 Sep 22 23:14:29.992 INFO Move directory "/tmp/downstairs-08law0EE/00/000/000.replace" to "/tmp/downstairs-08law0EE/00/000/000.completed"
25121 Sep 22 23:14:29.992 DEBG fsync completed for: "/tmp/downstairs-08law0EE/00/000"
25122 Sep 22 23:14:29.992 DEBG fsync completed for: "/tmp/downstairs-08law0EE/00/000"
25123 Sep 22 23:14:29.992 DEBG LiveRepair:1003 extent 0 deps:[JobId(1002)] res:true
25124 Sep 22 23:14:29.992 WARN 1005 job ELiveReopen for connection UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } waiting on 1 deps, role: work
25125 Sep 22 23:14:29.993 DEBG [0] ExtentLiveRepair AckReady 1003, : downstairs
25126 Sep 22 23:14:29.993 DEBG up_ds_listen was notified
25127 Sep 22 23:14:29.993 DEBG up_ds_listen process 1003
25128 Sep 22 23:14:29.993 DEBG [A] ack job 1003:4, : downstairs
25129 Sep 22 23:14:29.993 DEBG up_ds_listen checked 1 jobs, back to waiting
25130 Sep 22 23:14:29.993 DEBG Extent 0 id:1003 Done
25131 Sep 22 23:14:29.993 DEBG Enqueue repair job 1004, : downstairs
25132 Sep 22 23:14:29.993 INFO RE:0 Wait for result from NoOp command 1004:5
25133 Sep 22 23:14:29.993 DEBG [0] 1004 Remove check skipped:{JobId(1001)} from deps:[JobId(1002), JobId(1003)], : downstairs
25134 Sep 22 23:14:29.993 DEBG [0] 1004 Remove check < min repaired:1002 from deps:[JobId(1002), JobId(1003)], : downstairs
25135 Sep 22 23:14:29.993 INFO [0] 1004 final dependency list [JobId(1002), JobId(1003)], : downstairs
25136 Sep 22 23:14:29.993 DEBG Received NoOP 1004
25137 Sep 22 23:14:29.993 DEBG Received NoOP 1004
25138 Sep 22 23:14:29.994 DEBG Received NoOP 1004
25139 Sep 22 23:14:29.994 DEBG Work of: LiveNoOp 1004
25140 Sep 22 23:14:29.994 DEBG LiveNoOp :1004 deps:[JobId(1002), JobId(1003)] res:true
25141 Sep 22 23:14:29.995 DEBG LiveReopen:1005 extent 0 deps:[JobId(1002), JobId(1003), JobId(1004)] res:true
25142 Sep 22 23:14:29.995 DEBG Work of: LiveNoOp 1004
25143 Sep 22 23:14:29.995 DEBG LiveNoOp :1004 deps:[JobId(1002), JobId(1003)] res:true
25144 Sep 22 23:14:29.997 DEBG LiveReopen:1005 extent 0 deps:[JobId(1002), JobId(1003), JobId(1004)] res:true
25145 Sep 22 23:14:29.997 DEBG Work of: LiveNoOp 1004
25146 Sep 22 23:14:29.997 DEBG LiveNoOp :1004 deps:[JobId(1002), JobId(1003)] res:true
25147 Sep 22 23:14:30.001 DEBG LiveReopen:1005 extent 0 deps:[JobId(1002), JobId(1003), JobId(1004)] res:true
25148 Sep 22 23:14:30.002 DEBG [0] ExtentLiveNoOp AckReady 1004, : downstairs
25149 Sep 22 23:14:30.002 DEBG [0] ExtentLiveReopen AckReady 1005, : downstairs
25150 Sep 22 23:14:30.002 DEBG up_ds_listen was notified
25151 Sep 22 23:14:30.002 DEBG up_ds_listen process 1004
25152 Sep 22 23:14:30.002 DEBG [A] ack job 1004:5, : downstairs
25153 Sep 22 23:14:30.002 DEBG up_ds_listen process 1005
25154 Sep 22 23:14:30.002 DEBG [A] ack job 1005:6, : downstairs
25155 Sep 22 23:14:30.002 DEBG up_ds_listen checked 2 jobs, back to waiting
25156 Sep 22 23:14:30.002 DEBG up_ds_listen was notified
25157 Sep 22 23:14:30.002 DEBG up_ds_listen checked 0 jobs, back to waiting
25158 Sep 22 23:14:30.002 DEBG Extent 0 id:1004 Done
25159 Sep 22 23:14:30.002 INFO RE:0 Wait for result from reopen command 1005:6
25160 Sep 22 23:14:30.002 DEBG Extent 0 id:1005 Done
25161 Sep 22 23:14:30.002 INFO Start extent 1 repair, task: repair
25162 Sep 22 23:14:30.002 DEBG RE:1 Repair extent begins
25163 Sep 22 23:14:30.002 DEBG Create new job ids for 1, : downstairs
25164 Sep 22 23:14:30.002 INFO RE:1 repair extent with ids 1006,1007,1008,1009 deps:[]
25165 Sep 22 23:14:30.002 DEBG Enqueue repair job 1009, : downstairs
25166 Sep 22 23:14:30.002 DEBG Enqueue repair job 1006, : downstairs
25167 Sep 22 23:14:30.002 INFO RE:1 close id:1006 queued, notify DS
25168 Sep 22 23:14:30.002 INFO RE:1 Wait for result from close command 1006:7
25169 Sep 22 23:14:30.002 DEBG [0] 1006 Remove check skipped:{JobId(1001)} from deps:[], : downstairs
25170 Sep 22 23:14:30.003 DEBG [0] 1006 Remove check < min repaired:1002 from deps:[], : downstairs
25171 Sep 22 23:14:30.003 INFO [0] 1006 final dependency list [], : downstairs
25172 Sep 22 23:14:30.003 DEBG [0] 1009 Remove check skipped:{JobId(1001)} from deps:[JobId(1006), JobId(1007), JobId(1008)], : downstairs
25173 Sep 22 23:14:30.003 DEBG [0] 1009 Remove check < min repaired:1002 from deps:[JobId(1006), JobId(1007), JobId(1008)], : downstairs
25174 Sep 22 23:14:30.003 INFO [0] 1009 final dependency list [JobId(1006), JobId(1007), JobId(1008)], : downstairs
25175 Sep 22 23:14:30.003 DEBG Flush just extent 1 with f:3 and g:1
25176 Sep 22 23:14:30.004 DEBG FlushClose:1006 extent 1 deps:[] res:true f:3 g:1
25177 Sep 22 23:14:30.004 DEBG Flush just extent 1 with f:3 and g:1
25178 Sep 22 23:14:30.006 DEBG FlushClose:1006 extent 1 deps:[] res:true f:3 g:1
25179 Sep 22 23:14:30.007 DEBG JustClose :1006 extent 1 deps:[] res:true
25180 Sep 22 23:14:30.007 DEBG [1] ELC got g:1 f:1 d:false
25181 Sep 22 23:14:30.007 DEBG [2] ELC got g:1 f:1 d:false
25182 Sep 22 23:14:30.007 DEBG [0] ELC got g:0 f:0 d:false
25183 Sep 22 23:14:30.007 DEBG [0] ExtentFlushClose 1006 AckReady, : downstairs
25184 Sep 22 23:14:30.007 DEBG up_ds_listen was notified
25185 Sep 22 23:14:30.007 DEBG up_ds_listen process 1006
25186 Sep 22 23:14:30.007 DEBG [A] ack job 1006:7, : downstairs
25187 Sep 22 23:14:30.007 DEBG up_ds_listen checked 1 jobs, back to waiting
25188 Sep 22 23:14:30.007 DEBG Extent 1 id:1006 Done
25189 Sep 22 23:14:30.007 DEBG Get repair info for 2 source, : downstairs
25190 Sep 22 23:14:30.007 DEBG Get repair info for 0 bad, : downstairs
25191 Sep 22 23:14:30.007 INFO Repair for extent 1 s:2 d:[ClientId(0)], : downstairs
25192 Sep 22 23:14:30.007 DEBG Enqueue repair job 1007, : downstairs
25193 Sep 22 23:14:30.007 INFO RE:1 Wait for result from repair command 1007:8
25194 Sep 22 23:14:30.008 DEBG [0] 1007 Remove check skipped:{JobId(1001)} from deps:[JobId(1006)], : downstairs
25195 Sep 22 23:14:30.008 DEBG [0] 1007 Remove check < min repaired:1002 from deps:[JobId(1006)], : downstairs
25196 Sep 22 23:14:30.008 INFO [0] 1007 final dependency list [JobId(1006)], : downstairs
25197 Sep 22 23:14:30.008 DEBG Received NoOP 1007
25198 Sep 22 23:14:30.008 DEBG Received NoOP 1007
25199 Sep 22 23:14:30.008 DEBG Received ExtentLiveRepair 1007
25200 Sep 22 23:14:30.008 DEBG Work of: LiveNoOp 1007
25201 Sep 22 23:14:30.008 DEBG LiveNoOp :1007 deps:[JobId(1006)] res:true
25202 Sep 22 23:14:30.008 WARN 1009 job ELiveReopen for connection UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } waiting on 1 deps, role: work
25203 Sep 22 23:14:30.008 DEBG Work of: LiveNoOp 1007
25204 Sep 22 23:14:30.008 DEBG LiveNoOp :1007 deps:[JobId(1006)] res:true
25205 Sep 22 23:14:30.008 WARN 1009 job ELiveReopen for connection UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } waiting on 1 deps, role: work
25206 Sep 22 23:14:30.008 DEBG ExtentLiveRepair: extent 1 sra:127.0.0.1:54607
25207 Sep 22 23:14:30.009 INFO Created copy dir "/tmp/downstairs-08law0EE/00/000/001.copy"
25208 Sep 22 23:14:30.094 INFO listening on 127.0.0.1:0, task: main
25209 Sep 22 23:14:30.094 INFO current number of open files limit 65536 is already the maximum
25210 Sep 22 23:14:30.094 INFO Opened existing region file "/tmp/downstairs-fjJjawz1/region.json"
25211 Sep 22 23:14:30.094 INFO Database read version 1
25212 Sep 22 23:14:30.094 INFO Database write version 1
25213 Sep 22 23:14:30.120 INFO accepted connection, remote_addr: 127.0.0.1:65506, local_addr: 127.0.0.1:54607, task: repair
25214 Sep 22 23:14:30.120 TRCE incoming request, uri: /extent/1/files, method: GET, req_id: c0d11ec1-ae70-4353-a172-0ff1da32ca7e, remote_addr: 127.0.0.1:65506, local_addr: 127.0.0.1:54607, task: repair
25215 Sep 22 23:14:30.121 INFO request completed, latency_us: 318, response_code: 200, uri: /extent/1/files, method: GET, req_id: c0d11ec1-ae70-4353-a172-0ff1da32ca7e, remote_addr: 127.0.0.1:65506, local_addr: 127.0.0.1:54607, task: repair
25216 Sep 22 23:14:30.121 INFO eid:1 Found repair files: ["001", "001.db"]
25217 Sep 22 23:14:30.121 TRCE incoming request, uri: /newextent/1/data, method: GET, req_id: 99f42dac-fa1b-4fdb-8823-813c7b7cd69f, remote_addr: 127.0.0.1:65506, local_addr: 127.0.0.1:54607, task: repair
25218 Sep 22 23:14:30.122 INFO request completed, latency_us: 459, response_code: 200, uri: /newextent/1/data, method: GET, req_id: 99f42dac-fa1b-4fdb-8823-813c7b7cd69f, remote_addr: 127.0.0.1:65506, local_addr: 127.0.0.1:54607, task: repair
25219 Sep 22 23:14:30.123 TRCE incoming request, uri: /newextent/1/db, method: GET, req_id: de091ddb-886a-40f5-8b4f-79405fac48ac, remote_addr: 127.0.0.1:65506, local_addr: 127.0.0.1:54607, task: repair
25220 Sep 22 23:14:30.123 INFO request completed, latency_us: 381, response_code: 200, uri: /newextent/1/db, method: GET, req_id: de091ddb-886a-40f5-8b4f-79405fac48ac, remote_addr: 127.0.0.1:65506, local_addr: 127.0.0.1:54607, task: repair
25221 Sep 22 23:14:30.124 INFO Repair files downloaded, move directory "/tmp/downstairs-08law0EE/00/000/001.copy" to "/tmp/downstairs-08law0EE/00/000/001.replace"
25222 Sep 22 23:14:30.124 DEBG fsync completed for: "/tmp/downstairs-08law0EE/00/000"
25223 Sep 22 23:14:30.125 INFO UUID: 65afeef9-2fd5-4a00-9b98-8d6c60e46268
25224 Sep 22 23:14:30.125 INFO Blocks per extent:512 Total Extents: 188
25225 Sep 22 23:14:30.125 INFO Crucible Version: Crucible Version: 0.0.1
25226 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
25227 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
25228 rustc: 1.70.0 stable x86_64-unknown-illumos
25229 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
25230 Sep 22 23:14:30.125 INFO Upstairs <-> Downstairs Message Version: 4, task: main
25231 Sep 22 23:14:30.125 INFO Using address: 127.0.0.1:62859, task: main
25232 Sep 22 23:14:30.125 INFO Copy files from "/tmp/downstairs-08law0EE/00/000/001.replace" in "/tmp/downstairs-08law0EE/00/000"
25233 Sep 22 23:14:30.125 DEBG fsync completed for: "/tmp/downstairs-08law0EE/00/000/001"
25234 Sep 22 23:14:30.126 DEBG fsync completed for: "/tmp/downstairs-08law0EE/00/000/001.db"
25235 Sep 22 23:14:30.126 DEBG fsync completed for: "/tmp/downstairs-08law0EE/00/000"
25236 Sep 22 23:14:30.126 INFO Move directory "/tmp/downstairs-08law0EE/00/000/001.replace" to "/tmp/downstairs-08law0EE/00/000/001.completed"
25237 Sep 22 23:14:30.126 DEBG fsync completed for: "/tmp/downstairs-08law0EE/00/000"
25238 Sep 22 23:14:30.126 DEBG fsync completed for: "/tmp/downstairs-08law0EE/00/000"
25239 Sep 22 23:14:30.126 DEBG LiveRepair:1007 extent 1 deps:[JobId(1006)] res:true
25240 Sep 22 23:14:30.126 INFO Repair listens on 127.0.0.1:0, task: repair
25241 Sep 22 23:14:30.126 WARN 1009 job ELiveReopen for connection UpstairsConnection { upstairs_id: e94af85e-3796-4fab-91a1-f12add9c3020, session_id: a86ae319-11ad-4b2d-a80b-330efc92ebef, gen: 1 } waiting on 1 deps, role: work
25242 Sep 22 23:14:30.126 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:47266, task: repair
25243 Sep 22 23:14:30.126 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:47266, task: repair
25244 Sep 22 23:14:30.126 INFO listening, local_addr: 127.0.0.1:47266, task: repair
25245 Sep 22 23:14:30.126 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:47266, task: repair
25246 Sep 22 23:14:30.126 DEBG [0] ExtentLiveRepair AckReady 1007, : downstairs
25247 Sep 22 23:14:30.126 INFO Using repair address: 127.0.0.1:47266, task: main
25248 Sep 22 23:14:30.126 INFO No SSL acceptor configured, task: main
25249 Sep 22 23:14:30.126 DEBG up_ds_listen was notified
25250 Sep 22 23:14:30.126 DEBG up_ds_listen process 1007
25251 Sep 22 23:14:30.126 DEBG [A] ack job 1007:8, : downstairs
25252 Sep 22 23:14:30.126 DEBG up_ds_listen checked 1 jobs, back to waiting
25253 Sep 22 23:14:30.126 DEBG Extent 1 id:1007 Done
25254 Sep 22 23:14:30.127 DEBG Enqueue repair job 1008, : downstairs
25255 Sep 22 23:14:30.127 INFO RE:1 Wait for result from NoOp command 1008:9
25256 Sep 22 23:14:30.127 DEBG [0] 1008 Remove check skipped:{JobId(1001)} from deps:[JobId(1006), JobId(1007)], : downstairs
25257 Sep 22 23:14:30.127 DEBG [0] 1008 Remove check < min repaired:1002 from deps:[JobId(1006), JobId(1007)], : downstairs
25258 Sep 22 23:14:30.127 INFO [0] 1008 final dependency list [JobId(1006), JobId(1007)], : downstairs
25259 Sep 22 23:14:30.127 DEBG Received NoOP 1008
25260 Sep 22 23:14:30.127 DEBG Received NoOP 1008
25261 Sep 22 23:14:30.127 DEBG Received NoOP 1008
25262 Sep 22 23:14:30.127 DEBG Work of: LiveNoOp 1008
25263 Sep 22 23:14:30.127 DEBG LiveNoOp :1008 deps:[JobId(1006), JobId(1007)] res:true
25264 Sep 22 23:14:30.129 DEBG LiveReopen:1009 extent 1 deps:[JobId(1006), JobId(1007), JobId(1008)] res:true
25265 Sep 22 23:14:30.129 DEBG Work of: LiveNoOp 1008
25266 Sep 22 23:14:30.129 DEBG LiveNoOp :1008 deps:[JobId(1006), JobId(1007)] res:true
25267 Sep 22 23:14:30.131 DEBG LiveReopen:1009 extent 1 deps:[JobId(1006), JobId(1007), JobId(1008)] res:true
25268 Sep 22 23:14:30.131 DEBG Work of: LiveNoOp 1008
25269 Sep 22 23:14:30.131 DEBG LiveNoOp :1008 deps:[JobId(1006), JobId(1007)] res:true
25270 Sep 22 23:14:30.133 DEBG LiveReopen:1009 extent 1 deps:[JobId(1006), JobId(1007), JobId(1008)] res:true
25271 Sep 22 23:14:30.133 DEBG [0] ExtentLiveNoOp AckReady 1008, : downstairs
25272 Sep 22 23:14:30.134 DEBG [0] ExtentLiveReopen AckReady 1009, : downstairs
25273 Sep 22 23:14:30.134 DEBG up_ds_listen was notified
25274 Sep 22 23:14:30.134 DEBG up_ds_listen process 1008
25275 Sep 22 23:14:30.134 DEBG [A] ack job 1008:9, : downstairs
25276 Sep 22 23:14:30.134 DEBG up_ds_listen process 1009
25277 Sep 22 23:14:30.134 DEBG [A] ack job 1009:10, : downstairs
25278 Sep 22 23:14:30.134 DEBG up_ds_listen checked 2 jobs, back to waiting
25279 Sep 22 23:14:30.134 DEBG up_ds_listen was notified
25280 Sep 22 23:14:30.134 DEBG up_ds_listen checked 0 jobs, back to waiting
25281 Sep 22 23:14:30.134 DEBG Extent 1 id:1008 Done
25282 Sep 22 23:14:30.134 INFO RE:1 Wait for result from reopen command 1009:10
25283 Sep 22 23:14:30.134 DEBG Extent 1 id:1009 Done
25284 Sep 22 23:14:30.134 DEBG IO Flush 1010 has deps [JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)]
25285 Sep 22 23:14:30.134 INFO LiveRepair final flush submitted
25286 Sep 22 23:14:30.134 DEBG [0] 1010 Remove check skipped:{JobId(1001)} from deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)], : downstairs
25287 Sep 22 23:14:30.134 DEBG [0] 1010 Remove check < min repaired:1002 from deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)], : downstairs
25288 Sep 22 23:14:30.134 INFO [0] 1010 final dependency list [JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)], : downstairs
25289 Sep 22 23:14:30.135 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)] res:true f:4 g:1
25290 Sep 22 23:14:30.135 DEBG Flush :1010 extent_limit None deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)] res:true f:4 g:1
25291 Sep 22 23:14:30.135 DEBG Flush :1010 extent_limit Some(1) deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002)] res:true f:4 g:1
25292 Sep 22 23:14:30.135 DEBG up_ds_listen was notified
25293 Sep 22 23:14:30.135 DEBG up_ds_listen process 1010
25294 Sep 22 23:14:30.135 DEBG [A] ack job 1010:11, : downstairs
25295 Sep 22 23:14:30.135 DEBG [rc] retire 1010 clears [JobId(1002), JobId(1003), JobId(1004), JobId(1005), JobId(1006), JobId(1007), JobId(1008), JobId(1009), JobId(1010)], : downstairs
25296 Sep 22 23:14:30.135 DEBG up_ds_listen checked 1 jobs, back to waiting
25297 Sep 22 23:14:30.135 INFO LiveRepair final flush completed
25298 Sep 22 23:14:30.135 INFO [0] e94af85e-3796-4fab-91a1-f12add9c3020 (a86ae319-11ad-4b2d-a80b-330efc92ebef) LiveRepair Active Active ds_transition to Active
25299 Sep 22 23:14:30.135 INFO [0] Transition from LiveRepair to Active
25300 Sep 22 23:14:30.135 WARN Live Repair returns Ok(())
25301 Sep 22 23:14:30.158 INFO UUID: 6e66ae97-3dfc-40c6-a1d0-7d1c5297fdbf
25302 Sep 22 23:14:30.158 INFO Blocks per extent:512 Total Extents: 188
25303 Sep 22 23:14:30.159 INFO Crucible Version: Crucible Version: 0.0.1
25304 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
25305 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
25306 rustc: 1.70.0 stable x86_64-unknown-illumos
25307 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
25308 Sep 22 23:14:30.159 INFO Upstairs <-> Downstairs Message Version: 4, task: main
25309 Sep 22 23:14:30.159 INFO Using address: 127.0.0.1:44532, task: main
25310 Sep 22 23:14:30.159 INFO Repair listens on 127.0.0.1:0, task: repair
25311 Sep 22 23:14:30.159 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35145, task: repair
25312 Sep 22 23:14:30.159 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35145, task: repair
25313 Sep 22 23:14:30.159 INFO listening, local_addr: 127.0.0.1:35145, task: repair
25314 Sep 22 23:14:30.159 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35145, task: repair
25315 Sep 22 23:14:30.159 INFO Using repair address: 127.0.0.1:35145, task: main
25316 Sep 22 23:14:30.159 INFO No SSL acceptor configured, task: main
25317 note: configured to log to "/dev/stdout"
253182023-09-22T23:14:30.173ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:45536
253192023-09-22T23:14:30.173ZINFOcrucible-pantry: listen IP: 127.0.0.1:45536
25320 note: configured to log to "/dev/stdout"
253212023-09-22T23:14:30.196ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:34477
253222023-09-22T23:14:30.196ZINFOcrucible-pantry: listen IP: 127.0.0.1:34477
25323 Sep 22 23:14:30.213 INFO listening on 127.0.0.1:0, task: main
25324 Sep 22 23:14:30.213 INFO current number of open files limit 65536 is already the maximum
25325 Sep 22 23:14:30.213 INFO Opened existing region file "/tmp/downstairs-1HhZJsN0/region.json"
25326 Sep 22 23:14:30.213 INFO Database read version 1
25327 Sep 22 23:14:30.213 INFO Database write version 1
25328 Sep 22 23:14:30.256 INFO UUID: 9da41dd6-4cdf-4b03-8df1-d13ee59384a3
25329 Sep 22 23:14:30.256 INFO Blocks per extent:512 Total Extents: 188
25330 Sep 22 23:14:30.256 INFO Crucible Version: Crucible Version: 0.0.1
25331 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
25332 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
25333 rustc: 1.70.0 stable x86_64-unknown-illumos
25334 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
25335 Sep 22 23:14:30.257 INFO Upstairs <-> Downstairs Message Version: 4, task: main
25336 Sep 22 23:14:30.257 INFO Using address: 127.0.0.1:51108, task: main
25337 Sep 22 23:14:30.257 INFO Repair listens on 127.0.0.1:0, task: repair
25338 Sep 22 23:14:30.257 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:55881, task: repair
25339 Sep 22 23:14:30.257 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:55881, task: repair
25340 Sep 22 23:14:30.257 INFO listening, local_addr: 127.0.0.1:55881, task: repair
25341 Sep 22 23:14:30.257 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:55881, task: repair
25342 Sep 22 23:14:30.257 INFO Using repair address: 127.0.0.1:55881, task: main
25343 Sep 22 23:14:30.257 INFO No SSL acceptor configured, task: main
25344 Sep 22 23:14:30.267 INFO listening on 127.0.0.1:0, task: main
253452023-09-22T23:14:30.267ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:45536 remote_addr = 127.0.0.1:41882
253462023-09-22T23:14:30.268ZINFOcrucible-pantry (datafile): no entry exists for volume a9fc305b-73b1-411e-9d00-c6edbf74297f, constructing...
253472023-09-22T23:14:30.268ZINFOcrucible-pantry (datafile): Upstairs starts
253482023-09-22T23:14:30.268ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
253492023-09-22T23:14:30.268ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
253502023-09-22T23:14:30.268ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: d37ce371-0ed6-44c3-bb8f-e1d703ba2c92
253512023-09-22T23:14:30.268ZINFOcrucible-pantry (datafile): Crucible d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 has session id: 86116c5e-f3b4-46a4-818a-c22a616d06ea
253522023-09-22T23:14:30.268ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:62776 looper = 0
253532023-09-22T23:14:30.268ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:60728 looper = 1
253542023-09-22T23:14:30.269ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:62859 looper = 2
253552023-09-22T23:14:30.269ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
253562023-09-22T23:14:30.269ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
253572023-09-22T23:14:30.269ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
253582023-09-22T23:14:30.269ZINFOcrucible-pantry (datafile): volume a9fc305b-73b1-411e-9d00-c6edbf74297f constructed ok
25359 The guest has requested activation
253602023-09-22T23:14:30.269ZINFOcrucible-pantry (datafile): d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 active request set
25361 Sep 22 23:14:30.270 INFO accepted connection from 127.0.0.1:56292, task: main
253622023-09-22T23:14:30.270ZINFOcrucible-pantry (datafile): [0] d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 looper connected looper = 0
253632023-09-22T23:14:30.270ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:62776 in state New
253642023-09-22T23:14:30.270ZINFOcrucible-pantry (datafile): [1] d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 looper connected looper = 1
253652023-09-22T23:14:30.270ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:60728 in state New
253662023-09-22T23:14:30.270ZINFOcrucible-pantry (datafile): [2] d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 looper connected looper = 2
253672023-09-22T23:14:30.270ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:62859 in state New
25368 Sep 22 23:14:30.270 INFO accepted connection from 127.0.0.1:61683, task: main
25369 Sep 22 23:14:30.270 INFO accepted connection from 127.0.0.1:52790, task: main
25370 Sep 22 23:14:30.270 INFO Connection request from d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 with version 4, task: proc
25371 Sep 22 23:14:30.270 INFO upstairs UpstairsConnection { upstairs_id: d37ce371-0ed6-44c3-bb8f-e1d703ba2c92, session_id: 1e2cd9af-31e3-4c00-a3ff-558b5ef140ef, gen: 1 } connected, version 4, task: proc
25372 Sep 22 23:14:30.270 INFO Connection request from d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 with version 4, task: proc
25373 Sep 22 23:14:30.270 INFO upstairs UpstairsConnection { upstairs_id: d37ce371-0ed6-44c3-bb8f-e1d703ba2c92, session_id: 1e2cd9af-31e3-4c00-a3ff-558b5ef140ef, gen: 1 } connected, version 4, task: proc
25374 Sep 22 23:14:30.271 INFO Connection request from d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 with version 4, task: proc
25375 Sep 22 23:14:30.271 INFO upstairs UpstairsConnection { upstairs_id: d37ce371-0ed6-44c3-bb8f-e1d703ba2c92, session_id: 1e2cd9af-31e3-4c00-a3ff-558b5ef140ef, gen: 1 } connected, version 4, task: proc
253762023-09-22T23:14:30.271ZINFOcrucible-pantry (datafile): [0] d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 (1e2cd9af-31e3-4c00-a3ff-558b5ef140ef) New New New ds_transition to WaitActive
253772023-09-22T23:14:30.271ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
253782023-09-22T23:14:30.271ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 1e2cd9af-31e3-4c00-a3ff-558b5ef140ef
253792023-09-22T23:14:30.271ZINFOcrucible-pantry (datafile): [1] d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 (1e2cd9af-31e3-4c00-a3ff-558b5ef140ef) WaitActive New New ds_transition to WaitActive
253802023-09-22T23:14:30.271ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
253812023-09-22T23:14:30.271ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 1e2cd9af-31e3-4c00-a3ff-558b5ef140ef
253822023-09-22T23:14:30.271ZINFOcrucible-pantry (datafile): [2] d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 (1e2cd9af-31e3-4c00-a3ff-558b5ef140ef) WaitActive WaitActive New ds_transition to WaitActive
253832023-09-22T23:14:30.271ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
253842023-09-22T23:14:30.271ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 1e2cd9af-31e3-4c00-a3ff-558b5ef140ef
25385 Sep 22 23:14:30.272 INFO UpstairsConnection { upstairs_id: d37ce371-0ed6-44c3-bb8f-e1d703ba2c92, session_id: 1e2cd9af-31e3-4c00-a3ff-558b5ef140ef, gen: 1 } is now active (read-write)
25386 Sep 22 23:14:30.272 INFO UpstairsConnection { upstairs_id: d37ce371-0ed6-44c3-bb8f-e1d703ba2c92, session_id: 1e2cd9af-31e3-4c00-a3ff-558b5ef140ef, gen: 1 } is now active (read-write)
25387 Sep 22 23:14:30.272 INFO UpstairsConnection { upstairs_id: d37ce371-0ed6-44c3-bb8f-e1d703ba2c92, session_id: 1e2cd9af-31e3-4c00-a3ff-558b5ef140ef, gen: 1 } is now active (read-write)
25388 note: configured to log to "/dev/stdout"
253892023-09-22T23:14:30.273ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:62776 has UUID 10109090-7e8f-489a-9e4a-65ce6167d00b
253902023-09-22T23:14:30.273ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 10109090-7e8f-489a-9e4a-65ce6167d00b, encrypted: true, database_read_version: 1, database_write_version: 1 }
253912023-09-22T23:14:30.273ZINFOcrucible-pantry (datafile): d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 WaitActive WaitActive WaitActive
253922023-09-22T23:14:30.273ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:60728 has UUID 02d6cbbb-c0cd-42b6-b416-45900a2ec4e0
253932023-09-22T23:14:30.273ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 02d6cbbb-c0cd-42b6-b416-45900a2ec4e0, encrypted: true, database_read_version: 1, database_write_version: 1 }
253942023-09-22T23:14:30.273ZINFOcrucible-pantry (datafile): d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 WaitActive WaitActive WaitActive
253952023-09-22T23:14:30.273ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:62859 has UUID 65afeef9-2fd5-4a00-9b98-8d6c60e46268
253962023-09-22T23:14:30.273ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 65afeef9-2fd5-4a00-9b98-8d6c60e46268, encrypted: true, database_read_version: 1, database_write_version: 1 }
253972023-09-22T23:14:30.273ZINFOcrucible-pantry (datafile): d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 WaitActive WaitActive WaitActive
253982023-09-22T23:14:30.274ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:61933
253992023-09-22T23:14:30.274ZINFOcrucible-pantry: listen IP: 127.0.0.1:61933
25400 Sep 22 23:14:30.292 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25401 Sep 22 23:14:30.295 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25402 Sep 22 23:14:30.298 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25403 Sep 22 23:14:30.301 INFO UUID: ef580430-d0f3-4a91-9bfb-605bdf8f791b
25404 Sep 22 23:14:30.301 INFO Blocks per extent:512 Total Extents: 188
25405 Sep 22 23:14:30.301 INFO Crucible Version: Crucible Version: 0.0.1
25406 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
25407 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
25408 rustc: 1.70.0 stable x86_64-unknown-illumos
25409 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
25410 Sep 22 23:14:30.301 INFO Upstairs <-> Downstairs Message Version: 4, task: main
25411 Sep 22 23:14:30.301 INFO Using address: 127.0.0.1:47631, task: main
25412 Sep 22 23:14:30.302 INFO Repair listens on 127.0.0.1:0, task: repair
25413 Sep 22 23:14:30.302 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52204, task: repair
25414 Sep 22 23:14:30.302 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52204, task: repair
25415 Sep 22 23:14:30.302 INFO listening, local_addr: 127.0.0.1:52204, task: repair
25416 Sep 22 23:14:30.302 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52204, task: repair
25417 Sep 22 23:14:30.302 INFO Using repair address: 127.0.0.1:52204, task: main
25418 Sep 22 23:14:30.302 INFO No SSL acceptor configured, task: main
25419 note: configured to log to "/dev/stdout"
254202023-09-22T23:14:30.316ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:33566
254212023-09-22T23:14:30.316ZINFOcrucible-pantry: listen IP: 127.0.0.1:33566
25422 Sep 22 23:14:30.326 INFO Downstairs has completed Negotiation, task: proc
25423 Sep 22 23:14:30.328 INFO Downstairs has completed Negotiation, task: proc
25424 Sep 22 23:14:30.329 INFO Downstairs has completed Negotiation, task: proc
254252023-09-22T23:14:30.329ZINFOcrucible-pantry (datafile): [0] d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 (1e2cd9af-31e3-4c00-a3ff-558b5ef140ef) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
254262023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
254272023-09-22T23:14:30.330ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
254282023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
254292023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): [1] d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 (1e2cd9af-31e3-4c00-a3ff-558b5ef140ef) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
254302023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
254312023-09-22T23:14:30.330ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
254322023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
254332023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): [2] d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 (1e2cd9af-31e3-4c00-a3ff-558b5ef140ef) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
254342023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
254352023-09-22T23:14:30.330ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
254362023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
254372023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:62776 task reports connection:true
254382023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 WaitQuorum WaitQuorum WaitQuorum
254392023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
254402023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
254412023-09-22T23:14:30.330ZINFOcrucible-pantry (datafile): [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
254422023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
254432023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
254442023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
25445 The guest has finished waiting for activation
254462023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
254472023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
254482023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
254492023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): Max found gen is 1
254502023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
254512023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): Next flush: 1
254522023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): All extents match
254532023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): No downstairs repair required
254542023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): No initial repair work was required
254552023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
254562023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 is now active with session: 1e2cd9af-31e3-4c00-a3ff-558b5ef140ef
254572023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 Set Active after no repair
254582023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
254592023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): Set check for repair
254602023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:60728 task reports connection:true
254612023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 Active Active Active
254622023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): Set check for repair
254632023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:62859 task reports connection:true
254642023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 Active Active Active
254652023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): Set check for repair
254662023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): [0] received reconcile message
254672023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
254682023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
254692023-09-22T23:14:30.331ZINFOcrucible-pantry (datafile): [1] received reconcile message
254702023-09-22T23:14:30.332ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
254712023-09-22T23:14:30.332ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
254722023-09-22T23:14:30.332ZINFOcrucible-pantry (datafile): [2] received reconcile message
254732023-09-22T23:14:30.332ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
254742023-09-22T23:14:30.332ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
254752023-09-22T23:14:30.332ZINFOcrucible-pantry (datafile): volume a9fc305b-73b1-411e-9d00-c6edbf74297f activated ok
254762023-09-22T23:14:30.332ZINFOcrucible-pantry (datafile): volume a9fc305b-73b1-411e-9d00-c6edbf74297f constructed and inserted ok
254772023-09-22T23:14:30.332ZINFOcrucible-pantry (dropshot): request completed latency_us = 63626 local_addr = 127.0.0.1:45536 method = POST remote_addr = 127.0.0.1:41882 req_id = 26f10781-7085-42d0-af89-960debed8e37 response_code = 200 uri = /crucible/pantry/0/volume/a9fc305b-73b1-411e-9d00-c6edbf74297f
25478 Sep 22 23:14:30.333 INFO listening on 127.0.0.1:0, task: main
254792023-09-22T23:14:30.333ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:34477 remote_addr = 127.0.0.1:51700
254802023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): no entry exists for volume cc43b43c-a066-4496-841d-0f340d8a3b1d, constructing...
254812023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): Upstairs starts
254822023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
254832023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
254842023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 18b01c87-5587-4e4c-af37-0e404569a230
254852023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): Crucible 18b01c87-5587-4e4c-af37-0e404569a230 has session id: 06e81f5d-ec2e-4b30-923b-a0b5fee55cdb
254862023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:62296 looper = 0
254872023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:62299 looper = 1
254882023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:44532 looper = 2
254892023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
254902023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
254912023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
254922023-09-22T23:14:30.334ZINFOcrucible-pantry (datafile): volume cc43b43c-a066-4496-841d-0f340d8a3b1d constructed ok
25493 The guest has requested activation
254942023-09-22T23:14:30.335ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 active request set
254952023-09-22T23:14:30.335ZINFOcrucible-pantry (datafile): [0] 18b01c87-5587-4e4c-af37-0e404569a230 looper connected looper = 0
254962023-09-22T23:14:30.335ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:62296 in state New
254972023-09-22T23:14:30.335ZINFOcrucible-pantry (datafile): [1] 18b01c87-5587-4e4c-af37-0e404569a230 looper connected looper = 1
254982023-09-22T23:14:30.335ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:62299 in state New
25499 Sep 22 23:14:30.335 INFO accepted connection from 127.0.0.1:35197, task: main
255002023-09-22T23:14:30.335ZINFOcrucible-pantry (datafile): [2] 18b01c87-5587-4e4c-af37-0e404569a230 looper connected looper = 2
255012023-09-22T23:14:30.335ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:44532 in state New
25502 Sep 22 23:14:30.335 INFO accepted connection from 127.0.0.1:57231, task: main
25503 Sep 22 23:14:30.335 INFO accepted connection from 127.0.0.1:41210, task: main
25504 Sep 22 23:14:30.335 INFO Connection request from 18b01c87-5587-4e4c-af37-0e404569a230 with version 4, task: proc
25505 Sep 22 23:14:30.335 INFO upstairs UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 } connected, version 4, task: proc
25506 Sep 22 23:14:30.335 INFO Connection request from 18b01c87-5587-4e4c-af37-0e404569a230 with version 4, task: proc
25507 Sep 22 23:14:30.335 INFO upstairs UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 } connected, version 4, task: proc
25508 Sep 22 23:14:30.335 INFO Connection request from 18b01c87-5587-4e4c-af37-0e404569a230 with version 4, task: proc
25509 Sep 22 23:14:30.335 INFO upstairs UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 } connected, version 4, task: proc
255102023-09-22T23:14:30.336ZINFOcrucible-pantry (datafile): [0] 18b01c87-5587-4e4c-af37-0e404569a230 (12531aa6-b93c-4bc2-9fe7-f954b135051a) New New New ds_transition to WaitActive
255112023-09-22T23:14:30.336ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
255122023-09-22T23:14:30.336ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 12531aa6-b93c-4bc2-9fe7-f954b135051a
255132023-09-22T23:14:30.336ZINFOcrucible-pantry (datafile): [1] 18b01c87-5587-4e4c-af37-0e404569a230 (12531aa6-b93c-4bc2-9fe7-f954b135051a) WaitActive New New ds_transition to WaitActive
255142023-09-22T23:14:30.336ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
255152023-09-22T23:14:30.336ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 12531aa6-b93c-4bc2-9fe7-f954b135051a
255162023-09-22T23:14:30.336ZINFOcrucible-pantry (datafile): [2] 18b01c87-5587-4e4c-af37-0e404569a230 (12531aa6-b93c-4bc2-9fe7-f954b135051a) WaitActive WaitActive New ds_transition to WaitActive
255172023-09-22T23:14:30.336ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
255182023-09-22T23:14:30.336ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 12531aa6-b93c-4bc2-9fe7-f954b135051a
25519 Sep 22 23:14:30.336 INFO UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 } is now active (read-write)
25520 Sep 22 23:14:30.336 INFO UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 } is now active (read-write)
25521 Sep 22 23:14:30.336 INFO UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 } is now active (read-write)
255222023-09-22T23:14:30.337ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:62296 has UUID bec31c6c-c0ce-43db-8467-8d94b73527a2
255232023-09-22T23:14:30.337ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: bec31c6c-c0ce-43db-8467-8d94b73527a2, encrypted: true, database_read_version: 1, database_write_version: 1 }
255242023-09-22T23:14:30.337ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 WaitActive WaitActive WaitActive
255252023-09-22T23:14:30.337ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:62299 has UUID ede072e9-d3f2-4884-bb86-eea53898f9d9
255262023-09-22T23:14:30.337ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: ede072e9-d3f2-4884-bb86-eea53898f9d9, encrypted: true, database_read_version: 1, database_write_version: 1 }
255272023-09-22T23:14:30.337ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 WaitActive WaitActive WaitActive
255282023-09-22T23:14:30.337ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:44532 has UUID 6e66ae97-3dfc-40c6-a1d0-7d1c5297fdbf
255292023-09-22T23:14:30.337ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 6e66ae97-3dfc-40c6-a1d0-7d1c5297fdbf, encrypted: true, database_read_version: 1, database_write_version: 1 }
255302023-09-22T23:14:30.337ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 WaitActive WaitActive WaitActive
25531 Sep 22 23:14:30.349 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25532 Sep 22 23:14:30.351 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25533 Sep 22 23:14:30.353 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25534 Sep 22 23:14:30.371 INFO Downstairs has completed Negotiation, task: proc
25535 Sep 22 23:14:30.372 INFO Downstairs has completed Negotiation, task: proc
25536 Sep 22 23:14:30.373 INFO Downstairs has completed Negotiation, task: proc
255372023-09-22T23:14:30.373ZINFOcrucible-pantry (datafile): [0] 18b01c87-5587-4e4c-af37-0e404569a230 (12531aa6-b93c-4bc2-9fe7-f954b135051a) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
255382023-09-22T23:14:30.373ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
255392023-09-22T23:14:30.373ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
255402023-09-22T23:14:30.373ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
255412023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [1] 18b01c87-5587-4e4c-af37-0e404569a230 (12531aa6-b93c-4bc2-9fe7-f954b135051a) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
255422023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
255432023-09-22T23:14:30.374ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
255442023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
255452023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [2] 18b01c87-5587-4e4c-af37-0e404569a230 (12531aa6-b93c-4bc2-9fe7-f954b135051a) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
255462023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
255472023-09-22T23:14:30.374ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
255482023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
255492023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:62296 task reports connection:true
255502023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 WaitQuorum WaitQuorum WaitQuorum
25551 The guest has finished waiting for activation
255522023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
255532023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
255542023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
255552023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
255562023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
255572023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
255582023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
255592023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
255602023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
255612023-09-22T23:14:30.374ZINFOcrucible-pantry (datafile): Max found gen is 1
255622023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
255632023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): Next flush: 1
255642023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): All extents match
255652023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): No downstairs repair required
255662023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): No initial repair work was required
255672023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
255682023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 is now active with session: 12531aa6-b93c-4bc2-9fe7-f954b135051a
255692023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 Set Active after no repair
255702023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
255712023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): Set check for repair
255722023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:62299 task reports connection:true
255732023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 Active Active Active
255742023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): Set check for repair
255752023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:44532 task reports connection:true
255762023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 Active Active Active
255772023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): Set check for repair
255782023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): [0] received reconcile message
255792023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
255802023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
255812023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): [1] received reconcile message
255822023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
255832023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
255842023-09-22T23:14:30.375ZINFOcrucible-pantry (datafile): [2] received reconcile message
255852023-09-22T23:14:30.376ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
255862023-09-22T23:14:30.376ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
255872023-09-22T23:14:30.376ZINFOcrucible-pantry (datafile): volume cc43b43c-a066-4496-841d-0f340d8a3b1d activated ok
255882023-09-22T23:14:30.376ZINFOcrucible-pantry (datafile): volume cc43b43c-a066-4496-841d-0f340d8a3b1d constructed and inserted ok
255892023-09-22T23:14:30.376ZINFOcrucible-pantry (dropshot): request completed latency_us = 41099 local_addr = 127.0.0.1:34477 method = POST remote_addr = 127.0.0.1:51700 req_id = 7cf62e10-5608-45f3-a8e8-04d75ee2f23f response_code = 200 uri = /crucible/pantry/0/volume/cc43b43c-a066-4496-841d-0f340d8a3b1d
25590 Sep 22 23:14:30.441 INFO listening on 127.0.0.1:0, task: main
255912023-09-22T23:14:30.441ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:61933 remote_addr = 127.0.0.1:58570
255922023-09-22T23:14:30.442ZINFOcrucible-pantry (datafile): no entry exists for volume d6736968-99ba-4ebf-b4ff-15afb0904fed, constructing...
255932023-09-22T23:14:30.442ZINFOcrucible-pantry (datafile): Upstairs starts
255942023-09-22T23:14:30.442ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
255952023-09-22T23:14:30.442ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
255962023-09-22T23:14:30.442ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 0d1229ab-bd36-49b6-95e1-d6287586b840
255972023-09-22T23:14:30.442ZINFOcrucible-pantry (datafile): Crucible 0d1229ab-bd36-49b6-95e1-d6287586b840 has session id: e49fc84a-8167-42c1-b25f-144be76533fa
255982023-09-22T23:14:30.442ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:38920 looper = 0
255992023-09-22T23:14:30.442ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:44435 looper = 1
256002023-09-22T23:14:30.442ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:51108 looper = 2
256012023-09-22T23:14:30.443ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
256022023-09-22T23:14:30.443ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
256032023-09-22T23:14:30.443ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
256042023-09-22T23:14:30.443ZINFOcrucible-pantry (datafile): volume d6736968-99ba-4ebf-b4ff-15afb0904fed constructed ok
25605 The guest has requested activation
256062023-09-22T23:14:30.443ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 active request set
256072023-09-22T23:14:30.443ZINFOcrucible-pantry (datafile): [0] 0d1229ab-bd36-49b6-95e1-d6287586b840 looper connected looper = 0
256082023-09-22T23:14:30.443ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:38920 in state New
256092023-09-22T23:14:30.443ZINFOcrucible-pantry (datafile): [1] 0d1229ab-bd36-49b6-95e1-d6287586b840 looper connected looper = 1
256102023-09-22T23:14:30.443ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:44435 in state New
256112023-09-22T23:14:30.443ZINFOcrucible-pantry (datafile): [2] 0d1229ab-bd36-49b6-95e1-d6287586b840 looper connected looper = 2
256122023-09-22T23:14:30.443ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:51108 in state New
25613 Sep 22 23:14:30.444 INFO accepted connection from 127.0.0.1:59183, task: main
25614 Sep 22 23:14:30.444 INFO accepted connection from 127.0.0.1:49569, task: main
25615 Sep 22 23:14:30.444 INFO accepted connection from 127.0.0.1:64659, task: main
25616 Sep 22 23:14:30.444 INFO Connection request from 0d1229ab-bd36-49b6-95e1-d6287586b840 with version 4, task: proc
25617 Sep 22 23:14:30.444 INFO upstairs UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } connected, version 4, task: proc
25618 Sep 22 23:14:30.444 INFO Connection request from 0d1229ab-bd36-49b6-95e1-d6287586b840 with version 4, task: proc
25619 Sep 22 23:14:30.444 INFO upstairs UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } connected, version 4, task: proc
25620 Sep 22 23:14:30.444 INFO Connection request from 0d1229ab-bd36-49b6-95e1-d6287586b840 with version 4, task: proc
25621 Sep 22 23:14:30.444 INFO upstairs UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } connected, version 4, task: proc
256222023-09-22T23:14:30.444ZINFOcrucible-pantry (datafile): [0] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) New New New ds_transition to WaitActive
256232023-09-22T23:14:30.444ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
256242023-09-22T23:14:30.445ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 16060f7a-9986-4db1-95c8-74c4346b949d
256252023-09-22T23:14:30.445ZINFOcrucible-pantry (datafile): [1] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) WaitActive New New ds_transition to WaitActive
256262023-09-22T23:14:30.445ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
256272023-09-22T23:14:30.445ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 16060f7a-9986-4db1-95c8-74c4346b949d
256282023-09-22T23:14:30.445ZINFOcrucible-pantry (datafile): [2] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) WaitActive WaitActive New ds_transition to WaitActive
256292023-09-22T23:14:30.445ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
256302023-09-22T23:14:30.445ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 16060f7a-9986-4db1-95c8-74c4346b949d
25631 Sep 22 23:14:30.445 INFO UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } is now active (read-write)
25632 Sep 22 23:14:30.446 INFO UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } is now active (read-write)
25633 Sep 22 23:14:30.446 INFO UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } is now active (read-write)
256342023-09-22T23:14:30.446ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:38920 has UUID b38dafac-2da7-4558-aae1-959c564193b2
256352023-09-22T23:14:30.446ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: b38dafac-2da7-4558-aae1-959c564193b2, encrypted: true, database_read_version: 1, database_write_version: 1 }
256362023-09-22T23:14:30.446ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 WaitActive WaitActive WaitActive
256372023-09-22T23:14:30.446ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:44435 has UUID 14399b3b-6e6e-4995-afe0-74c1441f0833
256382023-09-22T23:14:30.446ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 14399b3b-6e6e-4995-afe0-74c1441f0833, encrypted: true, database_read_version: 1, database_write_version: 1 }
256392023-09-22T23:14:30.446ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 WaitActive WaitActive WaitActive
256402023-09-22T23:14:30.447ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:51108 has UUID 9da41dd6-4cdf-4b03-8df1-d13ee59384a3
256412023-09-22T23:14:30.447ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 9da41dd6-4cdf-4b03-8df1-d13ee59384a3, encrypted: true, database_read_version: 1, database_write_version: 1 }
256422023-09-22T23:14:30.447ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 WaitActive WaitActive WaitActive
25643 Sep 22 23:14:30.461 INFO listening on 127.0.0.1:0, task: main
256442023-09-22T23:14:30.461ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:33566 remote_addr = 127.0.0.1:49478
256452023-09-22T23:14:30.462ZINFOcrucible-pantry (datafile): no entry exists for volume 1e088c74-6c1b-4846-90a3-1bfe70bc4f7f, constructing...
256462023-09-22T23:14:30.462ZINFOcrucible-pantry (datafile): Upstairs starts
256472023-09-22T23:14:30.462ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
256482023-09-22T23:14:30.462ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
256492023-09-22T23:14:30.462ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 982ad9a6-37b1-4b89-8804-8ba0c1300cac
256502023-09-22T23:14:30.462ZINFOcrucible-pantry (datafile): Crucible 982ad9a6-37b1-4b89-8804-8ba0c1300cac has session id: 92eb453c-9169-4bc7-aded-578f614d6e24
256512023-09-22T23:14:30.462ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:46850 looper = 0
256522023-09-22T23:14:30.462ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:50214 looper = 1
256532023-09-22T23:14:30.462ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:47631 looper = 2
256542023-09-22T23:14:30.462ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
256552023-09-22T23:14:30.463ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
256562023-09-22T23:14:30.463ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
256572023-09-22T23:14:30.463ZINFOcrucible-pantry (datafile): volume 1e088c74-6c1b-4846-90a3-1bfe70bc4f7f constructed ok
25658 The guest has requested activation
256592023-09-22T23:14:30.463ZINFOcrucible-pantry (datafile): 982ad9a6-37b1-4b89-8804-8ba0c1300cac active request set
256602023-09-22T23:14:30.463ZINFOcrucible-pantry (datafile): [0] 982ad9a6-37b1-4b89-8804-8ba0c1300cac looper connected looper = 0
256612023-09-22T23:14:30.463ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:46850 in state New
256622023-09-22T23:14:30.463ZINFOcrucible-pantry (datafile): [1] 982ad9a6-37b1-4b89-8804-8ba0c1300cac looper connected looper = 1
256632023-09-22T23:14:30.463ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:50214 in state New
256642023-09-22T23:14:30.463ZINFOcrucible-pantry (datafile): [2] 982ad9a6-37b1-4b89-8804-8ba0c1300cac looper connected looper = 2
256652023-09-22T23:14:30.463ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:47631 in state New
25666 Sep 22 23:14:30.463 INFO accepted connection from 127.0.0.1:61789, task: main
25667 Sep 22 23:14:30.464 INFO accepted connection from 127.0.0.1:58381, task: main
25668 Sep 22 23:14:30.464 INFO accepted connection from 127.0.0.1:50053, task: main
25669 Sep 22 23:14:30.464 INFO Connection request from 982ad9a6-37b1-4b89-8804-8ba0c1300cac with version 4, task: proc
25670 Sep 22 23:14:30.464 INFO upstairs UpstairsConnection { upstairs_id: 982ad9a6-37b1-4b89-8804-8ba0c1300cac, session_id: 1dc57d2b-9138-4f1d-8c86-6d444ab88226, gen: 1 } connected, version 4, task: proc
25671 Sep 22 23:14:30.464 INFO Connection request from 982ad9a6-37b1-4b89-8804-8ba0c1300cac with version 4, task: proc
25672 Sep 22 23:14:30.464 INFO upstairs UpstairsConnection { upstairs_id: 982ad9a6-37b1-4b89-8804-8ba0c1300cac, session_id: 1dc57d2b-9138-4f1d-8c86-6d444ab88226, gen: 1 } connected, version 4, task: proc
25673 Sep 22 23:14:30.464 INFO Connection request from 982ad9a6-37b1-4b89-8804-8ba0c1300cac with version 4, task: proc
25674 Sep 22 23:14:30.464 INFO upstairs UpstairsConnection { upstairs_id: 982ad9a6-37b1-4b89-8804-8ba0c1300cac, session_id: 1dc57d2b-9138-4f1d-8c86-6d444ab88226, gen: 1 } connected, version 4, task: proc
256752023-09-22T23:14:30.464ZINFOcrucible-pantry (datafile): [0] 982ad9a6-37b1-4b89-8804-8ba0c1300cac (1dc57d2b-9138-4f1d-8c86-6d444ab88226) New New New ds_transition to WaitActive
256762023-09-22T23:14:30.464ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
256772023-09-22T23:14:30.464ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 1dc57d2b-9138-4f1d-8c86-6d444ab88226
256782023-09-22T23:14:30.464ZINFOcrucible-pantry (datafile): [1] 982ad9a6-37b1-4b89-8804-8ba0c1300cac (1dc57d2b-9138-4f1d-8c86-6d444ab88226) WaitActive New New ds_transition to WaitActive
256792023-09-22T23:14:30.465ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
256802023-09-22T23:14:30.465ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 1dc57d2b-9138-4f1d-8c86-6d444ab88226
256812023-09-22T23:14:30.465ZINFOcrucible-pantry (datafile): [2] 982ad9a6-37b1-4b89-8804-8ba0c1300cac (1dc57d2b-9138-4f1d-8c86-6d444ab88226) WaitActive WaitActive New ds_transition to WaitActive
256822023-09-22T23:14:30.465ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
256832023-09-22T23:14:30.465ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 1dc57d2b-9138-4f1d-8c86-6d444ab88226
25684 Sep 22 23:14:30.465 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25685 Sep 22 23:14:30.465 INFO UpstairsConnection { upstairs_id: 982ad9a6-37b1-4b89-8804-8ba0c1300cac, session_id: 1dc57d2b-9138-4f1d-8c86-6d444ab88226, gen: 1 } is now active (read-write)
25686 Sep 22 23:14:30.465 INFO UpstairsConnection { upstairs_id: 982ad9a6-37b1-4b89-8804-8ba0c1300cac, session_id: 1dc57d2b-9138-4f1d-8c86-6d444ab88226, gen: 1 } is now active (read-write)
25687 Sep 22 23:14:30.466 INFO UpstairsConnection { upstairs_id: 982ad9a6-37b1-4b89-8804-8ba0c1300cac, session_id: 1dc57d2b-9138-4f1d-8c86-6d444ab88226, gen: 1 } is now active (read-write)
256882023-09-22T23:14:30.466ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:46850 has UUID ed5caa6d-3818-4a41-afcf-7deb29720fa0
256892023-09-22T23:14:30.466ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: ed5caa6d-3818-4a41-afcf-7deb29720fa0, encrypted: true, database_read_version: 1, database_write_version: 1 }
256902023-09-22T23:14:30.466ZINFOcrucible-pantry (datafile): 982ad9a6-37b1-4b89-8804-8ba0c1300cac WaitActive WaitActive WaitActive
256912023-09-22T23:14:30.466ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:50214 has UUID 2b517ebe-5910-495c-8a3d-f94697b298ea
256922023-09-22T23:14:30.466ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 2b517ebe-5910-495c-8a3d-f94697b298ea, encrypted: true, database_read_version: 1, database_write_version: 1 }
256932023-09-22T23:14:30.466ZINFOcrucible-pantry (datafile): 982ad9a6-37b1-4b89-8804-8ba0c1300cac WaitActive WaitActive WaitActive
256942023-09-22T23:14:30.466ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:47631 has UUID ef580430-d0f3-4a91-9bfb-605bdf8f791b
256952023-09-22T23:14:30.467ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: ef580430-d0f3-4a91-9bfb-605bdf8f791b, encrypted: true, database_read_version: 1, database_write_version: 1 }
256962023-09-22T23:14:30.467ZINFOcrucible-pantry (datafile): 982ad9a6-37b1-4b89-8804-8ba0c1300cac WaitActive WaitActive WaitActive
25697 Sep 22 23:14:30.468 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25698 Sep 22 23:14:30.470 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25699 Sep 22 23:14:30.485 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25700 Sep 22 23:14:30.487 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25701 Sep 22 23:14:30.490 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25702 Sep 22 23:14:30.500 INFO Downstairs has completed Negotiation, task: proc
25703 Sep 22 23:14:30.502 INFO Downstairs has completed Negotiation, task: proc
25704 Sep 22 23:14:30.503 INFO Downstairs has completed Negotiation, task: proc
257052023-09-22T23:14:30.504ZINFOcrucible-pantry (datafile): [0] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
257062023-09-22T23:14:30.504ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
257072023-09-22T23:14:30.504ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
257082023-09-22T23:14:30.504ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
257092023-09-22T23:14:30.504ZINFOcrucible-pantry (datafile): [1] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
257102023-09-22T23:14:30.504ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
257112023-09-22T23:14:30.504ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
257122023-09-22T23:14:30.504ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
257132023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [2] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
257142023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
257152023-09-22T23:14:30.505ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
257162023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
257172023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:38920 task reports connection:true
257182023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 WaitQuorum WaitQuorum WaitQuorum
257192023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257202023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257212023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
257222023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257232023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25724 The guest has finished waiting for activation
257252023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
257262023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257272023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257282023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
257292023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): Max found gen is 1
257302023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
257312023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): Next flush: 1
257322023-09-22T23:14:30.505ZINFOcrucible-pantry (datafile): All extents match
257332023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): No downstairs repair required
257342023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): No initial repair work was required
257352023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
257362023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 is now active with session: 16060f7a-9986-4db1-95c8-74c4346b949d
257372023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 Set Active after no repair
257382023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
257392023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): Set check for repair
257402023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:44435 task reports connection:true
257412023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 Active Active Active
257422023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): Set check for repair
257432023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:51108 task reports connection:true
257442023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 Active Active Active
257452023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): Set check for repair
257462023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): [0] received reconcile message
257472023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
257482023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
257492023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): [1] received reconcile message
257502023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
257512023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
257522023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): [2] received reconcile message
257532023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
257542023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
257552023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): volume d6736968-99ba-4ebf-b4ff-15afb0904fed activated ok
257562023-09-22T23:14:30.506ZINFOcrucible-pantry (datafile): volume d6736968-99ba-4ebf-b4ff-15afb0904fed constructed and inserted ok
257572023-09-22T23:14:30.506ZINFOcrucible-pantry (dropshot): request completed latency_us = 64358 local_addr = 127.0.0.1:61933 method = POST remote_addr = 127.0.0.1:58570 req_id = 67684343-1827-49ac-9b89-0c30bb14fbb3 response_code = 200 uri = /crucible/pantry/0/volume/d6736968-99ba-4ebf-b4ff-15afb0904fed
257582023-09-22T23:14:30.507ZINFOcrucible-pantry (dropshot): request completed latency_us = 285 local_addr = 127.0.0.1:61933 method = POST remote_addr = 127.0.0.1:58570 req_id = 96eae84f-8a8c-4cc9-bd62-02581d9bae36 response_code = 200 uri = /crucible/pantry/0/volume/d6736968-99ba-4ebf-b4ff-15afb0904fed/import_from_url
25759 Sep 22 23:14:30.518 INFO Downstairs has completed Negotiation, task: proc
25760 Sep 22 23:14:30.519 INFO Downstairs has completed Negotiation, task: proc
25761 Sep 22 23:14:30.521 INFO Downstairs has completed Negotiation, task: proc
257622023-09-22T23:14:30.521ZINFOcrucible-pantry (datafile): [0] 982ad9a6-37b1-4b89-8804-8ba0c1300cac (1dc57d2b-9138-4f1d-8c86-6d444ab88226) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
257632023-09-22T23:14:30.521ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
257642023-09-22T23:14:30.521ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
257652023-09-22T23:14:30.522ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
257662023-09-22T23:14:30.522ZINFOcrucible-pantry (datafile): [1] 982ad9a6-37b1-4b89-8804-8ba0c1300cac (1dc57d2b-9138-4f1d-8c86-6d444ab88226) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
257672023-09-22T23:14:30.522ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
257682023-09-22T23:14:30.522ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
257692023-09-22T23:14:30.522ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
257702023-09-22T23:14:30.522ZINFOcrucible-pantry (datafile): [2] 982ad9a6-37b1-4b89-8804-8ba0c1300cac (1dc57d2b-9138-4f1d-8c86-6d444ab88226) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
257712023-09-22T23:14:30.522ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
257722023-09-22T23:14:30.522ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
257732023-09-22T23:14:30.522ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
257742023-09-22T23:14:30.522ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:46850 task reports connection:true
257752023-09-22T23:14:30.522ZINFOcrucible-pantry (datafile): 982ad9a6-37b1-4b89-8804-8ba0c1300cac WaitQuorum WaitQuorum WaitQuorum
257762023-09-22T23:14:30.522ZINFOcrucible-pantry (datafile): [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257772023-09-22T23:14:30.522ZINFOcrucible-pantry (datafile): [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257782023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
257792023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257802023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25781 The guest has finished waiting for activation
257822023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
257832023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257842023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
257852023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
257862023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): Max found gen is 1
257872023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
257882023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): Next flush: 1
257892023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): All extents match
257902023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): No downstairs repair required
257912023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): No initial repair work was required
257922023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
257932023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): 982ad9a6-37b1-4b89-8804-8ba0c1300cac is now active with session: 1dc57d2b-9138-4f1d-8c86-6d444ab88226
257942023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): 982ad9a6-37b1-4b89-8804-8ba0c1300cac Set Active after no repair
257952023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
257962023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): Set check for repair
257972023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:50214 task reports connection:true
257982023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): 982ad9a6-37b1-4b89-8804-8ba0c1300cac Active Active Active
257992023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): Set check for repair
258002023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:47631 task reports connection:true
258012023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): 982ad9a6-37b1-4b89-8804-8ba0c1300cac Active Active Active
258022023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): Set check for repair
258032023-09-22T23:14:30.523ZINFOcrucible-pantry (datafile): [0] received reconcile message
258042023-09-22T23:14:30.524ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
258052023-09-22T23:14:30.524ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
258062023-09-22T23:14:30.524ZINFOcrucible-pantry (datafile): [1] received reconcile message
258072023-09-22T23:14:30.524ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
258082023-09-22T23:14:30.524ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
258092023-09-22T23:14:30.524ZINFOcrucible-pantry (datafile): [2] received reconcile message
258102023-09-22T23:14:30.524ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
258112023-09-22T23:14:30.524ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
258122023-09-22T23:14:30.524ZINFOcrucible-pantry (datafile): volume 1e088c74-6c1b-4846-90a3-1bfe70bc4f7f activated ok
258132023-09-22T23:14:30.524ZINFOcrucible-pantry (datafile): volume 1e088c74-6c1b-4846-90a3-1bfe70bc4f7f constructed and inserted ok
258142023-09-22T23:14:30.524ZINFOcrucible-pantry (dropshot): request completed latency_us = 61734 local_addr = 127.0.0.1:33566 method = POST remote_addr = 127.0.0.1:49478 req_id = 37479699-b3cc-4173-8cc6-c0412bade8aa response_code = 200 uri = /crucible/pantry/0/volume/1e088c74-6c1b-4846-90a3-1bfe70bc4f7f
258152023-09-22T23:14:30.524ZINFOcrucible-pantry (dropshot): request completed latency_us = 257 local_addr = 127.0.0.1:33566 method = POST remote_addr = 127.0.0.1:49478 req_id = 54248e53-6b49-4caa-ae27-adf1891e3262 response_code = 200 uri = /crucible/pantry/0/volume/1e088c74-6c1b-4846-90a3-1bfe70bc4f7f/import_from_url
258162023-09-22T23:14:30.623ZINFOcrucible-pantry (dropshot): request completed latency_us = 256 local_addr = 127.0.0.1:33566 method = GET remote_addr = 127.0.0.1:49478 req_id = 078a732a-b54e-44e7-9aaa-34ba8eaef819 response_code = 200 uri = /crucible/pantry/0/job/a52bd47d-76c9-4928-9712-218eb57bfeb7/is_finished
258172023-09-22T23:14:30.638ZINFOcrucible-pantry (dropshot): request completed latency_us = 196 local_addr = 127.0.0.1:61933 method = GET remote_addr = 127.0.0.1:58570 req_id = 3197f6f8-dc99-4bf0-9eb3-3fe2fe0b15c9 response_code = 200 uri = /crucible/pantry/0/job/de4c6089-f970-4d99-9581-6bd61244a7f8/is_finished
258182023-09-22T23:14:30.654ZINFOcrucible-pantry (dropshot): request completed latency_us = 276862 local_addr = 127.0.0.1:45536 method = POST remote_addr = 127.0.0.1:41882 req_id = 736f6533-2ad4-4087-a4e2-eadc39266dcd response_code = 204 uri = /crucible/pantry/0/volume/a9fc305b-73b1-411e-9d00-c6edbf74297f/bulk_write
258192023-09-22T23:14:30.697ZINFOcrucible-pantry (dropshot): request completed latency_us = 293338 local_addr = 127.0.0.1:34477 method = POST remote_addr = 127.0.0.1:51700 req_id = 4f5a16bb-68a7-4f21-abea-ef2b76fa4866 response_code = 204 uri = /crucible/pantry/0/volume/cc43b43c-a066-4496-841d-0f340d8a3b1d/bulk_write
258202023-09-22T23:14:30.701ZINFOcrucible-pantry (datafile): detach removing entry for volume cc43b43c-a066-4496-841d-0f340d8a3b1d
258212023-09-22T23:14:30.701ZINFOcrucible-pantry (datafile): detaching volume cc43b43c-a066-4496-841d-0f340d8a3b1d
25822 Sep 22 23:14:30.768 DEBG Write :1000 deps:[] res:true
25823 Sep 22 23:14:30.776 DEBG Read :1001 deps:[JobId(1000)] res:true
25824 Sep 22 23:14:30.794 DEBG Write :1000 deps:[] res:true
25825 Sep 22 23:14:30.810 DEBG Write :1000 deps:[] res:true
25826 Sep 22 23:14:30.823 DEBG Write :1000 deps:[] res:true
25827 Sep 22 23:14:30.825 DEBG Write :1000 deps:[] res:true
25828 Sep 22 23:14:30.830 DEBG Read :1001 deps:[JobId(1000)] res:true
25829 Sep 22 23:14:30.830 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
25830 Sep 22 23:14:30.831 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
25831 Sep 22 23:14:30.831 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
258322023-09-22T23:14:30.832ZINFOcrucible-pantry (datafile): Request to deactivate this guest
258332023-09-22T23:14:30.832ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 set deactivating.
258342023-09-22T23:14:30.832ZINFOcrucible-pantry (dropshot): request completed latency_us = 131485 local_addr = 127.0.0.1:34477 method = DELETE remote_addr = 127.0.0.1:51700 req_id = 9ed47dc6-fe17-4e98-8cef-19b14e551325 response_code = 204 uri = /crucible/pantry/0/volume/cc43b43c-a066-4496-841d-0f340d8a3b1d
25835 Sep 22 23:14:30.832 INFO Upstairs starts
25836 Sep 22 23:14:30.832 INFO Crucible Version: BuildInfo {
25837 version: "0.0.1",
25838 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
25839 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
25840 git_branch: "main",
25841 rustc_semver: "1.70.0",
25842 rustc_channel: "stable",
25843 rustc_host_triple: "x86_64-unknown-illumos",
25844 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
25845 cargo_triple: "x86_64-unknown-illumos",
25846 debug: true,
25847 opt_level: 0,
25848 }
25849 Sep 22 23:14:30.832 INFO Upstairs <-> Downstairs Message Version: 4
25850 Sep 22 23:14:30.832 INFO Crucible stats registered with UUID: 18b01c87-5587-4e4c-af37-0e404569a230
25851 Sep 22 23:14:30.832 INFO Crucible 18b01c87-5587-4e4c-af37-0e404569a230 has session id: 9fb860c1-25bf-4ac3-8286-f7b4bbced8f6
25852 Sep 22 23:14:30.833 INFO [0] connecting to 127.0.0.1:62296, looper: 0
25853 Sep 22 23:14:30.833 INFO [1] connecting to 127.0.0.1:62299, looper: 1
25854 Sep 22 23:14:30.833 INFO [2] connecting to 127.0.0.1:44532, looper: 2
25855 Sep 22 23:14:30.833 INFO up_listen starts, task: up_listen
25856 Sep 22 23:14:30.833 INFO Wait for all three downstairs to come online
25857 Sep 22 23:14:30.833 INFO Flush timeout: 0.5
25858 Sep 22 23:14:30.833 INFO accepted connection from 127.0.0.1:59426, task: main
25859 Sep 22 23:14:30.833 INFO accepted connection from 127.0.0.1:53704, task: main
25860 Sep 22 23:14:30.833 INFO [0] 18b01c87-5587-4e4c-af37-0e404569a230 looper connected, looper: 0
25861 Sep 22 23:14:30.833 INFO [0] Proc runs for 127.0.0.1:62296 in state New
25862 Sep 22 23:14:30.833 INFO [1] 18b01c87-5587-4e4c-af37-0e404569a230 looper connected, looper: 1
25863 Sep 22 23:14:30.833 INFO [1] Proc runs for 127.0.0.1:62299 in state New
25864 Sep 22 23:14:30.833 INFO [2] 18b01c87-5587-4e4c-af37-0e404569a230 looper connected, looper: 2
25865 Sep 22 23:14:30.833 INFO [2] Proc runs for 127.0.0.1:44532 in state New
25866 Sep 22 23:14:30.833 INFO accepted connection from 127.0.0.1:45845, task: main
25867 Sep 22 23:14:30.834 INFO Connection request from 18b01c87-5587-4e4c-af37-0e404569a230 with version 4, task: proc
25868 Sep 22 23:14:30.834 INFO upstairs UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 } connected, version 4, task: proc
25869 Sep 22 23:14:30.834 INFO Connection request from 18b01c87-5587-4e4c-af37-0e404569a230 with version 4, task: proc
25870 Sep 22 23:14:30.834 INFO upstairs UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 } connected, version 4, task: proc
25871 Sep 22 23:14:30.834 INFO Connection request from 18b01c87-5587-4e4c-af37-0e404569a230 with version 4, task: proc
25872 Sep 22 23:14:30.834 INFO upstairs UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 } connected, version 4, task: proc
25873 Sep 22 23:14:30.834 INFO [0] 18b01c87-5587-4e4c-af37-0e404569a230 (e8b82b10-5353-499d-a44d-e0dd5d13af34) New New New ds_transition to WaitActive
25874 Sep 22 23:14:30.834 INFO [0] Transition from New to WaitActive
25875 Sep 22 23:14:30.834 INFO [1] 18b01c87-5587-4e4c-af37-0e404569a230 (e8b82b10-5353-499d-a44d-e0dd5d13af34) WaitActive New New ds_transition to WaitActive
25876 Sep 22 23:14:30.834 INFO [1] Transition from New to WaitActive
25877 Sep 22 23:14:30.834 INFO [2] 18b01c87-5587-4e4c-af37-0e404569a230 (e8b82b10-5353-499d-a44d-e0dd5d13af34) WaitActive WaitActive New ds_transition to WaitActive
25878 Sep 22 23:14:30.834 INFO [2] Transition from New to WaitActive
25879 The guest has requested activation
25880 Sep 22 23:14:30.835 INFO 18b01c87-5587-4e4c-af37-0e404569a230 active request set
25881 Sep 22 23:14:30.835 INFO [0] received activate with gen 2
25882 Sep 22 23:14:30.835 INFO [0] client got ds_active_rx, promote! session e8b82b10-5353-499d-a44d-e0dd5d13af34
25883 Sep 22 23:14:30.835 INFO [1] received activate with gen 2
25884 Sep 22 23:14:30.835 INFO [1] client got ds_active_rx, promote! session e8b82b10-5353-499d-a44d-e0dd5d13af34
25885 Sep 22 23:14:30.835 INFO [2] received activate with gen 2
25886 Sep 22 23:14:30.835 INFO [2] client got ds_active_rx, promote! session e8b82b10-5353-499d-a44d-e0dd5d13af34
25887 Sep 22 23:14:30.835 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 } to UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 }
25888 Sep 22 23:14:30.835 WARN Signaling to UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 } thread that UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 } is being promoted (read-write)
25889 Sep 22 23:14:30.835 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 } to UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 }
25890 Sep 22 23:14:30.835 WARN Signaling to UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 } thread that UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 } is being promoted (read-write)
25891 Sep 22 23:14:30.835 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 } to UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 }
25892 Sep 22 23:14:30.835 WARN Signaling to UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 } thread that UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 } is being promoted (read-write)
25893 Sep 22 23:14:30.836 WARN Another upstairs UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 }, task: main
25894 Sep 22 23:14:30.836 INFO UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 } is now active (read-write)
25895 Sep 22 23:14:30.836 WARN Another upstairs UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 }, task: main
25896 Sep 22 23:14:30.836 INFO UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 } is now active (read-write)
25897 Sep 22 23:14:30.836 WARN Another upstairs UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: 12531aa6-b93c-4bc2-9fe7-f954b135051a, gen: 1 }, task: main
25898 Sep 22 23:14:30.836 INFO UpstairsConnection { upstairs_id: 18b01c87-5587-4e4c-af37-0e404569a230, session_id: e8b82b10-5353-499d-a44d-e0dd5d13af34, gen: 2 } is now active (read-write)
25899 Sep 22 23:14:30.836 INFO connection (127.0.0.1:35197): all done
25900 Sep 22 23:14:30.836 INFO connection (127.0.0.1:57231): all done
25901 Sep 22 23:14:30.836 INFO connection (127.0.0.1:41210): all done
259022023-09-22T23:14:30.836ZERROcrucible-pantry (datafile): [0] 18b01c87-5587-4e4c-af37-0e404569a230 (12531aa6-b93c-4bc2-9fe7-f954b135051a) cmd_loop saw YouAreNoLongerActive 18b01c87-5587-4e4c-af37-0e404569a230 e8b82b10-5353-499d-a44d-e0dd5d13af34 2
259032023-09-22T23:14:30.836ZINFOcrucible-pantry (datafile): [0] 18b01c87-5587-4e4c-af37-0e404569a230 (12531aa6-b93c-4bc2-9fe7-f954b135051a) Active Active Active ds_transition to Disabled
259042023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [0] Transition from Active to Disabled
259052023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 set inactive, session 12531aa6-b93c-4bc2-9fe7-f954b135051a
259062023-09-22T23:14:30.837ZERROcrucible-pantry (datafile): 127.0.0.1:62296: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 0
259072023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [0] 18b01c87-5587-4e4c-af37-0e404569a230 Gone missing, transition from Disabled to Disconnected
259082023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [0] 18b01c87-5587-4e4c-af37-0e404569a230 connection to 127.0.0.1:62296 closed looper = 0
259092023-09-22T23:14:30.837ZERROcrucible-pantry (datafile): [1] 18b01c87-5587-4e4c-af37-0e404569a230 (12531aa6-b93c-4bc2-9fe7-f954b135051a) cmd_loop saw YouAreNoLongerActive 18b01c87-5587-4e4c-af37-0e404569a230 e8b82b10-5353-499d-a44d-e0dd5d13af34 2
259102023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [1] 18b01c87-5587-4e4c-af37-0e404569a230 (12531aa6-b93c-4bc2-9fe7-f954b135051a) Disconnected Active Active ds_transition to Disabled
259112023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [1] Transition from Active to Disabled
259122023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 set inactive, session 12531aa6-b93c-4bc2-9fe7-f954b135051a
259132023-09-22T23:14:30.837ZERROcrucible-pantry (datafile): 127.0.0.1:62299: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 1
259142023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [1] 18b01c87-5587-4e4c-af37-0e404569a230 Gone missing, transition from Disabled to Disconnected
259152023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [1] 18b01c87-5587-4e4c-af37-0e404569a230 connection to 127.0.0.1:62299 closed looper = 1
259162023-09-22T23:14:30.837ZERROcrucible-pantry (datafile): [2] 18b01c87-5587-4e4c-af37-0e404569a230 (12531aa6-b93c-4bc2-9fe7-f954b135051a) cmd_loop saw YouAreNoLongerActive 18b01c87-5587-4e4c-af37-0e404569a230 e8b82b10-5353-499d-a44d-e0dd5d13af34 2
259172023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [2] 18b01c87-5587-4e4c-af37-0e404569a230 (12531aa6-b93c-4bc2-9fe7-f954b135051a) Disconnected Disconnected Active ds_transition to Disabled
259182023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [2] Transition from Active to Disabled
259192023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 set inactive, session 12531aa6-b93c-4bc2-9fe7-f954b135051a
259202023-09-22T23:14:30.837ZERROcrucible-pantry (datafile): 127.0.0.1:44532: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 2
259212023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [2] 18b01c87-5587-4e4c-af37-0e404569a230 Gone missing, transition from Disabled to Disconnected
259222023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [2] 18b01c87-5587-4e4c-af37-0e404569a230 connection to 127.0.0.1:44532 closed looper = 2
259232023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:62296 task reports connection:false
259242023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 Disconnected Disconnected Disconnected
259252023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:62296 task reports offline
259262023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:62299 task reports connection:false
259272023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 Disconnected Disconnected Disconnected
259282023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:62299 task reports offline
259292023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:44532 task reports connection:false
259302023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): 18b01c87-5587-4e4c-af37-0e404569a230 Disconnected Disconnected Disconnected
259312023-09-22T23:14:30.837ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:44532 task reports offline
259322023-09-22T23:14:30.838ZWARNcrucible-pantry (datafile): [0] pm_task rx.recv() is None
259332023-09-22T23:14:30.838ZWARNcrucible-pantry (datafile): [1] pm_task rx.recv() is None
259342023-09-22T23:14:30.838ZWARNcrucible-pantry (datafile): [2] pm_task rx.recv() is None
25935 Sep 22 23:14:30.838 INFO [0] downstairs client at 127.0.0.1:62296 has UUID bec31c6c-c0ce-43db-8467-8d94b73527a2
25936 Sep 22 23:14:30.838 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: bec31c6c-c0ce-43db-8467-8d94b73527a2, encrypted: true, database_read_version: 1, database_write_version: 1 }
25937 Sep 22 23:14:30.838 INFO 18b01c87-5587-4e4c-af37-0e404569a230 WaitActive WaitActive WaitActive
25938 Sep 22 23:14:30.838 INFO [1] downstairs client at 127.0.0.1:62299 has UUID ede072e9-d3f2-4884-bb86-eea53898f9d9
25939 Sep 22 23:14:30.838 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: ede072e9-d3f2-4884-bb86-eea53898f9d9, encrypted: true, database_read_version: 1, database_write_version: 1 }
25940 Sep 22 23:14:30.838 INFO 18b01c87-5587-4e4c-af37-0e404569a230 WaitActive WaitActive WaitActive
25941 Sep 22 23:14:30.838 INFO [2] downstairs client at 127.0.0.1:44532 has UUID 6e66ae97-3dfc-40c6-a1d0-7d1c5297fdbf
25942 Sep 22 23:14:30.838 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 6e66ae97-3dfc-40c6-a1d0-7d1c5297fdbf, encrypted: true, database_read_version: 1, database_write_version: 1 }
25943 Sep 22 23:14:30.838 INFO 18b01c87-5587-4e4c-af37-0e404569a230 WaitActive WaitActive WaitActive
25944 Sep 22 23:14:30.850 INFO Current flush_numbers [0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25945 Sep 22 23:14:30.852 INFO Current flush_numbers [0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25946 Sep 22 23:14:30.854 INFO Current flush_numbers [0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25947 Sep 22 23:14:30.858 DEBG Write :1000 deps:[] res:true
25948 Sep 22 23:14:30.861 DEBG Read :1001 deps:[JobId(1000)] res:true
25949 Sep 22 23:14:30.867 INFO Downstairs has completed Negotiation, task: proc
25950 Sep 22 23:14:30.868 INFO Downstairs has completed Negotiation, task: proc
25951 Sep 22 23:14:30.869 INFO Downstairs has completed Negotiation, task: proc
25952 Sep 22 23:14:30.869 INFO [0] 18b01c87-5587-4e4c-af37-0e404569a230 (e8b82b10-5353-499d-a44d-e0dd5d13af34) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
25953 Sep 22 23:14:30.869 INFO [0] Transition from WaitActive to WaitQuorum
25954 Sep 22 23:14:30.869 WARN [0] new RM replaced this: None
25955 Sep 22 23:14:30.869 INFO [0] Starts reconcile loop
25956 Sep 22 23:14:30.869 INFO [1] 18b01c87-5587-4e4c-af37-0e404569a230 (e8b82b10-5353-499d-a44d-e0dd5d13af34) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
25957 Sep 22 23:14:30.869 INFO [1] Transition from WaitActive to WaitQuorum
25958 Sep 22 23:14:30.869 WARN [1] new RM replaced this: None
25959 Sep 22 23:14:30.869 INFO [1] Starts reconcile loop
25960 Sep 22 23:14:30.869 INFO [2] 18b01c87-5587-4e4c-af37-0e404569a230 (e8b82b10-5353-499d-a44d-e0dd5d13af34) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
25961 Sep 22 23:14:30.870 INFO [2] Transition from WaitActive to WaitQuorum
25962 Sep 22 23:14:30.870 WARN [2] new RM replaced this: None
25963 Sep 22 23:14:30.870 INFO [2] Starts reconcile loop
25964 Sep 22 23:14:30.870 INFO [0] 127.0.0.1:62296 task reports connection:true
25965 Sep 22 23:14:30.870 INFO 18b01c87-5587-4e4c-af37-0e404569a230 WaitQuorum WaitQuorum WaitQuorum
25966 Sep 22 23:14:30.870 INFO [0]R flush_numbers[0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25967 Sep 22 23:14:30.870 INFO [0]R generation[0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25968 Sep 22 23:14:30.870 INFO [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
25969 Sep 22 23:14:30.870 INFO [1]R flush_numbers[0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25970 Sep 22 23:14:30.870 INFO [1]R generation[0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25971 Sep 22 23:14:30.870 INFO [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
25972 Sep 22 23:14:30.870 INFO [2]R flush_numbers[0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25973 Sep 22 23:14:30.870 INFO [2]R generation[0..12]: [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
25974 Sep 22 23:14:30.870 INFO [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
25975 Sep 22 23:14:30.870 INFO Max found gen is 2
25976 Sep 22 23:14:30.870 INFO Generation requested: 2 >= found:2
25977 Sep 22 23:14:30.870 INFO Next flush: 2
25978 Sep 22 23:14:30.870 INFO All extents match
25979 Sep 22 23:14:30.870 INFO No downstairs repair required
25980 Sep 22 23:14:30.870 INFO No initial repair work was required
25981 Sep 22 23:14:30.870 INFO Set Downstairs and Upstairs active
25982 Sep 22 23:14:30.870 INFO 18b01c87-5587-4e4c-af37-0e404569a230 is now active with session: e8b82b10-5353-499d-a44d-e0dd5d13af34
25983 Sep 22 23:14:30.870 INFO 18b01c87-5587-4e4c-af37-0e404569a230 Set Active after no repair
25984 Sep 22 23:14:30.870 INFO Notify all downstairs, region set compare is done.
25985 Sep 22 23:14:30.870 INFO Set check for repair
25986 Sep 22 23:14:30.870 INFO [1] 127.0.0.1:62299 task reports connection:true
25987 Sep 22 23:14:30.870 INFO 18b01c87-5587-4e4c-af37-0e404569a230 Active Active Active
25988 Sep 22 23:14:30.870 INFO Set check for repair
25989 Sep 22 23:14:30.870 INFO [2] 127.0.0.1:44532 task reports connection:true
25990 Sep 22 23:14:30.870 INFO 18b01c87-5587-4e4c-af37-0e404569a230 Active Active Active
25991 Sep 22 23:14:30.870 INFO Set check for repair
25992 Sep 22 23:14:30.870 INFO [0] received reconcile message
25993 Sep 22 23:14:30.870 INFO [0] All repairs completed, exit
25994 Sep 22 23:14:30.870 INFO [0] Starts cmd_loop
25995 Sep 22 23:14:30.870 INFO [1] received reconcile message
25996 Sep 22 23:14:30.870 INFO [1] All repairs completed, exit
25997 Sep 22 23:14:30.870 INFO [1] Starts cmd_loop
25998 Sep 22 23:14:30.870 INFO [2] received reconcile message
25999 Sep 22 23:14:30.870 INFO [2] All repairs completed, exit
26000 Sep 22 23:14:30.870 INFO [2] Starts cmd_loop
26001 The guest has finished waiting for activation
26002 Sep 22 23:14:30.872 DEBG IO Read 1000 has deps []
26003 Sep 22 23:14:30.882 DEBG Read :1000 deps:[] res:true
26004 Sep 22 23:14:30.882 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
26005 Sep 22 23:14:30.882 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
26006 Sep 22 23:14:30.882 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
26007 Sep 22 23:14:30.896 DEBG Read :1000 deps:[] res:true
26008 Sep 22 23:14:30.910 DEBG Read :1000 deps:[] res:true
26009 Sep 22 23:14:31.172 DEBG [0] Read AckReady 1000, : downstairs
26010 Sep 22 23:14:31.337 DEBG [1] Read already AckReady 1000, : downstairs
26011 Sep 22 23:14:31.503 DEBG [2] Read already AckReady 1000, : downstairs
26012 Sep 22 23:14:31.504 DEBG up_ds_listen was notified
26013 Sep 22 23:14:31.504 DEBG up_ds_listen process 1000
26014 Sep 22 23:14:31.504 DEBG [A] ack job 1000:1, : downstairs
260152023-09-22T23:14:31.507ZINFOcrucible-pantry (datafile): Checking if live repair is needed
260162023-09-22T23:14:31.507ZINFOcrucible-pantry (datafile): No Live Repair required at this time
260172023-09-22T23:14:31.513ZINFOcrucible-pantry (dropshot): request completed latency_us = 855720 local_addr = 127.0.0.1:45536 method = POST remote_addr = 127.0.0.1:41882 req_id = 7a8495a2-592a-48de-9223-77f8950ceb7e response_code = 200 uri = /crucible/pantry/0/volume/a9fc305b-73b1-411e-9d00-c6edbf74297f/bulk_read
260182023-09-22T23:14:31.513ZINFOcrucible-pantry (datafile): Checking if live repair is needed
260192023-09-22T23:14:31.513ZINFOcrucible-pantry (datafile): No Live Repair required at this time
260202023-09-22T23:14:31.523ZINFOcrucible-pantry (datafile): Checking if live repair is needed
260212023-09-22T23:14:31.523ZINFOcrucible-pantry (datafile): No Live Repair required at this time
260222023-09-22T23:14:31.529ZINFOcrucible-pantry (datafile): detach removing entry for volume a9fc305b-73b1-411e-9d00-c6edbf74297f
260232023-09-22T23:14:31.529ZINFOcrucible-pantry (datafile): detaching volume a9fc305b-73b1-411e-9d00-c6edbf74297f
26024 Sep 22 23:14:31.530 DEBG Flush :1003 extent_limit None deps:[] res:true f:2 g:1
26025 Sep 22 23:14:31.530 DEBG Flush :1003 extent_limit None deps:[] res:true f:2 g:1
26026 Sep 22 23:14:31.530 DEBG Flush :1003 extent_limit None deps:[] res:true f:2 g:1
260272023-09-22T23:14:31.531ZINFOcrucible-pantry (datafile): Request to deactivate this guest
260282023-09-22T23:14:31.531ZINFOcrucible-pantry (datafile): d37ce371-0ed6-44c3-bb8f-e1d703ba2c92 set deactivating.
260292023-09-22T23:14:31.531ZINFOcrucible-pantry (dropshot): request completed latency_us = 1624 local_addr = 127.0.0.1:45536 method = DELETE remote_addr = 127.0.0.1:41882 req_id = 9498cace-6405-475e-88bd-9f70ef9db15f response_code = 204 uri = /crucible/pantry/0/volume/a9fc305b-73b1-411e-9d00-c6edbf74297f
26030 Sep 22 23:14:31.531 DEBG up_ds_listen checked 1 jobs, back to waiting
260312023-09-22T23:14:31.625ZINFOcrucible-pantry (dropshot): request completed latency_us = 301 local_addr = 127.0.0.1:33566 method = GET remote_addr = 127.0.0.1:49478 req_id = f86ce50d-aab8-41b6-8c6c-ca52ece334e5 response_code = 200 uri = /crucible/pantry/0/job/a52bd47d-76c9-4928-9712-218eb57bfeb7/is_finished
26032 Sep 22 23:14:31.637 WARN a50fc3fb-3de2-4743-9c50-cc80cfba77db request to replace downstairs 127.0.0.1:42970 with 127.0.0.1:62845
26033 Sep 22 23:14:31.637 INFO a50fc3fb-3de2-4743-9c50-cc80cfba77db found new target: 127.0.0.1:62845 at 0
26034 Sep 22 23:14:31.637 INFO Downstairs replacement completed
26035 Sep 22 23:14:31.637 DEBG IO Read 1011 has deps []
26036 Sep 22 23:14:31.639 DEBG Read :1011 deps:[] res:true
26037 Sep 22 23:14:31.640 DEBG Read :1011 deps:[] res:true
260382023-09-22T23:14:31.640ZINFOcrucible-pantry (dropshot): request completed latency_us = 232 local_addr = 127.0.0.1:61933 method = GET remote_addr = 127.0.0.1:58570 req_id = d67ef902-5b0d-4fde-b094-2d028ae346fd response_code = 200 uri = /crucible/pantry/0/job/de4c6089-f970-4d99-9581-6bd61244a7f8/is_finished
26039 Sep 22 23:14:31.640 DEBG Read :1011 deps:[] res:true
26040 Sep 22 23:14:31.645 DEBG [1] Read AckReady 1011, : downstairs
26041 Sep 22 23:14:31.647 DEBG [2] Read already AckReady 1011, : downstairs
26042 Sep 22 23:14:31.650 DEBG [0] Read already AckReady 1011, : downstairs
26043 Sep 22 23:14:31.650 DEBG up_ds_listen was notified
26044 Sep 22 23:14:31.650 DEBG up_ds_listen process 1011
26045 Sep 22 23:14:31.650 DEBG [A] ack job 1011:12, : downstairs
26046 Sep 22 23:14:31.650 DEBG up_ds_listen checked 1 jobs, back to waiting
26047 test test::integration_test_guest_replace_downstairs ... ok
26048 test test::test_pantry_bulk_read_max_chunk_size ... ok
26049 Sep 22 23:14:31.712 INFO current number of open files limit 65536 is already the maximum
26050 Sep 22 23:14:31.712 INFO Created new region file "/tmp/downstairs-don5KOuC/region.json"
26051 Sep 22 23:14:31.718 INFO current number of open files limit 65536 is already the maximum
26052 Sep 22 23:14:31.718 INFO Opened existing region file "/tmp/downstairs-don5KOuC/region.json"
26053 Sep 22 23:14:31.718 INFO Database read version 1
26054 Sep 22 23:14:31.718 INFO Database write version 1
26055 Sep 22 23:14:31.719 INFO UUID: 38f16af0-b890-4595-9750-0a66b8b08b29
26056 Sep 22 23:14:31.719 INFO Blocks per extent:5 Total Extents: 2
26057 Sep 22 23:14:31.719 INFO Crucible Version: Crucible Version: 0.0.1
26058 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26059 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26060 rustc: 1.70.0 stable x86_64-unknown-illumos
26061 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26062 Sep 22 23:14:31.719 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26063 Sep 22 23:14:31.719 INFO Using address: 127.0.0.1:56365, task: main
26064 Sep 22 23:14:31.719 INFO Repair listens on 127.0.0.1:0, task: repair
26065 Sep 22 23:14:31.720 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:51524, task: repair
26066 Sep 22 23:14:31.720 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:51524, task: repair
26067 Sep 22 23:14:31.720 INFO listening, local_addr: 127.0.0.1:51524, task: repair
26068 Sep 22 23:14:31.720 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:51524, task: repair
26069 Sep 22 23:14:31.720 INFO Using repair address: 127.0.0.1:51524, task: main
26070 Sep 22 23:14:31.720 INFO No SSL acceptor configured, task: main
26071 Sep 22 23:14:31.720 INFO current number of open files limit 65536 is already the maximum
26072 Sep 22 23:14:31.720 INFO Created new region file "/tmp/downstairs-jd0MhT1q/region.json"
26073 test test::test_pantry_bulk_write_max_chunk_size ... ok
26074 Sep 22 23:14:31.722 INFO current number of open files limit 65536 is already the maximum
26075 Sep 22 23:14:31.722 INFO Created new region file "/tmp/downstairs-S24QnP1S/region.json"
26076 Sep 22 23:14:31.723 INFO current number of open files limit 65536 is already the maximum
26077 Sep 22 23:14:31.723 INFO Opened existing region file "/tmp/downstairs-jd0MhT1q/region.json"
26078 Sep 22 23:14:31.723 INFO Database read version 1
26079 Sep 22 23:14:31.723 INFO Database write version 1
26080 Sep 22 23:14:31.725 INFO current number of open files limit 65536 is already the maximum
26081 Sep 22 23:14:31.725 INFO Opened existing region file "/tmp/downstairs-S24QnP1S/region.json"
26082 Sep 22 23:14:31.725 INFO Database read version 1
26083 Sep 22 23:14:31.725 INFO Database write version 1
26084 Sep 22 23:14:31.725 INFO UUID: cc08515f-5167-4ce8-af58-a1c8873a38fe
26085 Sep 22 23:14:31.725 INFO Blocks per extent:5 Total Extents: 2
26086 Sep 22 23:14:31.725 INFO Crucible Version: Crucible Version: 0.0.1
26087 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26088 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26089 rustc: 1.70.0 stable x86_64-unknown-illumos
26090 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26091 Sep 22 23:14:31.725 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26092 Sep 22 23:14:31.725 INFO Using address: 127.0.0.1:43819, task: main
26093 Sep 22 23:14:31.725 INFO Repair listens on 127.0.0.1:0, task: repair
26094 Sep 22 23:14:31.725 INFO UUID: 1535f28b-0d0c-40c8-8224-3bcf2b9ad4d7
26095 Sep 22 23:14:31.725 INFO Blocks per extent:5 Total Extents: 2
26096 Sep 22 23:14:31.725 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:36231, task: repair
26097 Sep 22 23:14:31.725 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:36231, task: repair
26098 Sep 22 23:14:31.725 INFO Crucible Version: Crucible Version: 0.0.1
26099 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26100 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26101 rustc: 1.70.0 stable x86_64-unknown-illumos
26102 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26103 Sep 22 23:14:31.725 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26104 Sep 22 23:14:31.725 INFO Using address: 127.0.0.1:45844, task: main
26105 Sep 22 23:14:31.725 INFO listening, local_addr: 127.0.0.1:36231, task: repair
26106 Sep 22 23:14:31.726 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:36231, task: repair
26107 Sep 22 23:14:31.726 INFO Using repair address: 127.0.0.1:36231, task: main
26108 Sep 22 23:14:31.726 INFO Repair listens on 127.0.0.1:0, task: repair
26109 Sep 22 23:14:31.726 INFO No SSL acceptor configured, task: main
26110 Sep 22 23:14:31.726 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:40664, task: repair
26111 Sep 22 23:14:31.726 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:40664, task: repair
26112 Sep 22 23:14:31.726 INFO listening, local_addr: 127.0.0.1:40664, task: repair
26113 Sep 22 23:14:31.726 INFO current number of open files limit 65536 is already the maximum
26114 Sep 22 23:14:31.726 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:40664, task: repair
26115 Sep 22 23:14:31.726 INFO Created new region file "/tmp/downstairs-VwMKzFw9/region.json"
26116 Sep 22 23:14:31.726 INFO Using repair address: 127.0.0.1:40664, task: main
26117 Sep 22 23:14:31.726 INFO No SSL acceptor configured, task: main
26118 Sep 22 23:14:31.726 INFO current number of open files limit 65536 is already the maximum
26119 Sep 22 23:14:31.726 INFO Created new region file "/tmp/downstairs-eC0rwx3k/region.json"
26120 Sep 22 23:14:31.729 INFO current number of open files limit 65536 is already the maximum
26121 Sep 22 23:14:31.729 INFO Opened existing region file "/tmp/downstairs-eC0rwx3k/region.json"
26122 Sep 22 23:14:31.729 INFO Database read version 1
26123 Sep 22 23:14:31.729 INFO Database write version 1
26124 Sep 22 23:14:31.730 INFO current number of open files limit 65536 is already the maximum
26125 Sep 22 23:14:31.730 INFO Opened existing region file "/tmp/downstairs-VwMKzFw9/region.json"
26126 Sep 22 23:14:31.730 INFO Database read version 1
26127 Sep 22 23:14:31.730 INFO Database write version 1
26128 Sep 22 23:14:31.730 INFO UUID: 77eb89cb-93f6-46c5-be2e-fd5390eb0b19
26129 Sep 22 23:14:31.730 INFO Blocks per extent:5 Total Extents: 2
26130 Sep 22 23:14:31.730 INFO Crucible Version: Crucible Version: 0.0.1
26131 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26132 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26133 rustc: 1.70.0 stable x86_64-unknown-illumos
26134 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26135 Sep 22 23:14:31.730 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26136 Sep 22 23:14:31.730 INFO Using address: 127.0.0.1:34614, task: main
26137 Sep 22 23:14:31.730 INFO Repair listens on 127.0.0.1:0, task: repair
26138 Sep 22 23:14:31.730 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:37689, task: repair
26139 Sep 22 23:14:31.730 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:37689, task: repair
26140 Sep 22 23:14:31.730 INFO listening, local_addr: 127.0.0.1:37689, task: repair
26141 Sep 22 23:14:31.730 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:37689, task: repair
26142 Sep 22 23:14:31.730 INFO Using repair address: 127.0.0.1:37689, task: main
26143 Sep 22 23:14:31.730 INFO No SSL acceptor configured, task: main
26144 Sep 22 23:14:31.730 INFO current number of open files limit 65536 is already the maximum
26145 Sep 22 23:14:31.730 INFO UUID: 420155b0-a798-48b0-aa4b-1710ab0f9b49
26146 Sep 22 23:14:31.730 INFO Blocks per extent:5 Total Extents: 2
26147 Sep 22 23:14:31.730 INFO Created new region file "/tmp/downstairs-CCCKCF6f/region.json"
26148 Sep 22 23:14:31.731 INFO Crucible Version: Crucible Version: 0.0.1
26149 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26150 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26151 rustc: 1.70.0 stable x86_64-unknown-illumos
26152 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26153 Sep 22 23:14:31.731 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26154 Sep 22 23:14:31.731 INFO Using address: 127.0.0.1:64951, task: main
26155 Sep 22 23:14:31.731 INFO Repair listens on 127.0.0.1:0, task: repair
26156 Sep 22 23:14:31.731 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:50389, task: repair
26157 Sep 22 23:14:31.731 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:50389, task: repair
26158 Sep 22 23:14:31.731 INFO listening, local_addr: 127.0.0.1:50389, task: repair
26159 Sep 22 23:14:31.731 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:50389, task: repair
26160 Sep 22 23:14:31.731 INFO Using repair address: 127.0.0.1:50389, task: main
26161 Sep 22 23:14:31.731 INFO No SSL acceptor configured, task: main
26162 note: configured to log to "/dev/stdout"
26163 Sep 22 23:14:31.733 INFO current number of open files limit 65536 is already the maximum
26164 Sep 22 23:14:31.733 INFO Opened existing region file "/tmp/downstairs-CCCKCF6f/region.json"
26165 Sep 22 23:14:31.733 INFO Database read version 1
26166 Sep 22 23:14:31.733 INFO Database write version 1
261672023-09-22T23:14:31.733ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:51521
26168 Sep 22 23:14:31.733 INFO UUID: 4c7c4d38-93a3-4971-b5c4-80ee60a119d2
26169 Sep 22 23:14:31.733 INFO Blocks per extent:5 Total Extents: 2
26170 Sep 22 23:14:31.733 INFO Crucible Version: Crucible Version: 0.0.1
26171 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26172 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26173 rustc: 1.70.0 stable x86_64-unknown-illumos
26174 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26175 Sep 22 23:14:31.733 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26176 Sep 22 23:14:31.733 INFO Using address: 127.0.0.1:49185, task: main
261772023-09-22T23:14:31.733ZINFOcrucible-pantry: listen IP: 127.0.0.1:51521
26178 Sep 22 23:14:31.734 INFO Repair listens on 127.0.0.1:0, task: repair
26179 Sep 22 23:14:31.734 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:39643, task: repair
26180 Sep 22 23:14:31.734 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:39643, task: repair
26181 Sep 22 23:14:31.734 INFO listening, local_addr: 127.0.0.1:39643, task: repair
26182 Sep 22 23:14:31.734 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:39643, task: repair
26183 Sep 22 23:14:31.734 INFO Using repair address: 127.0.0.1:39643, task: main
26184 Sep 22 23:14:31.734 INFO No SSL acceptor configured, task: main
26185 note: configured to log to "/dev/stdout"
261862023-09-22T23:14:31.735ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:51522
261872023-09-22T23:14:31.735ZINFOcrucible-pantry: listen IP: 127.0.0.1:51522
26188 Sep 22 23:14:31.827 INFO listening on 127.0.0.1:0, task: main
26189 Sep 22 23:14:31.827 INFO listening on 127.0.0.1:0, task: main
26190 Sep 22 23:14:31.827 INFO listening on 127.0.0.1:0, task: main
26191 Sep 22 23:14:31.827 INFO listening on 127.0.0.1:0, task: main
26192 Sep 22 23:14:31.827 INFO listening on 127.0.0.1:0, task: main
26193 Sep 22 23:14:31.827 INFO listening on 127.0.0.1:0, task: main
261942023-09-22T23:14:31.827ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:51522 remote_addr = 127.0.0.1:43032
261952023-09-22T23:14:31.827ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:51521 remote_addr = 127.0.0.1:60724
261962023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): no entry exists for volume b9c1e9d5-7588-4cfd-ba21-899d3f748374, constructing...
261972023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): no entry exists for volume 79c71868-5af1-4b01-8453-182bf4ff9124, constructing...
261982023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): Upstairs starts
261992023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): Upstairs starts
262002023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
262012023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
262022023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
262032023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
262042023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3
262052023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): Crucible 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 has session id: 7773880a-bf4e-4e9d-af22-d332b059734c
262062023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:56365 looper = 0
262072023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 451049c0-e2b7-41c3-872a-e5be40bc5510
262082023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): Crucible 451049c0-e2b7-41c3-872a-e5be40bc5510 has session id: da82b289-0e3b-42f1-837a-a883c1bd3768
262092023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:45844 looper = 0
262102023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:34614 looper = 1
262112023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:49185 looper = 2
262122023-09-22T23:14:31.828ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:43819 looper = 1
262132023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
262142023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
262152023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
262162023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:64951 looper = 2
262172023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
262182023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
262192023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): volume 79c71868-5af1-4b01-8453-182bf4ff9124 constructed ok
262202023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
26221 The guest has requested activation
262222023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): volume b9c1e9d5-7588-4cfd-ba21-899d3f748374 constructed ok
262232023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): 451049c0-e2b7-41c3-872a-e5be40bc5510 active request set
26224 Sep 22 23:14:31.829 INFO accepted connection from 127.0.0.1:47008, task: main
26225 The guest has requested activation
26226 Sep 22 23:14:31.829 INFO accepted connection from 127.0.0.1:52904, task: main
262272023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 active request set
26228 Sep 22 23:14:31.829 INFO accepted connection from 127.0.0.1:60505, task: main
26229 Sep 22 23:14:31.829 INFO accepted connection from 127.0.0.1:48109, task: main
262302023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): [0] 451049c0-e2b7-41c3-872a-e5be40bc5510 looper connected looper = 0
262312023-09-22T23:14:31.829ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:45844 in state New
26232 Sep 22 23:14:31.829 INFO accepted connection from 127.0.0.1:63156, task: main
26233 Sep 22 23:14:31.830 INFO accepted connection from 127.0.0.1:54276, task: main
262342023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [1] 451049c0-e2b7-41c3-872a-e5be40bc5510 looper connected looper = 1
262352023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:34614 in state New
262362023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [2] 451049c0-e2b7-41c3-872a-e5be40bc5510 looper connected looper = 2
262372023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [0] 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 looper connected looper = 0
262382023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:49185 in state New
262392023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:56365 in state New
262402023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [1] 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 looper connected looper = 1
262412023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:43819 in state New
262422023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [2] 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 looper connected looper = 2
262432023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:64951 in state New
26244 Sep 22 23:14:31.830 INFO Connection request from 451049c0-e2b7-41c3-872a-e5be40bc5510 with version 4, task: proc
26245 Sep 22 23:14:31.830 INFO upstairs UpstairsConnection { upstairs_id: 451049c0-e2b7-41c3-872a-e5be40bc5510, session_id: 193f0536-0e45-44aa-a329-53c0a3eabf73, gen: 1 } connected, version 4, task: proc
26246 Sep 22 23:14:31.830 INFO Connection request from 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 with version 4, task: proc
26247 Sep 22 23:14:31.830 INFO Connection request from 451049c0-e2b7-41c3-872a-e5be40bc5510 with version 4, task: proc
26248 Sep 22 23:14:31.830 INFO upstairs UpstairsConnection { upstairs_id: 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3, session_id: a5beedbf-47b7-46b1-9353-7d08dc5cf76e, gen: 1 } connected, version 4, task: proc
26249 Sep 22 23:14:31.830 INFO upstairs UpstairsConnection { upstairs_id: 451049c0-e2b7-41c3-872a-e5be40bc5510, session_id: 193f0536-0e45-44aa-a329-53c0a3eabf73, gen: 1 } connected, version 4, task: proc
26250 Sep 22 23:14:31.830 INFO Connection request from 451049c0-e2b7-41c3-872a-e5be40bc5510 with version 4, task: proc
26251 Sep 22 23:14:31.830 INFO Connection request from 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 with version 4, task: proc
26252 Sep 22 23:14:31.830 INFO upstairs UpstairsConnection { upstairs_id: 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3, session_id: a5beedbf-47b7-46b1-9353-7d08dc5cf76e, gen: 1 } connected, version 4, task: proc
26253 Sep 22 23:14:31.830 INFO upstairs UpstairsConnection { upstairs_id: 451049c0-e2b7-41c3-872a-e5be40bc5510, session_id: 193f0536-0e45-44aa-a329-53c0a3eabf73, gen: 1 } connected, version 4, task: proc
26254 Sep 22 23:14:31.830 INFO Connection request from 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 with version 4, task: proc
26255 Sep 22 23:14:31.830 INFO upstairs UpstairsConnection { upstairs_id: 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3, session_id: a5beedbf-47b7-46b1-9353-7d08dc5cf76e, gen: 1 } connected, version 4, task: proc
262562023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [0] 451049c0-e2b7-41c3-872a-e5be40bc5510 (193f0536-0e45-44aa-a329-53c0a3eabf73) New New New ds_transition to WaitActive
262572023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
262582023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 193f0536-0e45-44aa-a329-53c0a3eabf73
262592023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [1] 451049c0-e2b7-41c3-872a-e5be40bc5510 (193f0536-0e45-44aa-a329-53c0a3eabf73) WaitActive New New ds_transition to WaitActive
262602023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
262612023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [0] 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 (a5beedbf-47b7-46b1-9353-7d08dc5cf76e) New New New ds_transition to WaitActive
262622023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 193f0536-0e45-44aa-a329-53c0a3eabf73
262632023-09-22T23:14:31.830ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
262642023-09-22T23:14:31.831ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session a5beedbf-47b7-46b1-9353-7d08dc5cf76e
262652023-09-22T23:14:31.831ZINFOcrucible-pantry (datafile): [2] 451049c0-e2b7-41c3-872a-e5be40bc5510 (193f0536-0e45-44aa-a329-53c0a3eabf73) WaitActive WaitActive New ds_transition to WaitActive
262662023-09-22T23:14:31.831ZINFOcrucible-pantry (datafile): [1] 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 (a5beedbf-47b7-46b1-9353-7d08dc5cf76e) WaitActive New New ds_transition to WaitActive
262672023-09-22T23:14:31.831ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
26268 Sep 22 23:14:31.831 INFO UpstairsConnection { upstairs_id: 451049c0-e2b7-41c3-872a-e5be40bc5510, session_id: 193f0536-0e45-44aa-a329-53c0a3eabf73, gen: 1 } is now active (read-write)
262692023-09-22T23:14:31.831ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session a5beedbf-47b7-46b1-9353-7d08dc5cf76e
262702023-09-22T23:14:31.831ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
262712023-09-22T23:14:31.831ZINFOcrucible-pantry (datafile): [2] 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 (a5beedbf-47b7-46b1-9353-7d08dc5cf76e) WaitActive WaitActive New ds_transition to WaitActive
262722023-09-22T23:14:31.831ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
262732023-09-22T23:14:31.831ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 193f0536-0e45-44aa-a329-53c0a3eabf73
262742023-09-22T23:14:31.831ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session a5beedbf-47b7-46b1-9353-7d08dc5cf76e
26275 Sep 22 23:14:31.831 INFO UpstairsConnection { upstairs_id: 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3, session_id: a5beedbf-47b7-46b1-9353-7d08dc5cf76e, gen: 1 } is now active (read-write)
26276 Sep 22 23:14:31.831 INFO UpstairsConnection { upstairs_id: 451049c0-e2b7-41c3-872a-e5be40bc5510, session_id: 193f0536-0e45-44aa-a329-53c0a3eabf73, gen: 1 } is now active (read-write)
26277 Sep 22 23:14:31.831 INFO UpstairsConnection { upstairs_id: 451049c0-e2b7-41c3-872a-e5be40bc5510, session_id: 193f0536-0e45-44aa-a329-53c0a3eabf73, gen: 1 } is now active (read-write)
26278 Sep 22 23:14:31.831 INFO UpstairsConnection { upstairs_id: 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3, session_id: a5beedbf-47b7-46b1-9353-7d08dc5cf76e, gen: 1 } is now active (read-write)
26279 Sep 22 23:14:31.831 INFO UpstairsConnection { upstairs_id: 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3, session_id: a5beedbf-47b7-46b1-9353-7d08dc5cf76e, gen: 1 } is now active (read-write)
262802023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:45844 has UUID 1535f28b-0d0c-40c8-8224-3bcf2b9ad4d7
262812023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 1535f28b-0d0c-40c8-8224-3bcf2b9ad4d7, encrypted: true, database_read_version: 1, database_write_version: 1 }
262822023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): 451049c0-e2b7-41c3-872a-e5be40bc5510 WaitActive WaitActive WaitActive
262832023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:34614 has UUID 77eb89cb-93f6-46c5-be2e-fd5390eb0b19
262842023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:56365 has UUID 38f16af0-b890-4595-9750-0a66b8b08b29
262852023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 77eb89cb-93f6-46c5-be2e-fd5390eb0b19, encrypted: true, database_read_version: 1, database_write_version: 1 }
262862023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 38f16af0-b890-4595-9750-0a66b8b08b29, encrypted: true, database_read_version: 1, database_write_version: 1 }
262872023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): 451049c0-e2b7-41c3-872a-e5be40bc5510 WaitActive WaitActive WaitActive
262882023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 WaitActive WaitActive WaitActive
262892023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:49185 has UUID 4c7c4d38-93a3-4971-b5c4-80ee60a119d2
262902023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:43819 has UUID cc08515f-5167-4ce8-af58-a1c8873a38fe
262912023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4c7c4d38-93a3-4971-b5c4-80ee60a119d2, encrypted: true, database_read_version: 1, database_write_version: 1 }
262922023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: cc08515f-5167-4ce8-af58-a1c8873a38fe, encrypted: true, database_read_version: 1, database_write_version: 1 }
262932023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): 451049c0-e2b7-41c3-872a-e5be40bc5510 WaitActive WaitActive WaitActive
262942023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 WaitActive WaitActive WaitActive
26295 Sep 22 23:14:31.832 INFO Current flush_numbers [0..12]: [0, 0]
262962023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:64951 has UUID 420155b0-a798-48b0-aa4b-1710ab0f9b49
26297 Sep 22 23:14:31.832 INFO Current flush_numbers [0..12]: [0, 0]
262982023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 420155b0-a798-48b0-aa4b-1710ab0f9b49, encrypted: true, database_read_version: 1, database_write_version: 1 }
262992023-09-22T23:14:31.832ZINFOcrucible-pantry (datafile): 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 WaitActive WaitActive WaitActive
26300 Sep 22 23:14:31.832 INFO Downstairs has completed Negotiation, task: proc
26301 Sep 22 23:14:31.832 INFO Downstairs has completed Negotiation, task: proc
26302 Sep 22 23:14:31.832 INFO Current flush_numbers [0..12]: [0, 0]
26303 Sep 22 23:14:31.832 INFO Current flush_numbers [0..12]: [0, 0]
26304 Sep 22 23:14:31.833 INFO Downstairs has completed Negotiation, task: proc
26305 Sep 22 23:14:31.833 INFO Downstairs has completed Negotiation, task: proc
26306 Sep 22 23:14:31.833 INFO Current flush_numbers [0..12]: [0, 0]
26307 Sep 22 23:14:31.833 INFO Current flush_numbers [0..12]: [0, 0]
26308 Sep 22 23:14:31.833 INFO Downstairs has completed Negotiation, task: proc
26309 Sep 22 23:14:31.833 INFO Downstairs has completed Negotiation, task: proc
263102023-09-22T23:14:31.833ZINFOcrucible-pantry (datafile): [0] 451049c0-e2b7-41c3-872a-e5be40bc5510 (193f0536-0e45-44aa-a329-53c0a3eabf73) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
263112023-09-22T23:14:31.833ZINFOcrucible-pantry (datafile): [0] 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 (a5beedbf-47b7-46b1-9353-7d08dc5cf76e) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
263122023-09-22T23:14:31.833ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
263132023-09-22T23:14:31.833ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
263142023-09-22T23:14:31.833ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
263152023-09-22T23:14:31.833ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
263162023-09-22T23:14:31.833ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
263172023-09-22T23:14:31.833ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
263182023-09-22T23:14:31.833ZINFOcrucible-pantry (datafile): [1] 451049c0-e2b7-41c3-872a-e5be40bc5510 (193f0536-0e45-44aa-a329-53c0a3eabf73) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
263192023-09-22T23:14:31.833ZINFOcrucible-pantry (datafile): [1] 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 (a5beedbf-47b7-46b1-9353-7d08dc5cf76e) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
263202023-09-22T23:14:31.833ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
263212023-09-22T23:14:31.833ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
263222023-09-22T23:14:31.834ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
263232023-09-22T23:14:31.834ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
263242023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
263252023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
263262023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [2] 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 (a5beedbf-47b7-46b1-9353-7d08dc5cf76e) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
263272023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [2] 451049c0-e2b7-41c3-872a-e5be40bc5510 (193f0536-0e45-44aa-a329-53c0a3eabf73) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
26328 The guest has finished waiting for activation
26329 The guest has finished waiting for activation
263302023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
263312023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
263322023-09-22T23:14:31.834ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
263332023-09-22T23:14:31.834ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
263342023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
263352023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
263362023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:56365 task reports connection:true
263372023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:45844 task reports connection:true
263382023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 WaitQuorum WaitQuorum WaitQuorum
263392023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): 451049c0-e2b7-41c3-872a-e5be40bc5510 WaitQuorum WaitQuorum WaitQuorum
263402023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
263412023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
263422023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
263432023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
263442023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
263452023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
263462023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
263472023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
263482023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
263492023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
263502023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
263512023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
263522023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
263532023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
263542023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
263552023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
263562023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
263572023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
263582023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): Max found gen is 1
263592023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): Max found gen is 1
263602023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
263612023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
263622023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): Next flush: 1
263632023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): All extents match
263642023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): Next flush: 1
263652023-09-22T23:14:31.834ZINFOcrucible-pantry (datafile): No downstairs repair required
263662023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): All extents match
263672023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): No initial repair work was required
263682023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): No downstairs repair required
263692023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
263702023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): No initial repair work was required
263712023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): 451049c0-e2b7-41c3-872a-e5be40bc5510 is now active with session: 193f0536-0e45-44aa-a329-53c0a3eabf73
263722023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
263732023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): 451049c0-e2b7-41c3-872a-e5be40bc5510 Set Active after no repair
263742023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 is now active with session: a5beedbf-47b7-46b1-9353-7d08dc5cf76e
263752023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
263762023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 Set Active after no repair
263772023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): Set check for repair
263782023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
263792023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:34614 task reports connection:true
263802023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): Set check for repair
263812023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): 451049c0-e2b7-41c3-872a-e5be40bc5510 Active Active Active
263822023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:43819 task reports connection:true
263832023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): Set check for repair
263842023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 Active Active Active
263852023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:49185 task reports connection:true
263862023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): Set check for repair
263872023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): 451049c0-e2b7-41c3-872a-e5be40bc5510 Active Active Active
263882023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:64951 task reports connection:true
263892023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): Set check for repair
263902023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 Active Active Active
263912023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [0] received reconcile message
263922023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): Set check for repair
263932023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
263942023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [0] received reconcile message
263952023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
263962023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
263972023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [1] received reconcile message
263982023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
263992023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
264002023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [1] received reconcile message
264012023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
264022023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
264032023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [2] received reconcile message
264042023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
264052023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
264062023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
264072023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [2] received reconcile message
264082023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): volume 79c71868-5af1-4b01-8453-182bf4ff9124 activated ok
264092023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
264102023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): volume 79c71868-5af1-4b01-8453-182bf4ff9124 constructed and inserted ok
264112023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
264122023-09-22T23:14:31.835ZINFOcrucible-pantry (datafile): volume b9c1e9d5-7588-4cfd-ba21-899d3f748374 activated ok
264132023-09-22T23:14:31.835ZINFOcrucible-pantry (dropshot): request completed latency_us = 6635 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:43032 req_id = a3141e3f-e0f9-4c25-8b21-d6e793fc27b4 response_code = 200 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124
264142023-09-22T23:14:31.836ZINFOcrucible-pantry (datafile): volume b9c1e9d5-7588-4cfd-ba21-899d3f748374 constructed and inserted ok
264152023-09-22T23:14:31.836ZINFOcrucible-pantry (dropshot): request completed latency_us = 6599 local_addr = 127.0.0.1:51521 method = POST remote_addr = 127.0.0.1:60724 req_id = d8376324-72a1-4e60-9b98-73d722e6a2fb response_code = 200 uri = /crucible/pantry/0/volume/b9c1e9d5-7588-4cfd-ba21-899d3f748374
264162023-09-22T23:14:31.836ZINFOcrucible-pantry (datafile): flush with snap requested
26417 Sep 22 23:14:31.836 ERRO Snapshot request received on unsupported binary
26418 Sep 22 23:14:31.836 DEBG Flush :1000 extent_limit None deps:[] res:true f:1 g:1
26419 Sep 22 23:14:31.836 ERRO Snapshot request received on unsupported binary
26420 Sep 22 23:14:31.836 DEBG Flush :1000 extent_limit None deps:[] res:true f:1 g:1
26421 Sep 22 23:14:31.836 ERRO Snapshot request received on unsupported binary
26422 Sep 22 23:14:31.836 DEBG Flush :1000 extent_limit None deps:[] res:true f:1 g:1
264232023-09-22T23:14:31.837ZINFOcrucible-pantry (dropshot): request completed latency_us = 1846 local_addr = 127.0.0.1:51521 method = POST remote_addr = 127.0.0.1:60724 req_id = c5106f12-9782-40fd-b3d0-52c334dff322 response_code = 204 uri = /crucible/pantry/0/volume/b9c1e9d5-7588-4cfd-ba21-899d3f748374/snapshot
264242023-09-22T23:14:31.837ZINFOcrucible-pantry (datafile): detach removing entry for volume b9c1e9d5-7588-4cfd-ba21-899d3f748374
264252023-09-22T23:14:31.837ZINFOcrucible-pantry (datafile): detaching volume b9c1e9d5-7588-4cfd-ba21-899d3f748374
26426 Sep 22 23:14:31.838 DEBG Flush :1001 extent_limit None deps:[] res:true f:2 g:1
26427 Sep 22 23:14:31.838 DEBG Flush :1001 extent_limit None deps:[] res:true f:2 g:1
26428 Sep 22 23:14:31.838 DEBG Flush :1001 extent_limit None deps:[] res:true f:2 g:1
264292023-09-22T23:14:31.839ZINFOcrucible-pantry (datafile): Request to deactivate this guest
264302023-09-22T23:14:31.839ZINFOcrucible-pantry (datafile): 7fa1ceb3-2853-4b6f-aa54-bc7d3f1bd8a3 set deactivating.
264312023-09-22T23:14:31.839ZINFOcrucible-pantry (dropshot): request completed latency_us = 1938 local_addr = 127.0.0.1:51521 method = DELETE remote_addr = 127.0.0.1:60724 req_id = e4821359-efff-4694-8eb5-9a0c244e389d response_code = 204 uri = /crucible/pantry/0/volume/b9c1e9d5-7588-4cfd-ba21-899d3f748374
264322023-09-22T23:14:31.840ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:51522 remote_addr = 127.0.0.1:49688
264332023-09-22T23:14:31.840ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:51522 remote_addr = 127.0.0.1:45421
264342023-09-22T23:14:31.840ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:51522 remote_addr = 127.0.0.1:51046
264352023-09-22T23:14:31.840ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:51522 remote_addr = 127.0.0.1:60061
264362023-09-22T23:14:31.840ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:51522 remote_addr = 127.0.0.1:54313
264372023-09-22T23:14:31.840ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:51522 remote_addr = 127.0.0.1:37651
264382023-09-22T23:14:31.840ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:51522 remote_addr = 127.0.0.1:46952
264392023-09-22T23:14:31.840ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:51522 remote_addr = 127.0.0.1:58452
264402023-09-22T23:14:31.840ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:51522 remote_addr = 127.0.0.1:41946
264412023-09-22T23:14:31.842ZINFOcrucible-pantry (dropshot): request completed latency_us = 3508 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:43032 req_id = a1900ba1-380b-41cd-9073-b78f2f897184 response_code = 204 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_write
26442 test test::test_pantry_snapshot ... ok
26443 Sep 22 23:14:31.845 INFO current number of open files limit 65536 is already the maximum
26444 Sep 22 23:14:31.845 INFO Created new region file "/tmp/downstairs-kXOwbSjj/region.json"
26445 Sep 22 23:14:31.846 DEBG Write :1000 deps:[] res:true
26446 Sep 22 23:14:31.846 DEBG Write :1000 deps:[] res:true
26447 Sep 22 23:14:31.847 DEBG Write :1000 deps:[] res:true
26448 Sep 22 23:14:31.847 INFO current number of open files limit 65536 is already the maximum
26449 Sep 22 23:14:31.847 INFO Opened existing region file "/tmp/downstairs-kXOwbSjj/region.json"
26450 Sep 22 23:14:31.847 INFO Database read version 1
26451 Sep 22 23:14:31.847 INFO Database write version 1
26452 Sep 22 23:14:31.848 INFO UUID: 0023ec13-80c8-426d-b3f0-fea7668a3f39
26453 Sep 22 23:14:31.848 INFO Blocks per extent:5 Total Extents: 2
26454 Sep 22 23:14:31.848 INFO Crucible Version: Crucible Version: 0.0.1
26455 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26456 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26457 rustc: 1.70.0 stable x86_64-unknown-illumos
26458 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26459 Sep 22 23:14:31.848 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26460 Sep 22 23:14:31.848 INFO Using address: 127.0.0.1:45781, task: main
26461 Sep 22 23:14:31.848 INFO Repair listens on 127.0.0.1:0, task: repair
26462 Sep 22 23:14:31.848 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:64829, task: repair
26463 Sep 22 23:14:31.848 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:64829, task: repair
26464 Sep 22 23:14:31.848 INFO listening, local_addr: 127.0.0.1:64829, task: repair
26465 Sep 22 23:14:31.848 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:64829, task: repair
26466 Sep 22 23:14:31.848 INFO Using repair address: 127.0.0.1:64829, task: main
26467 Sep 22 23:14:31.848 INFO No SSL acceptor configured, task: main
26468 Sep 22 23:14:31.848 INFO current number of open files limit 65536 is already the maximum
26469 Sep 22 23:14:31.849 INFO Created new region file "/tmp/downstairs-8upV5Efn/region.json"
26470 Sep 22 23:14:31.851 INFO current number of open files limit 65536 is already the maximum
26471 Sep 22 23:14:31.851 INFO Opened existing region file "/tmp/downstairs-8upV5Efn/region.json"
26472 Sep 22 23:14:31.851 INFO Database read version 1
26473 Sep 22 23:14:31.851 INFO Database write version 1
26474 Sep 22 23:14:31.851 INFO UUID: f811b648-86cf-4cf5-927b-38fc90064f11
26475 Sep 22 23:14:31.851 INFO Blocks per extent:5 Total Extents: 2
26476 Sep 22 23:14:31.851 INFO Crucible Version: Crucible Version: 0.0.1
26477 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26478 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26479 rustc: 1.70.0 stable x86_64-unknown-illumos
26480 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26481 Sep 22 23:14:31.851 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26482 Sep 22 23:14:31.851 INFO Using address: 127.0.0.1:52134, task: main
26483 Sep 22 23:14:31.851 INFO Repair listens on 127.0.0.1:0, task: repair
26484 Sep 22 23:14:31.852 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:62441, task: repair
26485 Sep 22 23:14:31.852 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:62441, task: repair
26486 Sep 22 23:14:31.852 INFO listening, local_addr: 127.0.0.1:62441, task: repair
26487 Sep 22 23:14:31.852 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:62441, task: repair
26488 Sep 22 23:14:31.852 INFO Using repair address: 127.0.0.1:62441, task: main
26489 Sep 22 23:14:31.852 INFO No SSL acceptor configured, task: main
26490 Sep 22 23:14:31.852 INFO current number of open files limit 65536 is already the maximum
26491 Sep 22 23:14:31.852 INFO Created new region file "/tmp/downstairs-Wku7LpKv/region.json"
264922023-09-22T23:14:31.852ZINFOcrucible-pantry (dropshot): request completed latency_us = 9480 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:49688 req_id = 95300955-d478-4550-8427-49b16d182763 response_code = 204 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_write
264932023-09-22T23:14:31.853ZINFOcrucible-pantry (dropshot): request completed latency_us = 9385 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:45421 req_id = 0f4a5c8f-9f77-4485-878a-226746cbcff2 response_code = 204 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_write
264942023-09-22T23:14:31.853ZINFOcrucible-pantry (dropshot): request completed latency_us = 9259 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:51046 req_id = 189538a7-9338-4d47-8009-05ad93e8f632 response_code = 204 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_write
264952023-09-22T23:14:31.853ZINFOcrucible-pantry (dropshot): request completed latency_us = 9131 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:60061 req_id = d5963a47-84a6-4d9e-a606-b65f0d070384 response_code = 204 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_write
264962023-09-22T23:14:31.853ZINFOcrucible-pantry (dropshot): request completed latency_us = 9011 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:54313 req_id = 20c44313-ac23-4c07-9233-45b7ecbc8502 response_code = 204 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_write
264972023-09-22T23:14:31.853ZINFOcrucible-pantry (dropshot): request completed latency_us = 8885 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:37651 req_id = 30ea465d-adb6-4362-979c-a76a1943245d response_code = 204 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_write
264982023-09-22T23:14:31.853ZINFOcrucible-pantry (dropshot): request completed latency_us = 8766 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:46952 req_id = 02ef650c-4e27-41d8-a668-8fecd754d689 response_code = 204 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_write
264992023-09-22T23:14:31.853ZINFOcrucible-pantry (dropshot): request completed latency_us = 8644 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:58452 req_id = 63c9d6b3-87ad-4ed9-b8d9-c36ab148a547 response_code = 204 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_write
265002023-09-22T23:14:31.853ZINFOcrucible-pantry (dropshot): request completed latency_us = 8539 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:41946 req_id = 1a30ef41-202e-4e81-b5ee-196a04e3f431 response_code = 204 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_write
26501 Sep 22 23:14:31.853 DEBG Write :1001 deps:[] res:true
26502 Sep 22 23:14:31.853 DEBG Write :1002 deps:[] res:true
26503 Sep 22 23:14:31.854 DEBG Write :1003 deps:[] res:true
26504 Sep 22 23:14:31.854 DEBG Write :1004 deps:[] res:true
26505 Sep 22 23:14:31.854 INFO current number of open files limit 65536 is already the maximum
26506 Sep 22 23:14:31.854 INFO Opened existing region file "/tmp/downstairs-Wku7LpKv/region.json"
26507 Sep 22 23:14:31.854 INFO Database read version 1
26508 Sep 22 23:14:31.854 INFO Database write version 1
26509 Sep 22 23:14:31.854 DEBG Write :1005 deps:[] res:true
26510 Sep 22 23:14:31.854 DEBG Write :1006 deps:[] res:true
26511 Sep 22 23:14:31.855 DEBG Write :1007 deps:[] res:true
26512 Sep 22 23:14:31.855 INFO UUID: 4cdee3da-6bae-4f3e-9cc5-e3e96652865a
26513 Sep 22 23:14:31.855 INFO Blocks per extent:5 Total Extents: 2
26514 Sep 22 23:14:31.855 INFO Crucible Version: Crucible Version: 0.0.1
26515 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26516 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26517 rustc: 1.70.0 stable x86_64-unknown-illumos
26518 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26519 Sep 22 23:14:31.855 DEBG Write :1008 deps:[] res:true
26520 Sep 22 23:14:31.855 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26521 Sep 22 23:14:31.855 INFO Using address: 127.0.0.1:60333, task: main
26522 Sep 22 23:14:31.855 DEBG Write :1009 deps:[] res:true
26523 Sep 22 23:14:31.855 INFO Repair listens on 127.0.0.1:0, task: repair
26524 Sep 22 23:14:31.855 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:37396, task: repair
26525 Sep 22 23:14:31.855 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:37396, task: repair
26526 Sep 22 23:14:31.855 INFO listening, local_addr: 127.0.0.1:37396, task: repair
26527 Sep 22 23:14:31.855 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:37396, task: repair
26528 Sep 22 23:14:31.855 INFO Using repair address: 127.0.0.1:37396, task: main
26529 Sep 22 23:14:31.855 INFO No SSL acceptor configured, task: main
26530 note: configured to log to "/dev/stdout"
265312023-09-22T23:14:31.856ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:39234
265322023-09-22T23:14:31.857ZINFOcrucible-pantry: listen IP: 127.0.0.1:39234
26533 Sep 22 23:14:31.857 DEBG Write :1001 deps:[] res:true
26534 Sep 22 23:14:31.857 DEBG Write :1002 deps:[] res:true
26535 Sep 22 23:14:31.857 DEBG Write :1003 deps:[] res:true
26536 Sep 22 23:14:31.857 DEBG Write :1004 deps:[] res:true
26537 Sep 22 23:14:31.858 DEBG Write :1005 deps:[] res:true
26538 Sep 22 23:14:31.858 DEBG Write :1006 deps:[] res:true
26539 Sep 22 23:14:31.858 DEBG Write :1007 deps:[] res:true
26540 Sep 22 23:14:31.858 DEBG Write :1008 deps:[] res:true
26541 Sep 22 23:14:31.858 DEBG Write :1009 deps:[] res:true
26542 Sep 22 23:14:31.858 DEBG Write :1001 deps:[] res:true
26543 Sep 22 23:14:31.859 DEBG Write :1002 deps:[] res:true
26544 Sep 22 23:14:31.859 DEBG Write :1003 deps:[] res:true
26545 Sep 22 23:14:31.859 DEBG Write :1004 deps:[] res:true
26546 Sep 22 23:14:31.859 DEBG Write :1005 deps:[] res:true
26547 Sep 22 23:14:31.859 DEBG Write :1006 deps:[] res:true
26548 Sep 22 23:14:31.859 DEBG Write :1007 deps:[] res:true
26549 Sep 22 23:14:31.860 DEBG Write :1008 deps:[] res:true
26550 Sep 22 23:14:31.860 DEBG Write :1009 deps:[] res:true
26551 Sep 22 23:14:31.861 DEBG Read :1010 deps:[JobId(1000)] res:true
26552 Sep 22 23:14:31.862 DEBG Read :1010 deps:[JobId(1000)] res:true
26553 Sep 22 23:14:31.862 DEBG Read :1010 deps:[JobId(1000)] res:true
265542023-09-22T23:14:31.863ZINFOcrucible-pantry (dropshot): request completed latency_us = 2643 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:41946 req_id = 48c1c518-5f5f-4de9-94a5-a50d0e3198ce response_code = 200 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_read
26555 Sep 22 23:14:31.865 DEBG Read :1011 deps:[JobId(1001)] res:true
26556 Sep 22 23:14:31.865 DEBG Read :1011 deps:[JobId(1001)] res:true
26557 Sep 22 23:14:31.865 DEBG Read :1011 deps:[JobId(1001)] res:true
265582023-09-22T23:14:31.866ZINFOcrucible-pantry (dropshot): request completed latency_us = 2560 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:41946 req_id = 709db523-f743-4a43-b2f1-b21923a676da response_code = 200 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_read
26559 Sep 22 23:14:31.868 DEBG Read :1012 deps:[JobId(1002)] res:true
26560 Sep 22 23:14:31.868 DEBG Read :1012 deps:[JobId(1002)] res:true
26561 Sep 22 23:14:31.868 DEBG Read :1012 deps:[JobId(1002)] res:true
265622023-09-22T23:14:31.869ZINFOcrucible-pantry (dropshot): request completed latency_us = 2536 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:41946 req_id = c93c0b26-3196-42cc-b62f-199e2d47c195 response_code = 200 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_read
26563 Sep 22 23:14:31.871 DEBG Read :1013 deps:[JobId(1003)] res:true
26564 Sep 22 23:14:31.871 DEBG Read :1013 deps:[JobId(1003)] res:true
26565 Sep 22 23:14:31.871 DEBG Read :1013 deps:[JobId(1003)] res:true
265662023-09-22T23:14:31.873ZINFOcrucible-pantry (dropshot): request completed latency_us = 2576 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:41946 req_id = af4f704a-38df-439d-bc56-77e9d715fe68 response_code = 200 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_read
26567 Sep 22 23:14:31.874 DEBG Read :1014 deps:[JobId(1004)] res:true
26568 Sep 22 23:14:31.874 DEBG Read :1014 deps:[JobId(1004)] res:true
26569 Sep 22 23:14:31.875 DEBG Read :1014 deps:[JobId(1004)] res:true
265702023-09-22T23:14:31.876ZINFOcrucible-pantry (dropshot): request completed latency_us = 2571 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:41946 req_id = a06d3977-c87e-4c94-b5b4-1351f3d7c709 response_code = 200 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_read
26571 Sep 22 23:14:31.878 DEBG Read :1015 deps:[JobId(1005)] res:true
26572 Sep 22 23:14:31.878 DEBG Read :1015 deps:[JobId(1005)] res:true
26573 Sep 22 23:14:31.878 DEBG Read :1015 deps:[JobId(1005)] res:true
265742023-09-22T23:14:31.879ZINFOcrucible-pantry (dropshot): request completed latency_us = 2616 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:41946 req_id = e17d203d-a6a9-4d13-a86d-775d2ded5dbc response_code = 200 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_read
26575 Sep 22 23:14:31.881 DEBG Read :1016 deps:[JobId(1006)] res:true
26576 Sep 22 23:14:31.881 DEBG Read :1016 deps:[JobId(1006)] res:true
26577 Sep 22 23:14:31.881 DEBG Read :1016 deps:[JobId(1006)] res:true
265782023-09-22T23:14:31.883ZINFOcrucible-pantry (dropshot): request completed latency_us = 2538 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:41946 req_id = da7750e8-2504-4af8-8ec4-968cb3eb56c7 response_code = 200 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_read
26579 Sep 22 23:14:31.884 DEBG Read :1017 deps:[JobId(1007)] res:true
26580 Sep 22 23:14:31.884 DEBG Read :1017 deps:[JobId(1007)] res:true
26581 Sep 22 23:14:31.884 DEBG Read :1017 deps:[JobId(1007)] res:true
265822023-09-22T23:14:31.886ZINFOcrucible-pantry (dropshot): request completed latency_us = 2533 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:41946 req_id = 6d4a7526-0c20-410a-9e4c-1707bc703366 response_code = 200 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_read
26583 Sep 22 23:14:31.887 DEBG Read :1018 deps:[JobId(1008)] res:true
26584 Sep 22 23:14:31.887 DEBG Read :1018 deps:[JobId(1008)] res:true
26585 Sep 22 23:14:31.887 DEBG Read :1018 deps:[JobId(1008)] res:true
265862023-09-22T23:14:31.889ZINFOcrucible-pantry (dropshot): request completed latency_us = 2548 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:41946 req_id = b0f51dc2-99e4-4464-86ea-a51e5ad2a994 response_code = 200 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_read
26587 Sep 22 23:14:31.890 WARN e94af85e-3796-4fab-91a1-f12add9c3020 request to replace downstairs 127.0.0.1:52905 with 127.0.0.1:56689
26588 Sep 22 23:14:31.890 INFO e94af85e-3796-4fab-91a1-f12add9c3020 found new target: 127.0.0.1:56689 at 0
26589 Downstairs replacement completed
26590 Sep 22 23:14:31.890 DEBG Read :1019 deps:[JobId(1009)] res:true
26591 Sep 22 23:14:31.891 DEBG IO Read 1011 has deps []
26592 Sep 22 23:14:31.891 DEBG Read :1019 deps:[JobId(1009)] res:true
26593 Sep 22 23:14:31.891 DEBG Read :1019 deps:[JobId(1009)] res:true
26594 Sep 22 23:14:31.891 DEBG Read :1011 deps:[] res:true
26595 Sep 22 23:14:31.892 DEBG Read :1011 deps:[] res:true
26596 Sep 22 23:14:31.893 DEBG Read :1011 deps:[] res:true
265972023-09-22T23:14:31.893ZINFOcrucible-pantry (dropshot): request completed latency_us = 3330 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:41946 req_id = 953a44d0-b480-4c38-879c-c0ed4e5f96c8 response_code = 200 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/bulk_read
265982023-09-22T23:14:31.895ZINFOcrucible-pantry (dropshot): request completed latency_us = 324 local_addr = 127.0.0.1:51522 method = POST remote_addr = 127.0.0.1:41946 req_id = 4bd8ca14-e4b3-4ecd-a12d-4bcb290ed780 response_code = 200 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124/validate
26599 Sep 22 23:14:31.896 DEBG Read :1020 deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
26600 Sep 22 23:14:31.896 DEBG Read :1020 deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
26601 Sep 22 23:14:31.897 DEBG Read :1020 deps:[JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
26602 Sep 22 23:14:31.897 DEBG [1] Read AckReady 1011, : downstairs
26603 Sep 22 23:14:31.900 DEBG [2] Read already AckReady 1011, : downstairs
26604 Sep 22 23:14:31.903 DEBG [0] Read already AckReady 1011, : downstairs
26605 Sep 22 23:14:31.903 DEBG up_ds_listen was notified
26606 Sep 22 23:14:31.903 DEBG up_ds_listen process 1011
26607 Sep 22 23:14:31.903 DEBG [A] ack job 1011:12, : downstairs
26608 Sep 22 23:14:31.903 DEBG up_ds_listen checked 1 jobs, back to waiting
266092023-09-22T23:14:31.908ZINFOcrucible-pantry (dropshot): request completed latency_us = 179 local_addr = 127.0.0.1:51522 method = GET remote_addr = 127.0.0.1:41946 req_id = 08326023-eaa4-4f4d-96a1-659c8f8d680b response_code = 200 uri = /crucible/pantry/0/job/023f91c6-d6ff-4bd9-a4de-d41b53664b0d/is_finished
266102023-09-22T23:14:31.909ZINFOcrucible-pantry (dropshot): request completed latency_us = 184 local_addr = 127.0.0.1:51522 method = GET remote_addr = 127.0.0.1:41946 req_id = 09580edb-a327-4adc-a4da-d4a3e0f0e6db response_code = 200 uri = /crucible/pantry/0/job/023f91c6-d6ff-4bd9-a4de-d41b53664b0d/ok
26611 test test::integration_test_volume_replace_downstairs ... ok
26612 Sep 22 23:14:31.909 INFO current number of open files limit 65536 is already the maximum
26613 Sep 22 23:14:31.909 INFO Created new region file "/tmp/downstairs-V3hLVw1a/region.json"
266142023-09-22T23:14:31.910ZINFOcrucible-pantry (datafile): detach removing entry for volume 79c71868-5af1-4b01-8453-182bf4ff9124
266152023-09-22T23:14:31.910ZINFOcrucible-pantry (datafile): detaching volume 79c71868-5af1-4b01-8453-182bf4ff9124
26616 Sep 22 23:14:31.911 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
26617 Sep 22 23:14:31.912 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
26618 Sep 22 23:14:31.912 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
266192023-09-22T23:14:31.912ZINFOcrucible-pantry (datafile): Request to deactivate this guest
266202023-09-22T23:14:31.912ZINFOcrucible-pantry (datafile): 451049c0-e2b7-41c3-872a-e5be40bc5510 set deactivating.
26621 Sep 22 23:14:31.912 INFO current number of open files limit 65536 is already the maximum
26622 Sep 22 23:14:31.912 INFO Opened existing region file "/tmp/downstairs-V3hLVw1a/region.json"
26623 Sep 22 23:14:31.912 INFO Database read version 1
26624 Sep 22 23:14:31.912 INFO Database write version 1
266252023-09-22T23:14:31.912ZINFOcrucible-pantry (dropshot): request completed latency_us = 3091 local_addr = 127.0.0.1:51522 method = DELETE remote_addr = 127.0.0.1:41946 req_id = 7eba7968-1a2b-4c28-a1b8-06ea8cdc136c response_code = 204 uri = /crucible/pantry/0/volume/79c71868-5af1-4b01-8453-182bf4ff9124
26626 Sep 22 23:14:31.913 INFO UUID: fc04a5c8-8424-4fbf-8cbf-45e4299fa04d
26627 Sep 22 23:14:31.913 INFO Blocks per extent:5 Total Extents: 2
26628 Sep 22 23:14:31.913 INFO Crucible Version: Crucible Version: 0.0.1
26629 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26630 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26631 rustc: 1.70.0 stable x86_64-unknown-illumos
26632 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26633 Sep 22 23:14:31.913 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26634 Sep 22 23:14:31.913 INFO Using address: 127.0.0.1:62247, task: main
26635 Sep 22 23:14:31.914 INFO Repair listens on 127.0.0.1:0, task: repair
26636 Sep 22 23:14:31.914 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:57817, task: repair
26637 Sep 22 23:14:31.914 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:57817, task: repair
26638 Sep 22 23:14:31.914 INFO listening, local_addr: 127.0.0.1:57817, task: repair
26639 Sep 22 23:14:31.914 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:57817, task: repair
26640 Sep 22 23:14:31.914 INFO Using repair address: 127.0.0.1:57817, task: main
26641 Sep 22 23:14:31.914 INFO No SSL acceptor configured, task: main
26642 Sep 22 23:14:31.914 INFO current number of open files limit 65536 is already the maximum
26643 Sep 22 23:14:31.914 INFO Created new region file "/tmp/downstairs-vWZk4NRb/region.json"
26644 test test::test_pantry_validate ... ok
26645 Sep 22 23:14:31.918 INFO test_volume_replace of a volume
26646 Sep 22 23:14:31.918 INFO current number of open files limit 65536 is already the maximum
26647 Sep 22 23:14:31.918 INFO current number of open files limit 65536 is already the maximum
26648 Sep 22 23:14:31.918 INFO Opened existing region file "/tmp/downstairs-vWZk4NRb/region.json"
26649 Sep 22 23:14:31.918 INFO Database read version 1
26650 Sep 22 23:14:31.918 INFO Database write version 1
26651 Sep 22 23:14:31.918 INFO Created new region file "/tmp/downstairs-DqWKEajh/region.json"
26652 Sep 22 23:14:31.919 INFO UUID: 64eb55ec-79b8-4aa0-81d6-70fa12931989
26653 Sep 22 23:14:31.919 INFO Blocks per extent:5 Total Extents: 2
26654 Sep 22 23:14:31.919 INFO Crucible Version: Crucible Version: 0.0.1
26655 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26656 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26657 rustc: 1.70.0 stable x86_64-unknown-illumos
26658 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26659 Sep 22 23:14:31.919 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26660 Sep 22 23:14:31.919 INFO Using address: 127.0.0.1:39430, task: main
26661 Sep 22 23:14:31.919 INFO Repair listens on 127.0.0.1:0, task: repair
26662 Sep 22 23:14:31.919 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:37977, task: repair
26663 Sep 22 23:14:31.919 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:37977, task: repair
26664 Sep 22 23:14:31.919 INFO listening, local_addr: 127.0.0.1:37977, task: repair
26665 Sep 22 23:14:31.920 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:37977, task: repair
26666 Sep 22 23:14:31.920 INFO Using repair address: 127.0.0.1:37977, task: main
26667 Sep 22 23:14:31.920 INFO No SSL acceptor configured, task: main
26668 Sep 22 23:14:31.920 INFO current number of open files limit 65536 is already the maximum
26669 Sep 22 23:14:31.920 INFO Created new region file "/tmp/downstairs-PfkZQLmG/region.json"
26670 Sep 22 23:14:31.921 INFO current number of open files limit 65536 is already the maximum
26671 Sep 22 23:14:31.921 INFO Opened existing region file "/tmp/downstairs-DqWKEajh/region.json"
26672 Sep 22 23:14:31.921 INFO Database read version 1
26673 Sep 22 23:14:31.921 INFO Database write version 1
26674 Sep 22 23:14:31.921 INFO UUID: 399f1d0b-a42b-4a95-9d11-4686487cbd7a
26675 Sep 22 23:14:31.921 INFO Blocks per extent:5 Total Extents: 2
26676 Sep 22 23:14:31.921 INFO Crucible Version: Crucible Version: 0.0.1
26677 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26678 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26679 rustc: 1.70.0 stable x86_64-unknown-illumos
26680 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26681 Sep 22 23:14:31.921 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26682 Sep 22 23:14:31.921 INFO Using address: 127.0.0.1:61980, task: main
26683 Sep 22 23:14:31.922 INFO Repair listens on 127.0.0.1:0, task: repair
26684 Sep 22 23:14:31.922 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:58454, task: repair
26685 Sep 22 23:14:31.922 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:58454, task: repair
26686 Sep 22 23:14:31.922 INFO listening, local_addr: 127.0.0.1:58454, task: repair
26687 Sep 22 23:14:31.922 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:58454, task: repair
26688 Sep 22 23:14:31.922 INFO Using repair address: 127.0.0.1:58454, task: main
26689 Sep 22 23:14:31.922 INFO No SSL acceptor configured, task: main
26690 Sep 22 23:14:31.922 INFO current number of open files limit 65536 is already the maximum
26691 Sep 22 23:14:31.922 INFO Created new region file "/tmp/downstairs-pVoHEn8Q/region.json"
26692 Sep 22 23:14:31.924 INFO current number of open files limit 65536 is already the maximum
26693 Sep 22 23:14:31.924 INFO Opened existing region file "/tmp/downstairs-PfkZQLmG/region.json"
26694 Sep 22 23:14:31.924 INFO Database read version 1
26695 Sep 22 23:14:31.924 INFO Database write version 1
26696 Sep 22 23:14:31.925 INFO UUID: 6e47f0ac-72f2-4910-8085-d56269d7e5b1
26697 Sep 22 23:14:31.925 INFO Blocks per extent:5 Total Extents: 2
26698 Sep 22 23:14:31.925 INFO current number of open files limit 65536 is already the maximum
26699 Sep 22 23:14:31.925 INFO Opened existing region file "/tmp/downstairs-pVoHEn8Q/region.json"
26700 Sep 22 23:14:31.925 INFO Database read version 1
26701 Sep 22 23:14:31.925 INFO Database write version 1
26702 Sep 22 23:14:31.925 INFO Crucible Version: Crucible Version: 0.0.1
26703 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26704 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26705 rustc: 1.70.0 stable x86_64-unknown-illumos
26706 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26707 Sep 22 23:14:31.925 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26708 Sep 22 23:14:31.925 INFO Using address: 127.0.0.1:44923, task: main
26709 Sep 22 23:14:31.925 INFO Repair listens on 127.0.0.1:0, task: repair
26710 Sep 22 23:14:31.925 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:65121, task: repair
26711 Sep 22 23:14:31.926 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:65121, task: repair
26712 Sep 22 23:14:31.926 INFO listening, local_addr: 127.0.0.1:65121, task: repair
26713 Sep 22 23:14:31.926 INFO UUID: 43c441f1-cf2f-4bad-b83b-ff7ae6f4603d
26714 Sep 22 23:14:31.926 INFO Blocks per extent:5 Total Extents: 2
26715 Sep 22 23:14:31.926 INFO Crucible Version: Crucible Version: 0.0.1
26716 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26717 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26718 rustc: 1.70.0 stable x86_64-unknown-illumos
26719 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26720 Sep 22 23:14:31.926 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26721 Sep 22 23:14:31.926 INFO Using address: 127.0.0.1:54419, task: main
26722 Sep 22 23:14:31.926 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:65121, task: repair
26723 Sep 22 23:14:31.926 INFO Using repair address: 127.0.0.1:65121, task: main
26724 Sep 22 23:14:31.926 INFO No SSL acceptor configured, task: main
26725 Sep 22 23:14:31.926 INFO Repair listens on 127.0.0.1:0, task: repair
26726 note: configured to log to "/dev/stdout"
26727 Sep 22 23:14:31.926 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38668, task: repair
26728 Sep 22 23:14:31.926 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38668, task: repair
26729 Sep 22 23:14:31.926 INFO listening, local_addr: 127.0.0.1:38668, task: repair
26730 Sep 22 23:14:31.926 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38668, task: repair
26731 Sep 22 23:14:31.926 INFO Using repair address: 127.0.0.1:38668, task: main
26732 Sep 22 23:14:31.926 INFO No SSL acceptor configured, task: main
26733 Sep 22 23:14:31.926 INFO current number of open files limit 65536 is already the maximum
26734 Sep 22 23:14:31.926 INFO Created new region file "/tmp/downstairs-TEMyQKsJ/region.json"
267352023-09-22T23:14:31.927ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:54057
267362023-09-22T23:14:31.928ZINFOcrucible-pantry: listen IP: 127.0.0.1:54057
26737 Sep 22 23:14:31.929 INFO current number of open files limit 65536 is already the maximum
26738 Sep 22 23:14:31.929 INFO Opened existing region file "/tmp/downstairs-TEMyQKsJ/region.json"
26739 Sep 22 23:14:31.929 INFO Database read version 1
26740 Sep 22 23:14:31.929 INFO Database write version 1
26741 Sep 22 23:14:31.929 INFO UUID: a2bfa752-7a6e-4ff9-854c-32e43259f2a8
26742 Sep 22 23:14:31.929 INFO Blocks per extent:5 Total Extents: 2
26743 Sep 22 23:14:31.929 INFO Crucible Version: Crucible Version: 0.0.1
26744 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
26745 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
26746 rustc: 1.70.0 stable x86_64-unknown-illumos
26747 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
26748 Sep 22 23:14:31.929 INFO Upstairs <-> Downstairs Message Version: 4, task: main
26749 Sep 22 23:14:31.929 INFO Using address: 127.0.0.1:58078, task: main
26750 Sep 22 23:14:31.930 INFO Repair listens on 127.0.0.1:0, task: repair
26751 Sep 22 23:14:31.930 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:43913, task: repair
26752 Sep 22 23:14:31.930 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:43913, task: repair
26753 Sep 22 23:14:31.930 INFO listening, local_addr: 127.0.0.1:43913, task: repair
26754 Sep 22 23:14:31.930 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:43913, task: repair
26755 Sep 22 23:14:31.930 INFO Using repair address: 127.0.0.1:43913, task: main
26756 Sep 22 23:14:31.930 INFO No SSL acceptor configured, task: main
26757 Sep 22 23:14:31.930 INFO Upstairs starts
26758 Sep 22 23:14:31.930 INFO Crucible Version: BuildInfo {
26759 version: "0.0.1",
26760 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
26761 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
26762 git_branch: "main",
26763 rustc_semver: "1.70.0",
26764 rustc_channel: "stable",
26765 rustc_host_triple: "x86_64-unknown-illumos",
26766 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
26767 cargo_triple: "x86_64-unknown-illumos",
26768 debug: true,
26769 opt_level: 0,
26770 }
26771 Sep 22 23:14:31.930 INFO Upstairs <-> Downstairs Message Version: 4
26772 Sep 22 23:14:31.930 INFO Crucible stats registered with UUID: 68728c39-2b9d-474c-a894-338e842ff7ac
26773 Sep 22 23:14:31.930 INFO Crucible 68728c39-2b9d-474c-a894-338e842ff7ac has session id: 0a32346b-7a73-4634-a8f6-e4b15f18f823
26774 Sep 22 23:14:31.930 INFO listening on 127.0.0.1:0, task: main
26775 Sep 22 23:14:31.930 INFO listening on 127.0.0.1:0, task: main
26776 Sep 22 23:14:31.931 INFO listening on 127.0.0.1:0, task: main
26777 Sep 22 23:14:31.931 INFO [0] connecting to 127.0.0.1:61980, looper: 0
26778 Sep 22 23:14:31.931 INFO [1] connecting to 127.0.0.1:54419, looper: 1
26779 Sep 22 23:14:31.931 INFO [2] connecting to 127.0.0.1:58078, looper: 2
26780 Sep 22 23:14:31.931 INFO up_listen starts, task: up_listen
26781 Sep 22 23:14:31.931 INFO Wait for all three downstairs to come online
26782 Sep 22 23:14:31.931 INFO Flush timeout: 0.5
26783 Sep 22 23:14:31.931 INFO [0] 68728c39-2b9d-474c-a894-338e842ff7ac looper connected, looper: 0
26784 Sep 22 23:14:31.931 INFO [0] Proc runs for 127.0.0.1:61980 in state New
26785 Sep 22 23:14:31.931 INFO accepted connection from 127.0.0.1:43584, task: main
26786 Sep 22 23:14:31.931 INFO [1] 68728c39-2b9d-474c-a894-338e842ff7ac looper connected, looper: 1
26787 Sep 22 23:14:31.931 INFO [1] Proc runs for 127.0.0.1:54419 in state New
26788 Sep 22 23:14:31.931 INFO [2] 68728c39-2b9d-474c-a894-338e842ff7ac looper connected, looper: 2
26789 Sep 22 23:14:31.931 INFO [2] Proc runs for 127.0.0.1:58078 in state New
26790 Sep 22 23:14:31.931 INFO accepted connection from 127.0.0.1:39878, task: main
26791 Sep 22 23:14:31.931 INFO accepted connection from 127.0.0.1:36978, task: main
26792 Sep 22 23:14:31.932 INFO Connection request from 68728c39-2b9d-474c-a894-338e842ff7ac with version 4, task: proc
26793 Sep 22 23:14:31.932 INFO upstairs UpstairsConnection { upstairs_id: 68728c39-2b9d-474c-a894-338e842ff7ac, session_id: b1e7d83f-1c6e-44a2-a060-99d0bf74c596, gen: 2 } connected, version 4, task: proc
26794 Sep 22 23:14:31.932 INFO Connection request from 68728c39-2b9d-474c-a894-338e842ff7ac with version 4, task: proc
26795 Sep 22 23:14:31.932 INFO upstairs UpstairsConnection { upstairs_id: 68728c39-2b9d-474c-a894-338e842ff7ac, session_id: b1e7d83f-1c6e-44a2-a060-99d0bf74c596, gen: 2 } connected, version 4, task: proc
26796 Sep 22 23:14:31.932 INFO Connection request from 68728c39-2b9d-474c-a894-338e842ff7ac with version 4, task: proc
26797 Sep 22 23:14:31.932 INFO upstairs UpstairsConnection { upstairs_id: 68728c39-2b9d-474c-a894-338e842ff7ac, session_id: b1e7d83f-1c6e-44a2-a060-99d0bf74c596, gen: 2 } connected, version 4, task: proc
26798 Sep 22 23:14:31.932 INFO [0] 68728c39-2b9d-474c-a894-338e842ff7ac (b1e7d83f-1c6e-44a2-a060-99d0bf74c596) New New New ds_transition to WaitActive
26799 Sep 22 23:14:31.932 INFO [0] Transition from New to WaitActive
26800 Sep 22 23:14:31.932 INFO [1] 68728c39-2b9d-474c-a894-338e842ff7ac (b1e7d83f-1c6e-44a2-a060-99d0bf74c596) WaitActive New New ds_transition to WaitActive
26801 Sep 22 23:14:31.932 INFO [1] Transition from New to WaitActive
26802 Sep 22 23:14:31.932 INFO [2] 68728c39-2b9d-474c-a894-338e842ff7ac (b1e7d83f-1c6e-44a2-a060-99d0bf74c596) WaitActive WaitActive New ds_transition to WaitActive
26803 Sep 22 23:14:31.932 INFO [2] Transition from New to WaitActive
26804 The guest has requested activation
26805 Sep 22 23:14:31.932 INFO 68728c39-2b9d-474c-a894-338e842ff7ac active request set
26806 Sep 22 23:14:31.932 INFO [0] received activate with gen 2
26807 Sep 22 23:14:31.932 INFO [0] client got ds_active_rx, promote! session b1e7d83f-1c6e-44a2-a060-99d0bf74c596
26808 Sep 22 23:14:31.932 INFO [1] received activate with gen 2
26809 Sep 22 23:14:31.932 INFO [1] client got ds_active_rx, promote! session b1e7d83f-1c6e-44a2-a060-99d0bf74c596
26810 Sep 22 23:14:31.932 INFO [2] received activate with gen 2
26811 Sep 22 23:14:31.933 INFO [2] client got ds_active_rx, promote! session b1e7d83f-1c6e-44a2-a060-99d0bf74c596
26812 Sep 22 23:14:31.933 INFO UpstairsConnection { upstairs_id: 68728c39-2b9d-474c-a894-338e842ff7ac, session_id: b1e7d83f-1c6e-44a2-a060-99d0bf74c596, gen: 2 } is now active (read-write)
26813 Sep 22 23:14:31.933 INFO UpstairsConnection { upstairs_id: 68728c39-2b9d-474c-a894-338e842ff7ac, session_id: b1e7d83f-1c6e-44a2-a060-99d0bf74c596, gen: 2 } is now active (read-write)
26814 Sep 22 23:14:31.933 INFO UpstairsConnection { upstairs_id: 68728c39-2b9d-474c-a894-338e842ff7ac, session_id: b1e7d83f-1c6e-44a2-a060-99d0bf74c596, gen: 2 } is now active (read-write)
26815 Sep 22 23:14:31.933 INFO [0] downstairs client at 127.0.0.1:61980 has UUID 399f1d0b-a42b-4a95-9d11-4686487cbd7a
26816 Sep 22 23:14:31.933 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 399f1d0b-a42b-4a95-9d11-4686487cbd7a, encrypted: true, database_read_version: 1, database_write_version: 1 }
26817 Sep 22 23:14:31.933 INFO 68728c39-2b9d-474c-a894-338e842ff7ac WaitActive WaitActive WaitActive
26818 Sep 22 23:14:31.933 INFO [1] downstairs client at 127.0.0.1:54419 has UUID 43c441f1-cf2f-4bad-b83b-ff7ae6f4603d
26819 Sep 22 23:14:31.933 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 43c441f1-cf2f-4bad-b83b-ff7ae6f4603d, encrypted: true, database_read_version: 1, database_write_version: 1 }
26820 Sep 22 23:14:31.933 INFO 68728c39-2b9d-474c-a894-338e842ff7ac WaitActive WaitActive WaitActive
26821 Sep 22 23:14:31.933 INFO [2] downstairs client at 127.0.0.1:58078 has UUID a2bfa752-7a6e-4ff9-854c-32e43259f2a8
26822 Sep 22 23:14:31.933 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: a2bfa752-7a6e-4ff9-854c-32e43259f2a8, encrypted: true, database_read_version: 1, database_write_version: 1 }
26823 Sep 22 23:14:31.933 INFO 68728c39-2b9d-474c-a894-338e842ff7ac WaitActive WaitActive WaitActive
26824 Sep 22 23:14:31.934 INFO Current flush_numbers [0..12]: [0, 0]
26825 Sep 22 23:14:31.934 INFO Downstairs has completed Negotiation, task: proc
26826 Sep 22 23:14:31.934 INFO Current flush_numbers [0..12]: [0, 0]
26827 Sep 22 23:14:31.934 INFO Downstairs has completed Negotiation, task: proc
26828 Sep 22 23:14:31.934 INFO Current flush_numbers [0..12]: [0, 0]
26829 Sep 22 23:14:31.934 INFO Downstairs has completed Negotiation, task: proc
26830 Sep 22 23:14:31.934 INFO [0] 68728c39-2b9d-474c-a894-338e842ff7ac (b1e7d83f-1c6e-44a2-a060-99d0bf74c596) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
26831 Sep 22 23:14:31.934 INFO [0] Transition from WaitActive to WaitQuorum
26832 Sep 22 23:14:31.934 WARN [0] new RM replaced this: None
26833 Sep 22 23:14:31.934 INFO [0] Starts reconcile loop
26834 Sep 22 23:14:31.935 INFO [1] 68728c39-2b9d-474c-a894-338e842ff7ac (b1e7d83f-1c6e-44a2-a060-99d0bf74c596) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
26835 Sep 22 23:14:31.935 INFO [1] Transition from WaitActive to WaitQuorum
26836 Sep 22 23:14:31.935 WARN [1] new RM replaced this: None
26837 Sep 22 23:14:31.935 INFO [1] Starts reconcile loop
26838 Sep 22 23:14:31.935 INFO [2] 68728c39-2b9d-474c-a894-338e842ff7ac (b1e7d83f-1c6e-44a2-a060-99d0bf74c596) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
26839 Sep 22 23:14:31.935 INFO [2] Transition from WaitActive to WaitQuorum
26840 Sep 22 23:14:31.935 WARN [2] new RM replaced this: None
26841 Sep 22 23:14:31.935 INFO [2] Starts reconcile loop
26842 Sep 22 23:14:31.935 INFO [0] 127.0.0.1:61980 task reports connection:true
26843 Sep 22 23:14:31.935 INFO 68728c39-2b9d-474c-a894-338e842ff7ac WaitQuorum WaitQuorum WaitQuorum
26844 Sep 22 23:14:31.935 INFO [0]R flush_numbers: [0, 0]
26845 Sep 22 23:14:31.935 INFO [0]R generation: [0, 0]
26846 Sep 22 23:14:31.935 INFO [0]R dirty: [false, false]
26847 Sep 22 23:14:31.935 INFO [1]R flush_numbers: [0, 0]
26848 Sep 22 23:14:31.935 INFO [1]R generation: [0, 0]
26849 Sep 22 23:14:31.935 INFO [1]R dirty: [false, false]
26850 Sep 22 23:14:31.935 INFO [2]R flush_numbers: [0, 0]
26851 Sep 22 23:14:31.935 INFO [2]R generation: [0, 0]
26852 Sep 22 23:14:31.935 INFO [2]R dirty: [false, false]
26853 Sep 22 23:14:31.935 INFO Max found gen is 1
26854 Sep 22 23:14:31.935 INFO Generation requested: 2 >= found:1
26855 Sep 22 23:14:31.935 INFO Next flush: 1
26856 Sep 22 23:14:31.935 INFO listening on 127.0.0.1:0, task: main
26857 Sep 22 23:14:31.935 INFO All extents match
26858 Sep 22 23:14:31.935 INFO No downstairs repair required
26859 Sep 22 23:14:31.935 INFO No initial repair work was required
26860 Sep 22 23:14:31.935 INFO Set Downstairs and Upstairs active
26861 Sep 22 23:14:31.935 INFO listening on 127.0.0.1:0, task: main
26862 Sep 22 23:14:31.935 INFO 68728c39-2b9d-474c-a894-338e842ff7ac is now active with session: b1e7d83f-1c6e-44a2-a060-99d0bf74c596
26863 Sep 22 23:14:31.935 INFO 68728c39-2b9d-474c-a894-338e842ff7ac Set Active after no repair
26864 Sep 22 23:14:31.935 INFO listening on 127.0.0.1:0, task: main
26865 Sep 22 23:14:31.935 INFO Notify all downstairs, region set compare is done.
26866 Sep 22 23:14:31.935 INFO Set check for repair
26867 Sep 22 23:14:31.935 INFO [1] 127.0.0.1:54419 task reports connection:true
26868 Sep 22 23:14:31.935 INFO 68728c39-2b9d-474c-a894-338e842ff7ac Active Active Active
26869 Sep 22 23:14:31.935 INFO Set check for repair
26870 Sep 22 23:14:31.935 INFO [2] 127.0.0.1:58078 task reports connection:true
26871 Sep 22 23:14:31.935 INFO 68728c39-2b9d-474c-a894-338e842ff7ac Active Active Active
26872 Sep 22 23:14:31.935 INFO Set check for repair
26873 Sep 22 23:14:31.935 INFO [0] received reconcile message
26874 Sep 22 23:14:31.935 INFO [0] All repairs completed, exit
26875 Sep 22 23:14:31.935 INFO [0] Starts cmd_loop
26876 Sep 22 23:14:31.935 INFO [1] received reconcile message
26877 Sep 22 23:14:31.935 INFO [1] All repairs completed, exit
26878 Sep 22 23:14:31.935 INFO [1] Starts cmd_loop
26879 Sep 22 23:14:31.935 INFO [2] received reconcile message
268802023-09-22T23:14:31.935ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:39234 remote_addr = 127.0.0.1:43629
26881 Sep 22 23:14:31.935 INFO [2] All repairs completed, exit
26882 Sep 22 23:14:31.935 INFO [2] Starts cmd_loop
26883 The guest has finished waiting for activation
268842023-09-22T23:14:31.936ZINFOcrucible-pantry (datafile): no entry exists for volume 7cd46074-355e-4bc3-ab28-d527993e88ad, constructing...
268852023-09-22T23:14:31.936ZINFOcrucible-pantry (datafile): Upstairs starts
268862023-09-22T23:14:31.936ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
268872023-09-22T23:14:31.936ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
268882023-09-22T23:14:31.936ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: c3b92b8f-1217-4240-b3aa-0b4e2d139bb1
268892023-09-22T23:14:31.936ZINFOcrucible-pantry (datafile): Crucible c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 has session id: b4f0e22e-6012-4edf-bac7-dd4160d48302
268902023-09-22T23:14:31.936ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:45781 looper = 0
268912023-09-22T23:14:31.936ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:52134 looper = 1
268922023-09-22T23:14:31.937ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:60333 looper = 2
268932023-09-22T23:14:31.937ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
268942023-09-22T23:14:31.937ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
268952023-09-22T23:14:31.937ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
268962023-09-22T23:14:31.937ZINFOcrucible-pantry (datafile): volume 7cd46074-355e-4bc3-ab28-d527993e88ad constructed ok
26897 The guest has requested activation
268982023-09-22T23:14:31.937ZINFOcrucible-pantry (datafile): c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 active request set
26899 Sep 22 23:14:31.937 INFO accepted connection from 127.0.0.1:64285, task: main
26900 Sep 22 23:14:31.937 INFO accepted connection from 127.0.0.1:56568, task: main
26901 Sep 22 23:14:31.937 INFO accepted connection from 127.0.0.1:44587, task: main
269022023-09-22T23:14:31.938ZINFOcrucible-pantry (datafile): [0] c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 looper connected looper = 0
269032023-09-22T23:14:31.938ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:45781 in state New
269042023-09-22T23:14:31.938ZINFOcrucible-pantry (datafile): [1] c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 looper connected looper = 1
26905 Sep 22 23:14:31.938 DEBG IO Write 1000 has deps []
269062023-09-22T23:14:31.938ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:52134 in state New
269072023-09-22T23:14:31.938ZINFOcrucible-pantry (datafile): [2] c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 looper connected looper = 2
269082023-09-22T23:14:31.938ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:60333 in state New
26909 Sep 22 23:14:31.938 DEBG up_ds_listen was notified
26910 Sep 22 23:14:31.938 DEBG up_ds_listen process 1000
26911 Sep 22 23:14:31.938 DEBG [A] ack job 1000:1, : downstairs
26912 Sep 22 23:14:31.938 INFO Connection request from c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 with version 4, task: proc
26913 Sep 22 23:14:31.938 INFO upstairs UpstairsConnection { upstairs_id: c3b92b8f-1217-4240-b3aa-0b4e2d139bb1, session_id: 0f529401-1886-4b80-b1e1-a309115d7cef, gen: 1 } connected, version 4, task: proc
26914 Sep 22 23:14:31.938 DEBG up_ds_listen checked 1 jobs, back to waiting
26915 Sep 22 23:14:31.938 INFO Connection request from c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 with version 4, task: proc
26916 Sep 22 23:14:31.938 INFO upstairs UpstairsConnection { upstairs_id: c3b92b8f-1217-4240-b3aa-0b4e2d139bb1, session_id: 0f529401-1886-4b80-b1e1-a309115d7cef, gen: 1 } connected, version 4, task: proc
26917 Sep 22 23:14:31.938 INFO Connection request from c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 with version 4, task: proc
26918 Sep 22 23:14:31.938 INFO upstairs UpstairsConnection { upstairs_id: c3b92b8f-1217-4240-b3aa-0b4e2d139bb1, session_id: 0f529401-1886-4b80-b1e1-a309115d7cef, gen: 1 } connected, version 4, task: proc
269192023-09-22T23:14:31.938ZINFOcrucible-pantry (datafile): [0] c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 (0f529401-1886-4b80-b1e1-a309115d7cef) New New New ds_transition to WaitActive
269202023-09-22T23:14:31.938ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
269212023-09-22T23:14:31.939ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 0f529401-1886-4b80-b1e1-a309115d7cef
269222023-09-22T23:14:31.939ZINFOcrucible-pantry (datafile): [1] c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 (0f529401-1886-4b80-b1e1-a309115d7cef) WaitActive New New ds_transition to WaitActive
269232023-09-22T23:14:31.939ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
26924 Sep 22 23:14:31.939 INFO UpstairsConnection { upstairs_id: c3b92b8f-1217-4240-b3aa-0b4e2d139bb1, session_id: 0f529401-1886-4b80-b1e1-a309115d7cef, gen: 1 } is now active (read-write)
269252023-09-22T23:14:31.939ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 0f529401-1886-4b80-b1e1-a309115d7cef
269262023-09-22T23:14:31.939ZINFOcrucible-pantry (datafile): [2] c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 (0f529401-1886-4b80-b1e1-a309115d7cef) WaitActive WaitActive New ds_transition to WaitActive
26927 Sep 22 23:14:31.939 INFO UpstairsConnection { upstairs_id: c3b92b8f-1217-4240-b3aa-0b4e2d139bb1, session_id: 0f529401-1886-4b80-b1e1-a309115d7cef, gen: 1 } is now active (read-write)
269282023-09-22T23:14:31.939ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
269292023-09-22T23:14:31.939ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 0f529401-1886-4b80-b1e1-a309115d7cef
26930 Sep 22 23:14:31.939 INFO UpstairsConnection { upstairs_id: c3b92b8f-1217-4240-b3aa-0b4e2d139bb1, session_id: 0f529401-1886-4b80-b1e1-a309115d7cef, gen: 1 } is now active (read-write)
269312023-09-22T23:14:31.940ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:45781 has UUID 0023ec13-80c8-426d-b3f0-fea7668a3f39
269322023-09-22T23:14:31.940ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 0023ec13-80c8-426d-b3f0-fea7668a3f39, encrypted: true, database_read_version: 1, database_write_version: 1 }
269332023-09-22T23:14:31.940ZINFOcrucible-pantry (datafile): c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 WaitActive WaitActive WaitActive
269342023-09-22T23:14:31.940ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:52134 has UUID f811b648-86cf-4cf5-927b-38fc90064f11
269352023-09-22T23:14:31.940ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: f811b648-86cf-4cf5-927b-38fc90064f11, encrypted: true, database_read_version: 1, database_write_version: 1 }
269362023-09-22T23:14:31.940ZINFOcrucible-pantry (datafile): c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 WaitActive WaitActive WaitActive
269372023-09-22T23:14:31.940ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:60333 has UUID 4cdee3da-6bae-4f3e-9cc5-e3e96652865a
269382023-09-22T23:14:31.940ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 4cdee3da-6bae-4f3e-9cc5-e3e96652865a, encrypted: true, database_read_version: 1, database_write_version: 1 }
269392023-09-22T23:14:31.940ZINFOcrucible-pantry (datafile): c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 WaitActive WaitActive WaitActive
26940 Sep 22 23:14:31.940 INFO Current flush_numbers [0..12]: [0, 0]
26941 Sep 22 23:14:31.940 INFO Downstairs has completed Negotiation, task: proc
26942 Sep 22 23:14:31.940 INFO Current flush_numbers [0..12]: [0, 0]
26943 Sep 22 23:14:31.941 INFO Downstairs has completed Negotiation, task: proc
26944 Sep 22 23:14:31.941 INFO Current flush_numbers [0..12]: [0, 0]
26945 Sep 22 23:14:31.941 INFO Downstairs has completed Negotiation, task: proc
26946 Sep 22 23:14:31.941 DEBG Write :1000 deps:[] res:true
269472023-09-22T23:14:31.941ZINFOcrucible-pantry (datafile): [0] c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 (0f529401-1886-4b80-b1e1-a309115d7cef) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
269482023-09-22T23:14:31.941ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
269492023-09-22T23:14:31.941ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
269502023-09-22T23:14:31.941ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
269512023-09-22T23:14:31.941ZINFOcrucible-pantry (datafile): [1] c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 (0f529401-1886-4b80-b1e1-a309115d7cef) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
269522023-09-22T23:14:31.941ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
269532023-09-22T23:14:31.941ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
269542023-09-22T23:14:31.941ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
269552023-09-22T23:14:31.941ZINFOcrucible-pantry (datafile): [2] c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 (0f529401-1886-4b80-b1e1-a309115d7cef) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
269562023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
269572023-09-22T23:14:31.942ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
269582023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
269592023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:45781 task reports connection:true
26960 The guest has finished waiting for activation
269612023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 WaitQuorum WaitQuorum WaitQuorum
269622023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
269632023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
269642023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
269652023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
269662023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
26967 Sep 22 23:14:31.942 DEBG Write :1000 deps:[] res:true
269682023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
269692023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
269702023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
269712023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
269722023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): Max found gen is 1
269732023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
269742023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): Next flush: 1
269752023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): All extents match
269762023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): No downstairs repair required
269772023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): No initial repair work was required
269782023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
269792023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 is now active with session: 0f529401-1886-4b80-b1e1-a309115d7cef
269802023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 Set Active after no repair
269812023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
269822023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): Set check for repair
269832023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:52134 task reports connection:true
269842023-09-22T23:14:31.942ZINFOcrucible-pantry (datafile): c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 Active Active Active
269852023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): Set check for repair
269862023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:60333 task reports connection:true
269872023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 Active Active Active
269882023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): Set check for repair
269892023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): [0] received reconcile message
26990 Sep 22 23:14:31.943 DEBG Write :1000 deps:[] res:true
269912023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
269922023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
269932023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): [1] received reconcile message
269942023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
269952023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
269962023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): [2] received reconcile message
269972023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
269982023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
269992023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): volume 7cd46074-355e-4bc3-ab28-d527993e88ad activated ok
27000 Sep 22 23:14:31.943 DEBG IO Read 1001 has deps [JobId(1000)]
270012023-09-22T23:14:31.943ZINFOcrucible-pantry (datafile): volume 7cd46074-355e-4bc3-ab28-d527993e88ad constructed and inserted ok
270022023-09-22T23:14:31.943ZINFOcrucible-pantry (dropshot): request completed latency_us = 6523 local_addr = 127.0.0.1:39234 method = POST remote_addr = 127.0.0.1:43629 req_id = 778caea1-1ec0-4e49-901f-b79a2efc0b03 response_code = 200 uri = /crucible/pantry/0/volume/7cd46074-355e-4bc3-ab28-d527993e88ad
270032023-09-22T23:14:31.943ZINFOcrucible-pantry (dropshot): request completed latency_us = 283 local_addr = 127.0.0.1:39234 method = POST remote_addr = 127.0.0.1:43629 req_id = c9ec6d84-9e90-4647-9c4e-3377db738304 response_code = 200 uri = /crucible/pantry/0/volume/7cd46074-355e-4bc3-ab28-d527993e88ad/validate
270042023-09-22T23:14:31.944ZINFOcrucible-pantry (dropshot): request completed latency_us = 211 local_addr = 127.0.0.1:39234 method = GET remote_addr = 127.0.0.1:43629 req_id = cc741c30-1c70-4362-bd61-0f3521df38b6 response_code = 200 uri = /crucible/pantry/0/job/61daaa4e-2831-4f75-abc5-514826d12648/is_finished
27005 Sep 22 23:14:31.944 DEBG Read :1001 deps:[JobId(1000)] res:true
27006 Sep 22 23:14:31.945 DEBG Read :1001 deps:[JobId(1000)] res:true
270072023-09-22T23:14:31.945ZERROcrucible-pantry (datafile): job 61daaa4e-2831-4f75-abc5-514826d12648 failed with size to validate 100 not divisible by block size 512!
270082023-09-22T23:14:31.945ZINFOcrucible-pantry (dropshot): request completed latency_us = 271 local_addr = 127.0.0.1:39234 method = GET remote_addr = 127.0.0.1:43629 req_id = a9e7cd82-6c3a-4b1e-9466-f154b27894fa response_code = 200 uri = /crucible/pantry/0/job/61daaa4e-2831-4f75-abc5-514826d12648/ok
27009 Sep 22 23:14:31.945 DEBG Read :1001 deps:[JobId(1000)] res:true
270102023-09-22T23:14:31.946ZINFOcrucible-pantry (datafile): detach removing entry for volume 7cd46074-355e-4bc3-ab28-d527993e88ad
270112023-09-22T23:14:31.946ZINFOcrucible-pantry (datafile): detaching volume 7cd46074-355e-4bc3-ab28-d527993e88ad
27012 Sep 22 23:14:31.946 DEBG Flush :1000 extent_limit None deps:[] res:true f:1 g:1
27013 Sep 22 23:14:31.947 DEBG Flush :1000 extent_limit None deps:[] res:true f:1 g:1
27014 Sep 22 23:14:31.947 DEBG Flush :1000 extent_limit None deps:[] res:true f:1 g:1
270152023-09-22T23:14:31.947ZINFOcrucible-pantry (datafile): Request to deactivate this guest
270162023-09-22T23:14:31.947ZINFOcrucible-pantry (datafile): c3b92b8f-1217-4240-b3aa-0b4e2d139bb1 set deactivating.
270172023-09-22T23:14:31.948ZINFOcrucible-pantry (dropshot): request completed latency_us = 2048 local_addr = 127.0.0.1:39234 method = DELETE remote_addr = 127.0.0.1:43629 req_id = d253661b-e826-45b5-a9e2-0c1e7d190a69 response_code = 204 uri = /crucible/pantry/0/volume/7cd46074-355e-4bc3-ab28-d527993e88ad
27018 Sep 22 23:14:31.949 DEBG [0] Read AckReady 1001, : downstairs
27019 Sep 22 23:14:31.952 DEBG [1] Read already AckReady 1001, : downstairs
27020 test test::test_pantry_validate_fail ... ok
27021 Sep 22 23:14:31.954 INFO current number of open files limit 65536 is already the maximum
27022 Sep 22 23:14:31.955 INFO Created new region file "/tmp/downstairs-hyRADvf6/region.json"
27023 Sep 22 23:14:31.955 DEBG [2] Read already AckReady 1001, : downstairs
27024 Sep 22 23:14:31.955 DEBG up_ds_listen was notified
27025 Sep 22 23:14:31.955 DEBG up_ds_listen process 1001
27026 Sep 22 23:14:31.955 DEBG [A] ack job 1001:2, : downstairs
27027 Sep 22 23:14:31.955 DEBG up_ds_listen checked 1 jobs, back to waiting
27028 Sep 22 23:14:31.956 INFO current number of open files limit 65536 is already the maximum
27029 Sep 22 23:14:31.956 INFO Created new region file "/tmp/downstairs-d7nPPm5v/region.json"
27030 Sep 22 23:14:31.958 INFO current number of open files limit 65536 is already the maximum
27031 Sep 22 23:14:31.958 INFO Opened existing region file "/tmp/downstairs-hyRADvf6/region.json"
27032 Sep 22 23:14:31.958 INFO Database read version 1
27033 Sep 22 23:14:31.958 INFO Database write version 1
27034 Sep 22 23:14:31.959 INFO UUID: 81a6906e-92f4-4b89-a16f-ad205aeec853
27035 Sep 22 23:14:31.959 INFO Blocks per extent:5 Total Extents: 2
27036 Sep 22 23:14:31.959 INFO current number of open files limit 65536 is already the maximum
27037 Sep 22 23:14:31.959 INFO Opened existing region file "/tmp/downstairs-d7nPPm5v/region.json"
27038 Sep 22 23:14:31.959 INFO Database read version 1
27039 Sep 22 23:14:31.959 INFO Database write version 1
27040 Sep 22 23:14:31.959 INFO Crucible Version: Crucible Version: 0.0.1
27041 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27042 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27043 rustc: 1.70.0 stable x86_64-unknown-illumos
27044 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27045 Sep 22 23:14:31.959 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27046 Sep 22 23:14:31.959 INFO Using address: 127.0.0.1:63206, task: main
27047 Sep 22 23:14:31.960 INFO Repair listens on 127.0.0.1:0, task: repair
27048 Sep 22 23:14:31.960 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:39983, task: repair
27049 Sep 22 23:14:31.960 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:39983, task: repair
27050 Sep 22 23:14:31.960 INFO listening, local_addr: 127.0.0.1:39983, task: repair
27051 Sep 22 23:14:31.960 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:39983, task: repair
27052 Sep 22 23:14:31.960 INFO UUID: 337e6b08-994a-40bb-9f68-b28e57db149f
27053 Sep 22 23:14:31.960 INFO Blocks per extent:5 Total Extents: 2
27054 Sep 22 23:14:31.960 INFO Using repair address: 127.0.0.1:39983, task: main
27055 Sep 22 23:14:31.960 INFO No SSL acceptor configured, task: main
27056 Sep 22 23:14:31.960 INFO Crucible Version: Crucible Version: 0.0.1
27057 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27058 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27059 rustc: 1.70.0 stable x86_64-unknown-illumos
27060 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27061 Sep 22 23:14:31.960 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27062 Sep 22 23:14:31.960 INFO Using address: 127.0.0.1:63920, task: main
27063 Sep 22 23:14:31.960 INFO current number of open files limit 65536 is already the maximum
27064 Sep 22 23:14:31.961 INFO Created new region file "/tmp/downstairs-JqsrYBQV/region.json"
27065 Sep 22 23:14:31.961 INFO Repair listens on 127.0.0.1:0, task: repair
27066 Sep 22 23:14:31.961 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:55595, task: repair
27067 Sep 22 23:14:31.961 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:55595, task: repair
27068 Sep 22 23:14:31.961 INFO listening, local_addr: 127.0.0.1:55595, task: repair
27069 Sep 22 23:14:31.961 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:55595, task: repair
27070 Sep 22 23:14:31.961 INFO Using repair address: 127.0.0.1:55595, task: main
27071 Sep 22 23:14:31.961 INFO No SSL acceptor configured, task: main
27072 Sep 22 23:14:31.961 INFO A New downstairs: 127.0.0.1:63920
27073 Sep 22 23:14:31.961 INFO Old ops target: [127.0.0.1:61980, 127.0.0.1:54419, 127.0.0.1:58078]
27074 Sep 22 23:14:31.961 INFO New ops target: [127.0.0.1:63920, 127.0.0.1:54419, 127.0.0.1:58078]
27075 Sep 22 23:14:31.961 INFO Replace VCR now: Volume { id: db7ee0ac-f829-46eb-9220-5bfcb72a4e78, block_size: 512, sub_volumes: [Region { block_size: 512, blocks_per_extent: 5, extent_count: 2, opts: CrucibleOpts { id: 68728c39-2b9d-474c-a894-338e842ff7ac, target: [127.0.0.1:63920, 127.0.0.1:54419, 127.0.0.1:58078], lossy: false, flush_timeout: None, key: Some("sW8AMOMPttO5lP6RWUN7Sd/rpjhwgoA0UyujUtsb1EI="), cert_pem: None, key_pem: None, root_cert_pem: None, control: None, read_only: false }, gen: 3 }], read_only_parent: None }
27076 Sep 22 23:14:31.962 INFO Volume db7ee0ac-f829-46eb-9220-5bfcb72a4e78, OK to replace: 127.0.0.1:61980 with 127.0.0.1:63920
27077 Sep 22 23:14:31.962 INFO listening on 127.0.0.1:0, task: main
27078 Sep 22 23:14:31.962 WARN db7ee0ac-f829-46eb-9220-5bfcb72a4e78 request to replace downstairs 127.0.0.1:61980 with 127.0.0.1:63920
27079 Sep 22 23:14:31.962 INFO db7ee0ac-f829-46eb-9220-5bfcb72a4e78 found old target: 127.0.0.1:61980 at 0
27080 Sep 22 23:14:31.962 INFO db7ee0ac-f829-46eb-9220-5bfcb72a4e78 replacing old: 127.0.0.1:61980 at 0
27081 Sep 22 23:14:31.962 INFO [0] client skip 2 in process jobs because fault, : downstairs
27082 Sep 22 23:14:31.962 INFO [0] changed 0 jobs to fault skipped, : downstairs
27083 Sep 22 23:14:31.962 INFO [0] 68728c39-2b9d-474c-a894-338e842ff7ac (b1e7d83f-1c6e-44a2-a060-99d0bf74c596) Active Active Active ds_transition to Replacing
27084 Sep 22 23:14:31.962 INFO [0] Transition from Active to Replacing
27085 Sep 22 23:14:31.962 INFO Replace downstairs underway for db7ee0ac-f829-46eb-9220-5bfcb72a4e78
27086 Sep 22 23:14:31.962 INFO send read now
27087 Sep 22 23:14:31.962 DEBG IO Read 1002 has deps [JobId(1000)]
27088 Sep 22 23:14:31.963 DEBG Read :1002 deps:[JobId(1000)] res:true
27089 Sep 22 23:14:31.964 DEBG Read :1002 deps:[JobId(1000)] res:true
27090 Sep 22 23:14:31.964 INFO current number of open files limit 65536 is already the maximum
27091 Sep 22 23:14:31.964 INFO Opened existing region file "/tmp/downstairs-JqsrYBQV/region.json"
27092 Sep 22 23:14:31.964 INFO Database read version 1
27093 Sep 22 23:14:31.964 INFO Database write version 1
27094 Sep 22 23:14:31.965 INFO UUID: b3079a2e-be41-4aab-ab6a-2a7b8c6e517f
27095 Sep 22 23:14:31.965 INFO Blocks per extent:5 Total Extents: 2
27096 Sep 22 23:14:31.965 INFO Crucible Version: Crucible Version: 0.0.1
27097 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27098 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27099 rustc: 1.70.0 stable x86_64-unknown-illumos
27100 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27101 Sep 22 23:14:31.965 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27102 Sep 22 23:14:31.965 INFO Using address: 127.0.0.1:55744, task: main
27103 Sep 22 23:14:31.965 INFO Repair listens on 127.0.0.1:0, task: repair
27104 Sep 22 23:14:31.965 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:49915, task: repair
27105 Sep 22 23:14:31.965 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:49915, task: repair
27106 Sep 22 23:14:31.965 INFO listening, local_addr: 127.0.0.1:49915, task: repair
27107 Sep 22 23:14:31.966 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:49915, task: repair
27108 Sep 22 23:14:31.966 INFO Using repair address: 127.0.0.1:49915, task: main
27109 Sep 22 23:14:31.966 INFO No SSL acceptor configured, task: main
27110 Sep 22 23:14:31.966 INFO current number of open files limit 65536 is already the maximum
27111 Sep 22 23:14:31.966 INFO Created new region file "/tmp/downstairs-94qyRc9J/region.json"
27112 Sep 22 23:14:31.968 DEBG [1] Read AckReady 1002, : downstairs
27113 Sep 22 23:14:31.969 INFO current number of open files limit 65536 is already the maximum
27114 Sep 22 23:14:31.969 INFO Opened existing region file "/tmp/downstairs-94qyRc9J/region.json"
27115 Sep 22 23:14:31.969 INFO Database read version 1
27116 Sep 22 23:14:31.969 INFO Database write version 1
27117 Sep 22 23:14:31.970 INFO UUID: 2d772065-2982-4cf2-ae0e-3cc9afca219b
27118 Sep 22 23:14:31.970 INFO Blocks per extent:5 Total Extents: 2
27119 Sep 22 23:14:31.970 INFO Crucible Version: Crucible Version: 0.0.1
27120 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27121 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27122 rustc: 1.70.0 stable x86_64-unknown-illumos
27123 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27124 Sep 22 23:14:31.970 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27125 Sep 22 23:14:31.970 INFO Using address: 127.0.0.1:59203, task: main
27126 Sep 22 23:14:31.970 INFO Repair listens on 127.0.0.1:0, task: repair
27127 Sep 22 23:14:31.970 DEBG [2] Read already AckReady 1002, : downstairs
27128 Sep 22 23:14:31.970 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:52811, task: repair
27129 Sep 22 23:14:31.970 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:52811, task: repair
27130 Sep 22 23:14:31.970 DEBG up_ds_listen was notified
27131 Sep 22 23:14:31.970 INFO listening, local_addr: 127.0.0.1:52811, task: repair
27132 Sep 22 23:14:31.971 DEBG up_ds_listen process 1002
27133 Sep 22 23:14:31.971 DEBG [A] ack job 1002:3, : downstairs
27134 Sep 22 23:14:31.971 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:52811, task: repair
27135 Sep 22 23:14:31.971 INFO Using repair address: 127.0.0.1:52811, task: main
27136 Sep 22 23:14:31.971 INFO No SSL acceptor configured, task: main
27137 Sep 22 23:14:31.971 DEBG up_ds_listen checked 1 jobs, back to waiting
27138 Sep 22 23:14:31.971 INFO Upstairs starts
27139 Sep 22 23:14:31.971 INFO Crucible Version: BuildInfo {
27140 version: "0.0.1",
27141 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
27142 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
27143 git_branch: "main",
27144 rustc_semver: "1.70.0",
27145 rustc_channel: "stable",
27146 rustc_host_triple: "x86_64-unknown-illumos",
27147 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
27148 cargo_triple: "x86_64-unknown-illumos",
27149 debug: true,
27150 opt_level: 0,
27151 }
27152 Sep 22 23:14:31.971 INFO Upstairs <-> Downstairs Message Version: 4
27153 Sep 22 23:14:31.971 INFO Crucible stats registered with UUID: d6abcc00-47da-425e-a424-9934c8a21974
27154 Sep 22 23:14:31.971 INFO Crucible d6abcc00-47da-425e-a424-9934c8a21974 has session id: 62be0b39-953d-4d8c-883d-97dac094cc50
27155 Sep 22 23:14:31.971 INFO listening on 127.0.0.1:0, task: main
27156 Sep 22 23:14:31.971 INFO listening on 127.0.0.1:0, task: main
27157 Sep 22 23:14:31.971 INFO listening on 127.0.0.1:0, task: main
27158 Sep 22 23:14:31.972 INFO [0] connecting to 127.0.0.1:63206, looper: 0
27159 Sep 22 23:14:31.972 INFO [1] connecting to 127.0.0.1:55744, looper: 1
27160 Sep 22 23:14:31.972 INFO [2] connecting to 127.0.0.1:59203, looper: 2
27161 Sep 22 23:14:31.972 INFO up_listen starts, task: up_listen
27162 Sep 22 23:14:31.972 INFO Wait for all three downstairs to come online
27163 Sep 22 23:14:31.972 INFO Flush timeout: 0.5
27164 Sep 22 23:14:31.972 INFO [1] d6abcc00-47da-425e-a424-9934c8a21974 looper connected, looper: 1
27165 Sep 22 23:14:31.972 INFO [1] Proc runs for 127.0.0.1:55744 in state New
27166 Sep 22 23:14:31.972 INFO [2] d6abcc00-47da-425e-a424-9934c8a21974 looper connected, looper: 2
27167 Sep 22 23:14:31.972 INFO [2] Proc runs for 127.0.0.1:59203 in state New
27168 Sep 22 23:14:31.972 INFO [0] d6abcc00-47da-425e-a424-9934c8a21974 looper connected, looper: 0
27169 Sep 22 23:14:31.973 INFO [0] Proc runs for 127.0.0.1:63206 in state New
27170 Sep 22 23:14:31.973 INFO accepted connection from 127.0.0.1:42562, task: main
27171 Sep 22 23:14:31.973 INFO accepted connection from 127.0.0.1:62245, task: main
27172 Sep 22 23:14:31.973 INFO accepted connection from 127.0.0.1:58254, task: main
27173 Sep 22 23:14:31.973 INFO Connection request from d6abcc00-47da-425e-a424-9934c8a21974 with version 4, task: proc
27174 Sep 22 23:14:31.973 INFO upstairs UpstairsConnection { upstairs_id: d6abcc00-47da-425e-a424-9934c8a21974, session_id: 0ddebd5a-0043-49cf-93bb-961f6f1d3dfa, gen: 1 } connected, version 4, task: proc
27175 Sep 22 23:14:31.973 INFO Connection request from d6abcc00-47da-425e-a424-9934c8a21974 with version 4, task: proc
27176 Sep 22 23:14:31.973 INFO upstairs UpstairsConnection { upstairs_id: d6abcc00-47da-425e-a424-9934c8a21974, session_id: 0ddebd5a-0043-49cf-93bb-961f6f1d3dfa, gen: 1 } connected, version 4, task: proc
27177 Sep 22 23:14:31.973 INFO Connection request from d6abcc00-47da-425e-a424-9934c8a21974 with version 4, task: proc
27178 Sep 22 23:14:31.973 INFO upstairs UpstairsConnection { upstairs_id: d6abcc00-47da-425e-a424-9934c8a21974, session_id: 0ddebd5a-0043-49cf-93bb-961f6f1d3dfa, gen: 1 } connected, version 4, task: proc
27179 Sep 22 23:14:31.974 INFO [1] d6abcc00-47da-425e-a424-9934c8a21974 (0ddebd5a-0043-49cf-93bb-961f6f1d3dfa) New New New ds_transition to WaitActive
27180 Sep 22 23:14:31.974 INFO [1] Transition from New to WaitActive
27181 Sep 22 23:14:31.974 INFO [2] d6abcc00-47da-425e-a424-9934c8a21974 (0ddebd5a-0043-49cf-93bb-961f6f1d3dfa) New WaitActive New ds_transition to WaitActive
27182 Sep 22 23:14:31.974 INFO [2] Transition from New to WaitActive
27183 Sep 22 23:14:31.974 INFO [0] d6abcc00-47da-425e-a424-9934c8a21974 (0ddebd5a-0043-49cf-93bb-961f6f1d3dfa) New WaitActive WaitActive ds_transition to WaitActive
27184 Sep 22 23:14:31.974 INFO [0] Transition from New to WaitActive
27185 The guest has requested activation
27186 Sep 22 23:14:31.974 INFO d6abcc00-47da-425e-a424-9934c8a21974 active request set
27187 Sep 22 23:14:31.974 INFO [0] received activate with gen 1
27188 Sep 22 23:14:31.974 INFO [0] client got ds_active_rx, promote! session 0ddebd5a-0043-49cf-93bb-961f6f1d3dfa
27189 Sep 22 23:14:31.974 INFO [1] received activate with gen 1
27190 Sep 22 23:14:31.974 INFO [1] client got ds_active_rx, promote! session 0ddebd5a-0043-49cf-93bb-961f6f1d3dfa
27191 Sep 22 23:14:31.975 INFO [2] received activate with gen 1
27192 Sep 22 23:14:31.975 INFO [2] client got ds_active_rx, promote! session 0ddebd5a-0043-49cf-93bb-961f6f1d3dfa
27193 Sep 22 23:14:31.975 INFO UpstairsConnection { upstairs_id: d6abcc00-47da-425e-a424-9934c8a21974, session_id: 0ddebd5a-0043-49cf-93bb-961f6f1d3dfa, gen: 1 } is now active (read-write)
27194 Sep 22 23:14:31.975 INFO UpstairsConnection { upstairs_id: d6abcc00-47da-425e-a424-9934c8a21974, session_id: 0ddebd5a-0043-49cf-93bb-961f6f1d3dfa, gen: 1 } is now active (read-write)
27195 Sep 22 23:14:31.975 INFO UpstairsConnection { upstairs_id: d6abcc00-47da-425e-a424-9934c8a21974, session_id: 0ddebd5a-0043-49cf-93bb-961f6f1d3dfa, gen: 1 } is now active (read-write)
27196 test test::test_volume_replace_vcr ... ok
27197 Sep 22 23:14:31.975 INFO [1] downstairs client at 127.0.0.1:55744 has UUID b3079a2e-be41-4aab-ab6a-2a7b8c6e517f
27198 Sep 22 23:14:31.975 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: b3079a2e-be41-4aab-ab6a-2a7b8c6e517f, encrypted: true, database_read_version: 1, database_write_version: 1 }
27199 Sep 22 23:14:31.975 INFO d6abcc00-47da-425e-a424-9934c8a21974 WaitActive WaitActive WaitActive
27200 Sep 22 23:14:31.975 INFO [2] downstairs client at 127.0.0.1:59203 has UUID 2d772065-2982-4cf2-ae0e-3cc9afca219b
27201 Sep 22 23:14:31.975 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 2d772065-2982-4cf2-ae0e-3cc9afca219b, encrypted: true, database_read_version: 1, database_write_version: 1 }
27202 Sep 22 23:14:31.976 INFO d6abcc00-47da-425e-a424-9934c8a21974 WaitActive WaitActive WaitActive
27203 Sep 22 23:14:31.976 INFO [0] downstairs client at 127.0.0.1:63206 has UUID 81a6906e-92f4-4b89-a16f-ad205aeec853
27204 Sep 22 23:14:31.976 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 81a6906e-92f4-4b89-a16f-ad205aeec853, encrypted: true, database_read_version: 1, database_write_version: 1 }
27205 Sep 22 23:14:31.976 INFO d6abcc00-47da-425e-a424-9934c8a21974 WaitActive WaitActive WaitActive
27206 Sep 22 23:14:31.976 INFO Current flush_numbers [0..12]: [0, 0]
27207 Sep 22 23:14:31.976 INFO Downstairs has completed Negotiation, task: proc
27208 Sep 22 23:14:31.976 INFO Current flush_numbers [0..12]: [0, 0]
27209 Sep 22 23:14:31.976 INFO Downstairs has completed Negotiation, task: proc
27210 Sep 22 23:14:31.976 INFO Current flush_numbers [0..12]: [0, 0]
27211 Sep 22 23:14:31.976 INFO Downstairs has completed Negotiation, task: proc
27212 Sep 22 23:14:31.977 INFO [1] d6abcc00-47da-425e-a424-9934c8a21974 (0ddebd5a-0043-49cf-93bb-961f6f1d3dfa) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
27213 Sep 22 23:14:31.977 INFO [1] Transition from WaitActive to WaitQuorum
27214 Sep 22 23:14:31.977 WARN [1] new RM replaced this: None
27215 Sep 22 23:14:31.977 INFO [1] Starts reconcile loop
27216 Sep 22 23:14:31.977 INFO [2] d6abcc00-47da-425e-a424-9934c8a21974 (0ddebd5a-0043-49cf-93bb-961f6f1d3dfa) WaitActive WaitQuorum WaitActive ds_transition to WaitQuorum
27217 Sep 22 23:14:31.977 INFO [2] Transition from WaitActive to WaitQuorum
27218 Sep 22 23:14:31.977 WARN [2] new RM replaced this: None
27219 Sep 22 23:14:31.977 INFO [2] Starts reconcile loop
27220 Sep 22 23:14:31.977 INFO [0] d6abcc00-47da-425e-a424-9934c8a21974 (0ddebd5a-0043-49cf-93bb-961f6f1d3dfa) WaitActive WaitQuorum WaitQuorum ds_transition to WaitQuorum
27221 Sep 22 23:14:31.977 INFO [0] Transition from WaitActive to WaitQuorum
27222 Sep 22 23:14:31.977 WARN [0] new RM replaced this: None
27223 Sep 22 23:14:31.977 INFO [0] Starts reconcile loop
27224 Sep 22 23:14:31.977 INFO [1] 127.0.0.1:55744 task reports connection:true
27225 Sep 22 23:14:31.977 INFO d6abcc00-47da-425e-a424-9934c8a21974 WaitQuorum WaitQuorum WaitQuorum
27226 Sep 22 23:14:31.977 INFO [0]R flush_numbers: [0, 0]
27227 Sep 22 23:14:31.977 INFO [0]R generation: [0, 0]
27228 Sep 22 23:14:31.977 INFO [0]R dirty: [false, false]
27229 Sep 22 23:14:31.977 INFO [1]R flush_numbers: [0, 0]
27230 Sep 22 23:14:31.977 INFO [1]R generation: [0, 0]
27231 Sep 22 23:14:31.977 INFO [1]R dirty: [false, false]
27232 Sep 22 23:14:31.977 INFO [2]R flush_numbers: [0, 0]
27233 Sep 22 23:14:31.977 INFO [2]R generation: [0, 0]
27234 Sep 22 23:14:31.977 INFO [2]R dirty: [false, false]
27235 Sep 22 23:14:31.977 INFO Max found gen is 1
27236 Sep 22 23:14:31.977 INFO Generation requested: 1 >= found:1
27237 Sep 22 23:14:31.977 INFO Next flush: 1
27238 Sep 22 23:14:31.977 INFO All extents match
27239 Sep 22 23:14:31.977 INFO No downstairs repair required
27240 Sep 22 23:14:31.977 INFO No initial repair work was required
27241 Sep 22 23:14:31.977 INFO Set Downstairs and Upstairs active
27242 Sep 22 23:14:31.977 INFO d6abcc00-47da-425e-a424-9934c8a21974 is now active with session: 0ddebd5a-0043-49cf-93bb-961f6f1d3dfa
27243 Sep 22 23:14:31.977 INFO d6abcc00-47da-425e-a424-9934c8a21974 Set Active after no repair
27244 Sep 22 23:14:31.977 INFO Notify all downstairs, region set compare is done.
27245 Sep 22 23:14:31.977 INFO Set check for repair
27246 Sep 22 23:14:31.977 INFO [2] 127.0.0.1:59203 task reports connection:true
27247 Sep 22 23:14:31.977 INFO d6abcc00-47da-425e-a424-9934c8a21974 Active Active Active
27248 Sep 22 23:14:31.977 INFO Set check for repair
27249 Sep 22 23:14:31.977 INFO [0] 127.0.0.1:63206 task reports connection:true
27250 Sep 22 23:14:31.977 INFO d6abcc00-47da-425e-a424-9934c8a21974 Active Active Active
27251 Sep 22 23:14:31.977 INFO Set check for repair
27252 Sep 22 23:14:31.977 INFO [0] received reconcile message
27253 Sep 22 23:14:31.977 INFO [0] All repairs completed, exit
27254 Sep 22 23:14:31.977 INFO [0] Starts cmd_loop
27255 Sep 22 23:14:31.977 INFO [1] received reconcile message
27256 Sep 22 23:14:31.977 INFO [1] All repairs completed, exit
27257 Sep 22 23:14:31.977 INFO [1] Starts cmd_loop
27258 Sep 22 23:14:31.977 INFO [2] received reconcile message
27259 Sep 22 23:14:31.977 INFO [2] All repairs completed, exit
27260 Sep 22 23:14:31.977 INFO [2] Starts cmd_loop
27261 The guest has finished waiting for activation
27262 test test::volume_zero_length_io ... ok
27263 Sep 22 23:14:31.995 INFO listening on 127.0.0.1:0, task: main
27264 Sep 22 23:14:31.995 INFO listening on 127.0.0.1:0, task: main
27265 Sep 22 23:14:31.995 INFO listening on 127.0.0.1:0, task: main
272662023-09-22T23:14:31.995ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:54057 remote_addr = 127.0.0.1:34332
272672023-09-22T23:14:31.996ZINFOcrucible-pantry (datafile): no entry exists for volume 7d6e799f-ffff-4301-8a22-abec78155ecf, constructing...
272682023-09-22T23:14:31.996ZINFOcrucible-pantry (datafile): Upstairs starts
272692023-09-22T23:14:31.996ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
272702023-09-22T23:14:31.996ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
272712023-09-22T23:14:31.996ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 4444e318-0130-42a4-85b4-7dec4633fa58
272722023-09-22T23:14:31.996ZINFOcrucible-pantry (datafile): Crucible 4444e318-0130-42a4-85b4-7dec4633fa58 has session id: 5ec47966-8d02-4373-9261-df6674769b26
272732023-09-22T23:14:31.996ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:62247 looper = 0
272742023-09-22T23:14:31.996ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:39430 looper = 1
272752023-09-22T23:14:31.996ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:44923 looper = 2
272762023-09-22T23:14:31.997ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
272772023-09-22T23:14:31.997ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
272782023-09-22T23:14:31.997ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
272792023-09-22T23:14:31.997ZINFOcrucible-pantry (datafile): volume 7d6e799f-ffff-4301-8a22-abec78155ecf constructed ok
27280 The guest has requested activation
272812023-09-22T23:14:31.997ZINFOcrucible-pantry (datafile): 4444e318-0130-42a4-85b4-7dec4633fa58 active request set
272822023-09-22T23:14:31.997ZINFOcrucible-pantry (datafile): [0] 4444e318-0130-42a4-85b4-7dec4633fa58 looper connected looper = 0
272832023-09-22T23:14:31.997ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:62247 in state New
272842023-09-22T23:14:31.997ZINFOcrucible-pantry (datafile): [1] 4444e318-0130-42a4-85b4-7dec4633fa58 looper connected looper = 1
272852023-09-22T23:14:31.997ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:39430 in state New
27286 Sep 22 23:14:31.997 INFO accepted connection from 127.0.0.1:56510, task: main
272872023-09-22T23:14:31.997ZINFOcrucible-pantry (datafile): [2] 4444e318-0130-42a4-85b4-7dec4633fa58 looper connected looper = 2
272882023-09-22T23:14:31.997ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:44923 in state New
27289 Sep 22 23:14:31.997 INFO accepted connection from 127.0.0.1:38856, task: main
27290 Sep 22 23:14:31.997 INFO accepted connection from 127.0.0.1:57737, task: main
27291 Sep 22 23:14:31.998 INFO Connection request from 4444e318-0130-42a4-85b4-7dec4633fa58 with version 4, task: proc
27292 Sep 22 23:14:31.998 INFO upstairs UpstairsConnection { upstairs_id: 4444e318-0130-42a4-85b4-7dec4633fa58, session_id: 93f845b8-2585-4545-9c84-03a1fb12d486, gen: 1 } connected, version 4, task: proc
27293 Sep 22 23:14:31.998 INFO Connection request from 4444e318-0130-42a4-85b4-7dec4633fa58 with version 4, task: proc
27294 Sep 22 23:14:31.998 INFO upstairs UpstairsConnection { upstairs_id: 4444e318-0130-42a4-85b4-7dec4633fa58, session_id: 93f845b8-2585-4545-9c84-03a1fb12d486, gen: 1 } connected, version 4, task: proc
27295 Sep 22 23:14:31.998 INFO Connection request from 4444e318-0130-42a4-85b4-7dec4633fa58 with version 4, task: proc
27296 Sep 22 23:14:31.998 INFO upstairs UpstairsConnection { upstairs_id: 4444e318-0130-42a4-85b4-7dec4633fa58, session_id: 93f845b8-2585-4545-9c84-03a1fb12d486, gen: 1 } connected, version 4, task: proc
272972023-09-22T23:14:31.998ZINFOcrucible-pantry (datafile): [0] 4444e318-0130-42a4-85b4-7dec4633fa58 (93f845b8-2585-4545-9c84-03a1fb12d486) New New New ds_transition to WaitActive
272982023-09-22T23:14:31.998ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
272992023-09-22T23:14:31.998ZINFOcrucible-pantry (datafile): [0] client is_active_req TRUE, promote! session 93f845b8-2585-4545-9c84-03a1fb12d486
273002023-09-22T23:14:31.998ZINFOcrucible-pantry (datafile): [1] 4444e318-0130-42a4-85b4-7dec4633fa58 (93f845b8-2585-4545-9c84-03a1fb12d486) WaitActive New New ds_transition to WaitActive
273012023-09-22T23:14:31.998ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
27302 Sep 22 23:14:31.998 INFO UpstairsConnection { upstairs_id: 4444e318-0130-42a4-85b4-7dec4633fa58, session_id: 93f845b8-2585-4545-9c84-03a1fb12d486, gen: 1 } is now active (read-write)
273032023-09-22T23:14:31.998ZINFOcrucible-pantry (datafile): [1] client is_active_req TRUE, promote! session 93f845b8-2585-4545-9c84-03a1fb12d486
273042023-09-22T23:14:31.998ZINFOcrucible-pantry (datafile): [2] 4444e318-0130-42a4-85b4-7dec4633fa58 (93f845b8-2585-4545-9c84-03a1fb12d486) WaitActive WaitActive New ds_transition to WaitActive
27305 Sep 22 23:14:31.998 INFO UpstairsConnection { upstairs_id: 4444e318-0130-42a4-85b4-7dec4633fa58, session_id: 93f845b8-2585-4545-9c84-03a1fb12d486, gen: 1 } is now active (read-write)
273062023-09-22T23:14:31.998ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
273072023-09-22T23:14:31.998ZINFOcrucible-pantry (datafile): [2] client is_active_req TRUE, promote! session 93f845b8-2585-4545-9c84-03a1fb12d486
27308 Sep 22 23:14:31.998 INFO UpstairsConnection { upstairs_id: 4444e318-0130-42a4-85b4-7dec4633fa58, session_id: 93f845b8-2585-4545-9c84-03a1fb12d486, gen: 1 } is now active (read-write)
273092023-09-22T23:14:31.999ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:62247 has UUID fc04a5c8-8424-4fbf-8cbf-45e4299fa04d
273102023-09-22T23:14:31.999ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: fc04a5c8-8424-4fbf-8cbf-45e4299fa04d, encrypted: true, database_read_version: 1, database_write_version: 1 }
273112023-09-22T23:14:31.999ZINFOcrucible-pantry (datafile): 4444e318-0130-42a4-85b4-7dec4633fa58 WaitActive WaitActive WaitActive
273122023-09-22T23:14:31.999ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:39430 has UUID 64eb55ec-79b8-4aa0-81d6-70fa12931989
273132023-09-22T23:14:31.999ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 64eb55ec-79b8-4aa0-81d6-70fa12931989, encrypted: true, database_read_version: 1, database_write_version: 1 }
273142023-09-22T23:14:31.999ZINFOcrucible-pantry (datafile): 4444e318-0130-42a4-85b4-7dec4633fa58 WaitActive WaitActive WaitActive
273152023-09-22T23:14:31.999ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:44923 has UUID 6e47f0ac-72f2-4910-8085-d56269d7e5b1
27316 Sep 22 23:14:31.999 INFO Current flush_numbers [0..12]: [0, 0]
273172023-09-22T23:14:31.999ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 5, shift: 9 }, extent_count: 2, uuid: 6e47f0ac-72f2-4910-8085-d56269d7e5b1, encrypted: true, database_read_version: 1, database_write_version: 1 }
273182023-09-22T23:14:31.999ZINFOcrucible-pantry (datafile): 4444e318-0130-42a4-85b4-7dec4633fa58 WaitActive WaitActive WaitActive
27319 Sep 22 23:14:31.999 INFO Downstairs has completed Negotiation, task: proc
27320 Sep 22 23:14:31.999 INFO Current flush_numbers [0..12]: [0, 0]
27321 Sep 22 23:14:31.999 INFO Downstairs has completed Negotiation, task: proc
27322 Sep 22 23:14:31.999 INFO Current flush_numbers [0..12]: [0, 0]
27323 Sep 22 23:14:32.000 INFO Downstairs has completed Negotiation, task: proc
273242023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [0] 4444e318-0130-42a4-85b4-7dec4633fa58 (93f845b8-2585-4545-9c84-03a1fb12d486) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
273252023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
273262023-09-22T23:14:32.000ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
273272023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
273282023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [1] 4444e318-0130-42a4-85b4-7dec4633fa58 (93f845b8-2585-4545-9c84-03a1fb12d486) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
273292023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
273302023-09-22T23:14:32.000ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
27331 The guest has finished waiting for activation
273322023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
273332023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [2] 4444e318-0130-42a4-85b4-7dec4633fa58 (93f845b8-2585-4545-9c84-03a1fb12d486) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
273342023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
273352023-09-22T23:14:32.000ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
273362023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
273372023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:62247 task reports connection:true
273382023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): 4444e318-0130-42a4-85b4-7dec4633fa58 WaitQuorum WaitQuorum WaitQuorum
273392023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [0]R flush_numbers: [0, 0]
273402023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [0]R generation: [0, 0]
273412023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [0]R dirty: [false, false]
273422023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [1]R flush_numbers: [0, 0]
273432023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [1]R generation: [0, 0]
273442023-09-22T23:14:32.000ZINFOcrucible-pantry (datafile): [1]R dirty: [false, false]
273452023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): [2]R flush_numbers: [0, 0]
273462023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): [2]R generation: [0, 0]
273472023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): [2]R dirty: [false, false]
273482023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): Max found gen is 1
273492023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): Generation requested: 1 >= found:1
273502023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): Next flush: 1
273512023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): All extents match
273522023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): No downstairs repair required
273532023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): No initial repair work was required
273542023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
273552023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): 4444e318-0130-42a4-85b4-7dec4633fa58 is now active with session: 93f845b8-2585-4545-9c84-03a1fb12d486
273562023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): 4444e318-0130-42a4-85b4-7dec4633fa58 Set Active after no repair
273572023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
273582023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): Set check for repair
273592023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:39430 task reports connection:true
273602023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): 4444e318-0130-42a4-85b4-7dec4633fa58 Active Active Active
273612023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): Set check for repair
273622023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:44923 task reports connection:true
273632023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): 4444e318-0130-42a4-85b4-7dec4633fa58 Active Active Active
273642023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): Set check for repair
273652023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): [0] received reconcile message
273662023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
273672023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
273682023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): [1] received reconcile message
273692023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
273702023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
273712023-09-22T23:14:32.001ZINFOcrucible-pantry (datafile): [2] received reconcile message
273722023-09-22T23:14:32.002ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
273732023-09-22T23:14:32.002ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
273742023-09-22T23:14:32.002ZINFOcrucible-pantry (datafile): volume 7d6e799f-ffff-4301-8a22-abec78155ecf activated ok
273752023-09-22T23:14:32.002ZINFOcrucible-pantry (datafile): volume 7d6e799f-ffff-4301-8a22-abec78155ecf constructed and inserted ok
273762023-09-22T23:14:32.002ZINFOcrucible-pantry (dropshot): request completed latency_us = 4669 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:34332 req_id = c001bbe6-4785-43eb-915e-b9567e602310 response_code = 200 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf
273772023-09-22T23:14:32.004ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:54057 remote_addr = 127.0.0.1:62949
273782023-09-22T23:14:32.004ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:54057 remote_addr = 127.0.0.1:53307
273792023-09-22T23:14:32.004ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:54057 remote_addr = 127.0.0.1:35509
273802023-09-22T23:14:32.004ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:54057 remote_addr = 127.0.0.1:42885
273812023-09-22T23:14:32.004ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:54057 remote_addr = 127.0.0.1:65096
273822023-09-22T23:14:32.005ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:54057 remote_addr = 127.0.0.1:59818
273832023-09-22T23:14:32.005ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:54057 remote_addr = 127.0.0.1:56187
273842023-09-22T23:14:32.005ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:54057 remote_addr = 127.0.0.1:56761
273852023-09-22T23:14:32.005ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:54057 remote_addr = 127.0.0.1:61150
273862023-09-22T23:14:32.006ZINFOcrucible-pantry (dropshot): request completed latency_us = 2404 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:34332 req_id = 30056ddb-6ed0-4f18-bd1e-44768cf7bcd4 response_code = 204 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_write
27387 Sep 22 23:14:32.009 DEBG Write :1000 deps:[] res:true
27388 Sep 22 23:14:32.009 DEBG Write :1000 deps:[] res:true
27389 Sep 22 23:14:32.009 DEBG Write :1000 deps:[] res:true
273902023-09-22T23:14:32.014ZINFOcrucible-pantry (dropshot): request completed latency_us = 8036 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:62949 req_id = 126feb0c-69ef-46d5-acd7-60170937aef3 response_code = 204 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_write
273912023-09-22T23:14:32.014ZINFOcrucible-pantry (dropshot): request completed latency_us = 7971 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:53307 req_id = cfe6fab7-8bc9-4f9d-bcac-0b4d09e3f00c response_code = 204 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_write
273922023-09-22T23:14:32.014ZINFOcrucible-pantry (dropshot): request completed latency_us = 7870 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:35509 req_id = 49ea016f-8a0c-4f2e-a30a-1d346bf924a8 response_code = 204 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_write
273932023-09-22T23:14:32.015ZINFOcrucible-pantry (dropshot): request completed latency_us = 7768 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:42885 req_id = 4a77812b-dcb9-491a-892f-62729e36fc4c response_code = 204 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_write
273942023-09-22T23:14:32.015ZINFOcrucible-pantry (dropshot): request completed latency_us = 7660 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:65096 req_id = 0b76f18e-13f9-4866-bac8-2583255a6088 response_code = 204 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_write
273952023-09-22T23:14:32.015ZINFOcrucible-pantry (dropshot): request completed latency_us = 7558 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:59818 req_id = 0c8c4ab0-856a-4b10-b259-3efcbe1b4380 response_code = 204 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_write
273962023-09-22T23:14:32.015ZINFOcrucible-pantry (dropshot): request completed latency_us = 7458 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:56187 req_id = a3429450-5aa8-48f5-92b2-585248ff823e response_code = 204 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_write
273972023-09-22T23:14:32.015ZINFOcrucible-pantry (dropshot): request completed latency_us = 7359 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:56761 req_id = 8554dbf9-0c26-48d0-8026-5a8aa2aed1f6 response_code = 204 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_write
273982023-09-22T23:14:32.015ZINFOcrucible-pantry (dropshot): request completed latency_us = 7253 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:61150 req_id = 61ebdd06-77f5-49ab-9634-b6200427a6c8 response_code = 204 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_write
27399 Sep 22 23:14:32.015 DEBG Write :1001 deps:[] res:true
27400 Sep 22 23:14:32.015 DEBG Write :1002 deps:[] res:true
27401 Sep 22 23:14:32.015 DEBG Write :1003 deps:[] res:true
27402 Sep 22 23:14:32.015 DEBG Write :1004 deps:[] res:true
27403 Sep 22 23:14:32.016 DEBG Write :1005 deps:[] res:true
27404 Sep 22 23:14:32.016 DEBG Write :1006 deps:[] res:true
27405 Sep 22 23:14:32.016 DEBG Write :1007 deps:[] res:true
27406 Sep 22 23:14:32.016 DEBG Write :1008 deps:[] res:true
27407 Sep 22 23:14:32.016 DEBG Write :1009 deps:[] res:true
27408 Sep 22 23:14:32.018 DEBG Write :1001 deps:[] res:true
27409 Sep 22 23:14:32.019 DEBG Write :1002 deps:[] res:true
27410 Sep 22 23:14:32.019 DEBG Write :1003 deps:[] res:true
27411 Sep 22 23:14:32.019 DEBG Write :1004 deps:[] res:true
27412 Sep 22 23:14:32.019 DEBG Write :1005 deps:[] res:true
27413 Sep 22 23:14:32.019 DEBG Write :1006 deps:[] res:true
27414 Sep 22 23:14:32.019 DEBG Write :1007 deps:[] res:true
27415 Sep 22 23:14:32.020 DEBG Write :1008 deps:[] res:true
27416 Sep 22 23:14:32.020 DEBG Write :1009 deps:[] res:true
27417 Sep 22 23:14:32.020 DEBG Write :1001 deps:[] res:true
27418 Sep 22 23:14:32.020 DEBG Write :1002 deps:[] res:true
27419 Sep 22 23:14:32.020 DEBG Write :1003 deps:[] res:true
27420 Sep 22 23:14:32.020 DEBG Write :1004 deps:[] res:true
27421 Sep 22 23:14:32.021 DEBG Write :1005 deps:[] res:true
27422 Sep 22 23:14:32.021 DEBG Write :1006 deps:[] res:true
27423 Sep 22 23:14:32.021 DEBG Write :1007 deps:[] res:true
27424 Sep 22 23:14:32.021 DEBG Write :1008 deps:[] res:true
27425 Sep 22 23:14:32.021 DEBG Write :1009 deps:[] res:true
27426 Sep 22 23:14:32.023 DEBG Read :1010 deps:[JobId(1000)] res:true
27427 Sep 22 23:14:32.023 DEBG Read :1010 deps:[JobId(1000)] res:true
27428 Sep 22 23:14:32.023 DEBG Read :1010 deps:[JobId(1000)] res:true
274292023-09-22T23:14:32.025ZINFOcrucible-pantry (dropshot): request completed latency_us = 2493 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:61150 req_id = 463ed387-467f-4392-b935-b464d1c1ed56 response_code = 200 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_read
27430 Sep 22 23:14:32.026 DEBG Read :1011 deps:[JobId(1001)] res:true
27431 Sep 22 23:14:32.026 DEBG Read :1011 deps:[JobId(1001)] res:true
27432 Sep 22 23:14:32.026 DEBG Read :1011 deps:[JobId(1001)] res:true
274332023-09-22T23:14:32.027ZINFOcrucible-pantry (dropshot): request completed latency_us = 2374 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:61150 req_id = b761941d-8d3f-49fd-abe7-fb468dee286c response_code = 200 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_read
27434 Sep 22 23:14:32.029 DEBG Read :1012 deps:[JobId(1002)] res:true
27435 Sep 22 23:14:32.029 DEBG Read :1012 deps:[JobId(1002)] res:true
27436 Sep 22 23:14:32.029 DEBG Read :1012 deps:[JobId(1002)] res:true
274372023-09-22T23:14:32.031ZINFOcrucible-pantry (dropshot): request completed latency_us = 2474 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:61150 req_id = 6ca67021-c10a-4252-babd-8c45426e87e9 response_code = 200 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_read
27438 Sep 22 23:14:32.032 DEBG Read :1013 deps:[JobId(1003)] res:true
27439 Sep 22 23:14:32.032 DEBG Read :1013 deps:[JobId(1003)] res:true
27440 Sep 22 23:14:32.032 DEBG Read :1013 deps:[JobId(1003)] res:true
274412023-09-22T23:14:32.033ZINFOcrucible-pantry (dropshot): request completed latency_us = 2368 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:61150 req_id = 03ca5732-067f-4810-b794-2a823ce721ab response_code = 200 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_read
27442 Sep 22 23:14:32.035 DEBG Read :1014 deps:[JobId(1004)] res:true
27443 Sep 22 23:14:32.035 DEBG Read :1014 deps:[JobId(1004)] res:true
27444 Sep 22 23:14:32.035 DEBG Read :1014 deps:[JobId(1004)] res:true
274452023-09-22T23:14:32.036ZINFOcrucible-pantry (dropshot): request completed latency_us = 2400 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:61150 req_id = e09b1a8e-df46-4bb0-ae5e-387c98b0f721 response_code = 200 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_read
27446 Sep 22 23:14:32.038 DEBG Read :1015 deps:[JobId(1005)] res:true
27447 Sep 22 23:14:32.038 DEBG Read :1015 deps:[JobId(1005)] res:true
27448 Sep 22 23:14:32.038 DEBG Read :1015 deps:[JobId(1005)] res:true
274492023-09-22T23:14:32.039ZINFOcrucible-pantry (dropshot): request completed latency_us = 2454 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:61150 req_id = 0a359e8e-7efc-4eaf-b5c9-3bb12b087927 response_code = 200 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_read
27450 Sep 22 23:14:32.041 DEBG Read :1016 deps:[JobId(1006)] res:true
27451 Sep 22 23:14:32.041 DEBG Read :1016 deps:[JobId(1006)] res:true
27452 Sep 22 23:14:32.041 DEBG Read :1016 deps:[JobId(1006)] res:true
274532023-09-22T23:14:32.042ZINFOcrucible-pantry (dropshot): request completed latency_us = 2377 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:61150 req_id = 723329f0-fcfe-4502-a291-8b3b9c9bbfd7 response_code = 200 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_read
27454 Sep 22 23:14:32.044 DEBG Read :1017 deps:[JobId(1007)] res:true
27455 Sep 22 23:14:32.044 DEBG Read :1017 deps:[JobId(1007)] res:true
27456 Sep 22 23:14:32.044 DEBG Read :1017 deps:[JobId(1007)] res:true
274572023-09-22T23:14:32.045ZINFOcrucible-pantry (dropshot): request completed latency_us = 2375 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:61150 req_id = a20da959-a5a4-4930-9193-4d269b85c2bb response_code = 200 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_read
27458 Sep 22 23:14:32.047 DEBG Read :1018 deps:[JobId(1008)] res:true
27459 Sep 22 23:14:32.047 DEBG Read :1018 deps:[JobId(1008)] res:true
27460 Sep 22 23:14:32.047 DEBG Read :1018 deps:[JobId(1008)] res:true
274612023-09-22T23:14:32.048ZINFOcrucible-pantry (dropshot): request completed latency_us = 2370 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:61150 req_id = 42f8088b-8d87-41a6-9d9e-70a39064ae09 response_code = 200 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_read
27462 Sep 22 23:14:32.049 DEBG Read :1019 deps:[JobId(1009)] res:true
27463 Sep 22 23:14:32.050 DEBG Read :1019 deps:[JobId(1009)] res:true
27464 Sep 22 23:14:32.050 DEBG Read :1019 deps:[JobId(1009)] res:true
274652023-09-22T23:14:32.051ZINFOcrucible-pantry (dropshot): request completed latency_us = 2370 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:61150 req_id = 7798cda2-8112-401a-acde-792a99af81b1 response_code = 200 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/bulk_read
274662023-09-22T23:14:32.052ZINFOcrucible-pantry (dropshot): request completed latency_us = 236 local_addr = 127.0.0.1:54057 method = POST remote_addr = 127.0.0.1:61150 req_id = cf097eee-de48-420d-bb5c-76c7894f5ab6 response_code = 200 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf/validate
27467 Sep 22 23:14:32.053 DEBG Read :1020 deps:[JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
27468 Sep 22 23:14:32.053 DEBG Read :1020 deps:[JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
27469 Sep 22 23:14:32.053 DEBG Read :1020 deps:[JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true
274702023-09-22T23:14:32.058ZINFOcrucible-pantry (dropshot): request completed latency_us = 162 local_addr = 127.0.0.1:54057 method = GET remote_addr = 127.0.0.1:61150 req_id = ab161ce6-c776-4d86-9679-71c8e134b9ed response_code = 200 uri = /crucible/pantry/0/job/e11c43d1-a447-4be1-8ab4-f569126c52b2/is_finished
274712023-09-22T23:14:32.058ZINFOcrucible-pantry (dropshot): request completed latency_us = 170 local_addr = 127.0.0.1:54057 method = GET remote_addr = 127.0.0.1:61150 req_id = ff02ad82-f6f7-4d75-8a94-7903d5fccc55 response_code = 200 uri = /crucible/pantry/0/job/e11c43d1-a447-4be1-8ab4-f569126c52b2/ok
274722023-09-22T23:14:32.059ZINFOcrucible-pantry (datafile): detach removing entry for volume 7d6e799f-ffff-4301-8a22-abec78155ecf
274732023-09-22T23:14:32.059ZINFOcrucible-pantry (datafile): detaching volume 7d6e799f-ffff-4301-8a22-abec78155ecf
27474 Sep 22 23:14:32.060 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
27475 Sep 22 23:14:32.060 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
27476 Sep 22 23:14:32.061 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019), JobId(1018), JobId(1017), JobId(1016), JobId(1015), JobId(1014), JobId(1013), JobId(1012), JobId(1011), JobId(1010), JobId(1009), JobId(1008), JobId(1007), JobId(1006), JobId(1005), JobId(1004), JobId(1003), JobId(1002), JobId(1001), JobId(1000)] res:true f:1 g:1
274772023-09-22T23:14:32.061ZINFOcrucible-pantry (datafile): Request to deactivate this guest
274782023-09-22T23:14:32.061ZINFOcrucible-pantry (datafile): 4444e318-0130-42a4-85b4-7dec4633fa58 set deactivating.
274792023-09-22T23:14:32.061ZINFOcrucible-pantry (dropshot): request completed latency_us = 2449 local_addr = 127.0.0.1:54057 method = DELETE remote_addr = 127.0.0.1:61150 req_id = 4692e680-54f2-48b2-8f9b-4e71bf64d59f response_code = 204 uri = /crucible/pantry/0/volume/7d6e799f-ffff-4301-8a22-abec78155ecf
27480 test test::test_pantry_validate_subset ... ok
27481 Sep 22 23:14:32.093 DEBG IO Write 1000 has deps []
27482 Sep 22 23:14:32.094 INFO Checking if live repair is needed
27483 Sep 22 23:14:32.094 INFO No Live Repair required at this time
27484 Sep 22 23:14:32.094 DEBG up_ds_listen was notified
27485 Sep 22 23:14:32.094 DEBG up_ds_listen process 1000
27486 Sep 22 23:14:32.094 DEBG [A] ack job 1000:1, : downstairs
27487 Sep 22 23:14:32.094 DEBG up_ds_listen checked 1 jobs, back to waiting
27488 Sep 22 23:14:32.257 DEBG Write :1000 deps:[] res:true
27489 Sep 22 23:14:32.272 DEBG Write :1000 deps:[] res:true
27490 Sep 22 23:14:32.288 DEBG Write :1000 deps:[] res:true
27491 Sep 22 23:14:32.293 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
27492 Sep 22 23:14:32.293 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
27493 Sep 22 23:14:32.293 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
27494 Sep 22 23:14:32.485 DEBG Write :1000 deps:[] res:true
27495 Sep 22 23:14:32.500 DEBG Write :1000 deps:[] res:true
27496 Sep 22 23:14:32.502 DEBG IO Write 1001 has deps []
27497 Sep 22 23:14:32.502 DEBG up_ds_listen was notified
27498 Sep 22 23:14:32.502 DEBG up_ds_listen process 1001
27499 Sep 22 23:14:32.502 DEBG [A] ack job 1001:2, : downstairs
27500 Sep 22 23:14:32.503 DEBG up_ds_listen checked 1 jobs, back to waiting
27501 Sep 22 23:14:32.503 DEBG IO Flush 1002 has deps [JobId(1001), JobId(1000)]
27502 Sep 22 23:14:32.516 DEBG Write :1000 deps:[] res:true
27503 Sep 22 23:14:32.522 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
27504 Sep 22 23:14:32.522 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
27505 Sep 22 23:14:32.522 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
275062023-09-22T23:14:32.628ZINFOcrucible-pantry (dropshot): request completed latency_us = 273 local_addr = 127.0.0.1:33566 method = GET remote_addr = 127.0.0.1:49478 req_id = fd9b3ce4-b994-4876-be17-fd28ec18bca6 response_code = 200 uri = /crucible/pantry/0/job/a52bd47d-76c9-4928-9712-218eb57bfeb7/is_finished
275072023-09-22T23:14:32.642ZINFOcrucible-pantry (dropshot): request completed latency_us = 256 local_addr = 127.0.0.1:61933 method = GET remote_addr = 127.0.0.1:58570 req_id = 841983c7-4468-458c-bf79-c64d38599663 response_code = 200 uri = /crucible/pantry/0/job/de4c6089-f970-4d99-9581-6bd61244a7f8/is_finished
27508 Sep 22 23:14:32.842 DEBG IO Write 1003 has deps [JobId(1002)]
27509 Sep 22 23:14:32.842 DEBG up_ds_listen was notified
27510 Sep 22 23:14:32.842 DEBG up_ds_listen process 1003
27511 Sep 22 23:14:32.842 DEBG [A] ack job 1003:4, : downstairs
27512 Sep 22 23:14:32.842 DEBG up_ds_listen checked 1 jobs, back to waiting
27513 Sep 22 23:14:33.018 DEBG Write :1002 deps:[] res:true
27514 Sep 22 23:14:33.033 DEBG Write :1002 deps:[] res:true
27515 Sep 22 23:14:33.048 DEBG Write :1002 deps:[] res:true
27516 Sep 22 23:14:33.101 INFO current number of open files limit 65536 is already the maximum
27517 Sep 22 23:14:33.101 INFO Created new region file "/tmp/downstairs-Xh4OLaSh/region.json"
27518 Sep 22 23:14:33.183 DEBG Flush :1003 extent_limit None deps:[JobId(1002)] res:true f:2 g:1
27519 Sep 22 23:14:33.183 DEBG Flush :1003 extent_limit None deps:[JobId(1002)] res:true f:2 g:1
27520 Sep 22 23:14:33.183 DEBG Flush :1003 extent_limit None deps:[JobId(1002)] res:true f:2 g:1
27521 Sep 22 23:14:33.197 DEBG IO Write 1004 has deps [JobId(1002)]
27522 Sep 22 23:14:33.197 DEBG up_ds_listen was notified
27523 Sep 22 23:14:33.197 DEBG up_ds_listen process 1004
27524 Sep 22 23:14:33.197 DEBG [A] ack job 1004:5, : downstairs
27525 Sep 22 23:14:33.197 DEBG up_ds_listen checked 1 jobs, back to waiting
27526 Sep 22 23:14:33.197 DEBG IO Flush 1005 has deps [JobId(1004), JobId(1003), JobId(1002)]
27527 Sep 22 23:14:33.203 INFO current number of open files limit 65536 is already the maximum
27528 Sep 22 23:14:33.203 INFO Opened existing region file "/tmp/downstairs-Xh4OLaSh/region.json"
27529 Sep 22 23:14:33.203 INFO Database read version 1
27530 Sep 22 23:14:33.203 INFO Database write version 1
27531 Sep 22 23:14:33.251 INFO UUID: bafba53d-f7d4-4132-95f6-92b6a45487a9
27532 Sep 22 23:14:33.251 INFO Blocks per extent:512 Total Extents: 188
27533 Sep 22 23:14:33.251 INFO Crucible Version: Crucible Version: 0.0.1
27534 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27535 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27536 rustc: 1.70.0 stable x86_64-unknown-illumos
27537 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27538 Sep 22 23:14:33.251 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27539 Sep 22 23:14:33.251 INFO Using address: 127.0.0.1:63639, task: main
27540 Sep 22 23:14:33.252 INFO Repair listens on 127.0.0.1:0, task: repair
27541 Sep 22 23:14:33.252 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:35667, task: repair
27542 Sep 22 23:14:33.252 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:35667, task: repair
27543 Sep 22 23:14:33.252 INFO listening, local_addr: 127.0.0.1:35667, task: repair
27544 Sep 22 23:14:33.252 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:35667, task: repair
27545 Sep 22 23:14:33.252 INFO Using repair address: 127.0.0.1:35667, task: main
27546 Sep 22 23:14:33.252 INFO No SSL acceptor configured, task: main
27547 Sep 22 23:14:33.259 INFO current number of open files limit 65536 is already the maximum
27548 Sep 22 23:14:33.259 INFO Created new region file "/tmp/downstairs-x3lx8d7J/region.json"
27549 Sep 22 23:14:33.357 INFO listening on 127.0.0.1:0, task: main
27550 Sep 22 23:14:33.357 INFO current number of open files limit 65536 is already the maximum
27551 Sep 22 23:14:33.357 INFO Opened existing region file "/tmp/downstairs-x3lx8d7J/region.json"
27552 Sep 22 23:14:33.357 INFO Database read version 1
27553 Sep 22 23:14:33.357 INFO Database write version 1
27554 Sep 22 23:14:33.378 DEBG Write :1002 deps:[] res:true
27555 Sep 22 23:14:33.393 DEBG Write :1002 deps:[] res:true
27556 Sep 22 23:14:33.407 INFO UUID: 1e7a075d-e51e-4361-bdf9-32f916f89647
27557 Sep 22 23:14:33.407 INFO Blocks per extent:512 Total Extents: 188
27558 Sep 22 23:14:33.407 INFO Crucible Version: Crucible Version: 0.0.1
27559 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27560 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27561 rustc: 1.70.0 stable x86_64-unknown-illumos
27562 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27563 Sep 22 23:14:33.407 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27564 Sep 22 23:14:33.407 INFO Using address: 127.0.0.1:43949, task: main
27565 Sep 22 23:14:33.408 INFO Repair listens on 127.0.0.1:0, task: repair
27566 Sep 22 23:14:33.408 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:46314, task: repair
27567 Sep 22 23:14:33.408 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:46314, task: repair
27568 Sep 22 23:14:33.408 INFO listening, local_addr: 127.0.0.1:46314, task: repair
27569 Sep 22 23:14:33.408 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:46314, task: repair
27570 Sep 22 23:14:33.408 INFO Using repair address: 127.0.0.1:46314, task: main
27571 Sep 22 23:14:33.408 INFO No SSL acceptor configured, task: main
27572 Sep 22 23:14:33.409 DEBG Write :1002 deps:[] res:true
27573 Sep 22 23:14:33.415 INFO current number of open files limit 65536 is already the maximum
27574 Sep 22 23:14:33.415 INFO Created new region file "/tmp/downstairs-qle3gqaw/region.json"
27575 Sep 22 23:14:33.518 INFO listening on 127.0.0.1:0, task: main
27576 Sep 22 23:14:33.518 INFO current number of open files limit 65536 is already the maximum
27577 Sep 22 23:14:33.518 INFO Opened existing region file "/tmp/downstairs-qle3gqaw/region.json"
27578 Sep 22 23:14:33.518 INFO Database read version 1
27579 Sep 22 23:14:33.518 INFO Database write version 1
27580 Sep 22 23:14:33.527 DEBG Flush :1003 extent_limit None deps:[JobId(1002)] res:true f:2 g:1
27581 Sep 22 23:14:33.527 DEBG Flush :1003 extent_limit None deps:[JobId(1002)] res:true f:2 g:1
27582 Sep 22 23:14:33.527 DEBG Flush :1003 extent_limit None deps:[JobId(1002)] res:true f:2 g:1
27583 Sep 22 23:14:33.571 INFO UUID: b89ddae9-a405-4d4a-865d-e95f1fd0a715
27584 Sep 22 23:14:33.571 INFO Blocks per extent:512 Total Extents: 188
27585 Sep 22 23:14:33.571 INFO Crucible Version: Crucible Version: 0.0.1
27586 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
27587 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
27588 rustc: 1.70.0 stable x86_64-unknown-illumos
27589 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
27590 Sep 22 23:14:33.571 INFO Upstairs <-> Downstairs Message Version: 4, task: main
27591 Sep 22 23:14:33.571 INFO Using address: 127.0.0.1:63175, task: main
27592 Sep 22 23:14:33.572 INFO Repair listens on 127.0.0.1:0, task: repair
27593 Sep 22 23:14:33.572 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:47187, task: repair
27594 Sep 22 23:14:33.572 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:47187, task: repair
27595 Sep 22 23:14:33.572 INFO listening, local_addr: 127.0.0.1:47187, task: repair
27596 Sep 22 23:14:33.572 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:47187, task: repair
27597 Sep 22 23:14:33.572 INFO Using repair address: 127.0.0.1:47187, task: main
27598 Sep 22 23:14:33.572 INFO No SSL acceptor configured, task: main
27599 Sep 22 23:14:33.579 INFO Upstairs starts
27600 Sep 22 23:14:33.579 INFO Crucible Version: BuildInfo {
27601 version: "0.0.1",
27602 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
27603 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
27604 git_branch: "main",
27605 rustc_semver: "1.70.0",
27606 rustc_channel: "stable",
27607 rustc_host_triple: "x86_64-unknown-illumos",
27608 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
27609 cargo_triple: "x86_64-unknown-illumos",
27610 debug: true,
27611 opt_level: 0,
27612 }
27613 Sep 22 23:14:33.579 INFO Upstairs <-> Downstairs Message Version: 4
27614 Sep 22 23:14:33.579 INFO Crucible stats registered with UUID: 521f2004-29de-4d41-9494-447a49569d29
27615 Sep 22 23:14:33.579 INFO Crucible 521f2004-29de-4d41-9494-447a49569d29 has session id: 2b3ff256-dd3e-46b0-a6b3-756709d17c55
27616 Sep 22 23:14:33.579 INFO listening on 127.0.0.1:0, task: main
27617 Sep 22 23:14:33.579 INFO [0] connecting to 127.0.0.1:63639, looper: 0
27618 Sep 22 23:14:33.580 INFO [1] connecting to 127.0.0.1:43949, looper: 1
27619 Sep 22 23:14:33.580 INFO [2] connecting to 127.0.0.1:63175, looper: 2
27620 Sep 22 23:14:33.580 INFO up_listen starts, task: up_listen
27621 Sep 22 23:14:33.580 INFO Wait for all three downstairs to come online
27622 Sep 22 23:14:33.580 INFO Flush timeout: 0.5
27623 Sep 22 23:14:33.580 INFO [0] 521f2004-29de-4d41-9494-447a49569d29 looper connected, looper: 0
27624 Sep 22 23:14:33.580 INFO [0] Proc runs for 127.0.0.1:63639 in state New
27625 Sep 22 23:14:33.580 INFO [1] 521f2004-29de-4d41-9494-447a49569d29 looper connected, looper: 1
27626 Sep 22 23:14:33.580 INFO [1] Proc runs for 127.0.0.1:43949 in state New
27627 Sep 22 23:14:33.580 INFO [2] 521f2004-29de-4d41-9494-447a49569d29 looper connected, looper: 2
27628 Sep 22 23:14:33.580 INFO [2] Proc runs for 127.0.0.1:63175 in state New
27629 Sep 22 23:14:33.580 INFO accepted connection from 127.0.0.1:52189, task: main
27630 Sep 22 23:14:33.580 INFO accepted connection from 127.0.0.1:54318, task: main
27631 Sep 22 23:14:33.580 INFO accepted connection from 127.0.0.1:37634, task: main
27632 Sep 22 23:14:33.581 DEBG IO Write 1006 has deps [JobId(1005), JobId(1002)]
27633 Sep 22 23:14:33.581 DEBG up_ds_listen was notified
27634 Sep 22 23:14:33.581 DEBG up_ds_listen process 1006
27635 Sep 22 23:14:33.581 DEBG [A] ack job 1006:7, : downstairs
27636 Sep 22 23:14:33.581 DEBG up_ds_listen checked 1 jobs, back to waiting
276372023-09-22T23:14:33.630ZINFOcrucible-pantry (dropshot): request completed latency_us = 216 local_addr = 127.0.0.1:33566 method = GET remote_addr = 127.0.0.1:49478 req_id = 099b8a41-2c32-421f-aa4a-808d3528e54b response_code = 200 uri = /crucible/pantry/0/job/a52bd47d-76c9-4928-9712-218eb57bfeb7/is_finished
27638 Sep 22 23:14:33.671 INFO Connection request from 521f2004-29de-4d41-9494-447a49569d29 with version 4, task: proc
27639 Sep 22 23:14:33.671 INFO upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } connected, version 4, task: proc
27640 Sep 22 23:14:33.671 INFO Connection request from 521f2004-29de-4d41-9494-447a49569d29 with version 4, task: proc
27641 Sep 22 23:14:33.671 INFO upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } connected, version 4, task: proc
27642 Sep 22 23:14:33.671 INFO Connection request from 521f2004-29de-4d41-9494-447a49569d29 with version 4, task: proc
27643 Sep 22 23:14:33.671 INFO upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } connected, version 4, task: proc
27644 Sep 22 23:14:33.672 INFO [0] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) New New New ds_transition to WaitActive
27645 Sep 22 23:14:33.672 INFO [0] Transition from New to WaitActive
27646 Sep 22 23:14:33.672 INFO [1] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) WaitActive New New ds_transition to WaitActive
27647 Sep 22 23:14:33.672 INFO [1] Transition from New to WaitActive
27648 Sep 22 23:14:33.672 INFO [2] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) WaitActive WaitActive New ds_transition to WaitActive
27649 Sep 22 23:14:33.672 INFO [2] Transition from New to WaitActive
27650 Sep 22 23:14:33.703 DEBG Write :1004 deps:[] res:true
27651 Sep 22 23:14:33.719 DEBG Write :1004 deps:[] res:true
27652 Sep 22 23:14:33.734 DEBG Write :1004 deps:[] res:true
276532023-09-22T23:14:33.737ZINFOcrucible-pantry (dropshot): request completed latency_us = 373 local_addr = 127.0.0.1:61933 method = GET remote_addr = 127.0.0.1:58570 req_id = f88f8002-cb7f-4c17-a71d-06fed6786627 response_code = 200 uri = /crucible/pantry/0/job/de4c6089-f970-4d99-9581-6bd61244a7f8/is_finished
27654 Sep 22 23:14:33.742 DEBG Flush :1005 extent_limit None deps:[JobId(1004)] res:true f:3 g:1
27655 Sep 22 23:14:33.742 DEBG Flush :1005 extent_limit None deps:[JobId(1004)] res:true f:3 g:1
27656 Sep 22 23:14:33.742 DEBG Flush :1005 extent_limit None deps:[JobId(1004)] res:true f:3 g:1
27657 Sep 22 23:14:33.917 DEBG IO Write 1007 has deps [JobId(1005), JobId(1002)]
27658 Sep 22 23:14:33.917 DEBG up_ds_listen was notified
27659 Sep 22 23:14:33.917 DEBG up_ds_listen process 1007
27660 Sep 22 23:14:33.917 DEBG [A] ack job 1007:8, : downstairs
27661 Sep 22 23:14:33.917 DEBG up_ds_listen checked 1 jobs, back to waiting
27662 Sep 22 23:14:33.918 DEBG IO Flush 1008 has deps [JobId(1007), JobId(1006), JobId(1005)]
27663 The guest has requested activation
27664 Sep 22 23:14:33.959 INFO 521f2004-29de-4d41-9494-447a49569d29 active request set
27665 Sep 22 23:14:33.959 INFO [0] received activate with gen 1
27666 Sep 22 23:14:33.959 INFO [0] client got ds_active_rx, promote! session ca92c31b-eb28-41e5-843d-26b8bd24de02
27667 Sep 22 23:14:33.959 INFO [1] received activate with gen 1
27668 Sep 22 23:14:33.959 INFO [1] client got ds_active_rx, promote! session ca92c31b-eb28-41e5-843d-26b8bd24de02
27669 Sep 22 23:14:33.959 INFO [2] received activate with gen 1
27670 Sep 22 23:14:33.959 INFO [2] client got ds_active_rx, promote! session ca92c31b-eb28-41e5-843d-26b8bd24de02
27671 Sep 22 23:14:33.960 INFO UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } is now active (read-write)
27672 Sep 22 23:14:33.960 INFO UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } is now active (read-write)
27673 Sep 22 23:14:33.960 INFO UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } is now active (read-write)
27674 Sep 22 23:14:33.961 INFO [0] downstairs client at 127.0.0.1:63639 has UUID bafba53d-f7d4-4132-95f6-92b6a45487a9
27675 Sep 22 23:14:33.961 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: bafba53d-f7d4-4132-95f6-92b6a45487a9, encrypted: true, database_read_version: 1, database_write_version: 1 }
27676 Sep 22 23:14:33.961 INFO 521f2004-29de-4d41-9494-447a49569d29 WaitActive WaitActive WaitActive
27677 Sep 22 23:14:33.961 INFO [1] downstairs client at 127.0.0.1:43949 has UUID 1e7a075d-e51e-4361-bdf9-32f916f89647
27678 Sep 22 23:14:33.961 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 1e7a075d-e51e-4361-bdf9-32f916f89647, encrypted: true, database_read_version: 1, database_write_version: 1 }
27679 Sep 22 23:14:33.961 INFO 521f2004-29de-4d41-9494-447a49569d29 WaitActive WaitActive WaitActive
27680 Sep 22 23:14:33.961 INFO [2] downstairs client at 127.0.0.1:63175 has UUID b89ddae9-a405-4d4a-865d-e95f1fd0a715
27681 Sep 22 23:14:33.961 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: b89ddae9-a405-4d4a-865d-e95f1fd0a715, encrypted: true, database_read_version: 1, database_write_version: 1 }
27682 Sep 22 23:14:33.961 INFO 521f2004-29de-4d41-9494-447a49569d29 WaitActive WaitActive WaitActive
27683 Sep 22 23:14:33.979 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27684 Sep 22 23:14:33.982 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27685 Sep 22 23:14:33.985 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27686 Sep 22 23:14:34.011 INFO Downstairs has completed Negotiation, task: proc
27687 Sep 22 23:14:34.012 INFO Downstairs has completed Negotiation, task: proc
27688 Sep 22 23:14:34.013 INFO Downstairs has completed Negotiation, task: proc
27689 Sep 22 23:14:34.013 INFO [0] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
27690 Sep 22 23:14:34.013 INFO [0] Transition from WaitActive to WaitQuorum
27691 Sep 22 23:14:34.013 WARN [0] new RM replaced this: None
27692 Sep 22 23:14:34.013 INFO [0] Starts reconcile loop
27693 Sep 22 23:14:34.013 INFO [1] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
27694 Sep 22 23:14:34.013 INFO [1] Transition from WaitActive to WaitQuorum
27695 Sep 22 23:14:34.013 WARN [1] new RM replaced this: None
27696 Sep 22 23:14:34.013 INFO [1] Starts reconcile loop
27697 Sep 22 23:14:34.014 INFO [2] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
27698 Sep 22 23:14:34.014 INFO [2] Transition from WaitActive to WaitQuorum
27699 Sep 22 23:14:34.014 WARN [2] new RM replaced this: None
27700 Sep 22 23:14:34.014 INFO [2] Starts reconcile loop
27701 Sep 22 23:14:34.014 INFO [0] 127.0.0.1:63639 task reports connection:true
27702 Sep 22 23:14:34.014 INFO 521f2004-29de-4d41-9494-447a49569d29 WaitQuorum WaitQuorum WaitQuorum
27703 Sep 22 23:14:34.014 INFO [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27704 Sep 22 23:14:34.014 INFO [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27705 Sep 22 23:14:34.014 INFO [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
27706 Sep 22 23:14:34.014 INFO [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27707 Sep 22 23:14:34.014 INFO [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27708 Sep 22 23:14:34.014 INFO [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
27709 Sep 22 23:14:34.014 INFO [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27710 Sep 22 23:14:34.014 INFO [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
27711 Sep 22 23:14:34.014 INFO [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
27712 Sep 22 23:14:34.014 INFO Max found gen is 1
27713 Sep 22 23:14:34.014 INFO Generation requested: 1 >= found:1
27714 Sep 22 23:14:34.014 INFO Next flush: 1
27715 Sep 22 23:14:34.014 INFO All extents match
27716 Sep 22 23:14:34.014 INFO No downstairs repair required
27717 Sep 22 23:14:34.014 INFO No initial repair work was required
27718 Sep 22 23:14:34.014 INFO Set Downstairs and Upstairs active
27719 Sep 22 23:14:34.014 INFO 521f2004-29de-4d41-9494-447a49569d29 is now active with session: ca92c31b-eb28-41e5-843d-26b8bd24de02
27720 Sep 22 23:14:34.014 INFO 521f2004-29de-4d41-9494-447a49569d29 Set Active after no repair
27721 Sep 22 23:14:34.014 INFO Notify all downstairs, region set compare is done.
27722 Sep 22 23:14:34.014 INFO Set check for repair
27723 Sep 22 23:14:34.014 INFO [1] 127.0.0.1:43949 task reports connection:true
27724 Sep 22 23:14:34.014 INFO 521f2004-29de-4d41-9494-447a49569d29 Active Active Active
27725 Sep 22 23:14:34.014 INFO Set check for repair
27726 Sep 22 23:14:34.014 INFO [2] 127.0.0.1:63175 task reports connection:true
27727 Sep 22 23:14:34.014 INFO 521f2004-29de-4d41-9494-447a49569d29 Active Active Active
27728 Sep 22 23:14:34.014 INFO Set check for repair
27729 Sep 22 23:14:34.014 INFO [0] received reconcile message
27730 Sep 22 23:14:34.014 INFO [0] All repairs completed, exit
27731 Sep 22 23:14:34.014 INFO [0] Starts cmd_loop
27732 Sep 22 23:14:34.014 INFO [1] received reconcile message
27733 Sep 22 23:14:34.014 INFO [1] All repairs completed, exit
27734 Sep 22 23:14:34.014 INFO [1] Starts cmd_loop
27735 Sep 22 23:14:34.014 INFO [2] received reconcile message
27736 Sep 22 23:14:34.014 INFO [2] All repairs completed, exit
27737 Sep 22 23:14:34.014 INFO [2] Starts cmd_loop
27738 The guest has finished waiting for activation
27739 Sep 22 23:14:34.028 DEBG IO Read 1000 has deps []
27740 Sep 22 23:14:34.065 DEBG Read :1000 deps:[] res:true
27741 Sep 22 23:14:34.089 DEBG Read :1000 deps:[] res:true
27742 Sep 22 23:14:34.126 DEBG Read :1000 deps:[] res:true
27743 Sep 22 23:14:34.147 DEBG IO Flush 1001 has deps [JobId(1000)]
27744 Sep 22 23:14:34.150 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
27745 Sep 22 23:14:34.151 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
27746 Sep 22 23:14:34.152 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
27747 Sep 22 23:14:34.232 DEBG Write :1006 deps:[] res:true
27748 Sep 22 23:14:34.237 DEBG Write :1004 deps:[] res:true
27749 Sep 22 23:14:34.252 DEBG Write :1006 deps:[] res:true
27750 Sep 22 23:14:34.262 DEBG Write :1004 deps:[] res:true
27751 Sep 22 23:14:34.271 DEBG Write :1006 deps:[] res:true
27752 Sep 22 23:14:34.273 DEBG IO Write 1009 has deps [JobId(1008), JobId(1005), JobId(1002)]
27753 Sep 22 23:14:34.273 DEBG up_ds_listen was notified
27754 Sep 22 23:14:34.273 DEBG up_ds_listen process 1009
27755 Sep 22 23:14:34.273 DEBG [A] ack job 1009:10, : downstairs
27756 Sep 22 23:14:34.273 DEBG up_ds_listen checked 1 jobs, back to waiting
27757 Sep 22 23:14:34.279 DEBG Flush :1007 extent_limit None deps:[JobId(1006)] res:true f:4 g:1
27758 Sep 22 23:14:34.279 DEBG Flush :1007 extent_limit None deps:[JobId(1006)] res:true f:4 g:1
27759 Sep 22 23:14:34.279 DEBG Flush :1007 extent_limit None deps:[JobId(1006)] res:true f:4 g:1
27760 Sep 22 23:14:34.286 DEBG Write :1004 deps:[] res:true
27761 Sep 22 23:14:34.291 DEBG Flush :1005 extent_limit None deps:[JobId(1004)] res:true f:3 g:1
27762 Sep 22 23:14:34.291 DEBG Flush :1005 extent_limit None deps:[JobId(1004)] res:true f:3 g:1
27763 Sep 22 23:14:34.291 DEBG Flush :1005 extent_limit None deps:[JobId(1004)] res:true f:3 g:1
27764 Sep 22 23:14:34.344 DEBG [0] Read AckReady 1000, : downstairs
27765 Sep 22 23:14:34.381 DEBG [1] Read already AckReady 1000, : downstairs
27766 Sep 22 23:14:34.418 DEBG [2] Read already AckReady 1000, : downstairs
27767 Sep 22 23:14:34.420 DEBG up_ds_listen was notified
27768 Sep 22 23:14:34.420 DEBG up_ds_listen process 1000
27769 Sep 22 23:14:34.420 DEBG [A] ack job 1000:1, : downstairs
27770 Sep 22 23:14:34.518 DEBG up_ds_listen process 1001
27771 Sep 22 23:14:34.518 DEBG [A] ack job 1001:2, : downstairs
27772 Sep 22 23:14:34.518 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
27773 Sep 22 23:14:34.518 DEBG up_ds_listen checked 2 jobs, back to waiting
27774 Sep 22 23:14:34.518 DEBG up_ds_listen was notified
27775 Sep 22 23:14:34.518 DEBG up_ds_listen checked 0 jobs, back to waiting
27776 Sep 22 23:14:34.607 DEBG IO Write 1010 has deps [JobId(1008), JobId(1005), JobId(1002)]
27777 Sep 22 23:14:34.607 DEBG up_ds_listen was notified
27778 Sep 22 23:14:34.607 DEBG up_ds_listen process 1010
27779 Sep 22 23:14:34.607 DEBG [A] ack job 1010:11, : downstairs
27780 Sep 22 23:14:34.607 DEBG up_ds_listen checked 1 jobs, back to waiting
27781 Sep 22 23:14:34.607 DEBG IO Flush 1011 has deps [JobId(1010), JobId(1009), JobId(1008)]
277822023-09-22T23:14:34.645ZINFOcrucible-pantry (dropshot): request completed latency_us = 314 local_addr = 127.0.0.1:33566 method = GET remote_addr = 127.0.0.1:49478 req_id = dfb7be5b-209a-4151-83e5-bdabb62bc93b response_code = 200 uri = /crucible/pantry/0/job/a52bd47d-76c9-4928-9712-218eb57bfeb7/is_finished
27783 Sep 22 23:14:34.711 DEBG Write :1006 deps:[] res:true
27784 Sep 22 23:14:34.723 DEBG Write :1006 deps:[] res:true
27785 Sep 22 23:14:34.735 DEBG Write :1006 deps:[] res:true
27786 Sep 22 23:14:34.739 DEBG Flush :1007 extent_limit None deps:[JobId(1006)] res:true f:4 g:1
27787 Sep 22 23:14:34.739 DEBG Flush :1007 extent_limit None deps:[JobId(1006)] res:true f:4 g:1
27788 Sep 22 23:14:34.739 DEBG Flush :1007 extent_limit None deps:[JobId(1006)] res:true f:4 g:1
277892023-09-22T23:14:34.745ZINFOcrucible-pantry (dropshot): request completed latency_us = 175 local_addr = 127.0.0.1:61933 method = GET remote_addr = 127.0.0.1:58570 req_id = 6cfed45a-e874-4b8e-b7ba-11ec76783bdb response_code = 200 uri = /crucible/pantry/0/job/de4c6089-f970-4d99-9581-6bd61244a7f8/is_finished
277902023-09-22T23:14:34.745ZINFOcrucible-pantry (dropshot): request completed latency_us = 172 local_addr = 127.0.0.1:61933 method = GET remote_addr = 127.0.0.1:58570 req_id = 1fb6ef58-1139-4f08-98ee-924b8eb1e7b2 response_code = 200 uri = /crucible/pantry/0/job/de4c6089-f970-4d99-9581-6bd61244a7f8/ok
277912023-09-22T23:14:34.746ZINFOcrucible-pantry (datafile): detach removing entry for volume d6736968-99ba-4ebf-b4ff-15afb0904fed
277922023-09-22T23:14:34.746ZINFOcrucible-pantry (datafile): detaching volume d6736968-99ba-4ebf-b4ff-15afb0904fed
27793 Sep 22 23:14:34.746 DEBG Flush :1008 extent_limit None deps:[] res:true f:5 g:1
27794 Sep 22 23:14:34.746 DEBG Flush :1008 extent_limit None deps:[] res:true f:5 g:1
27795 Sep 22 23:14:34.746 DEBG Flush :1008 extent_limit None deps:[] res:true f:5 g:1
277962023-09-22T23:14:34.747ZINFOcrucible-pantry (datafile): Request to deactivate this guest
277972023-09-22T23:14:34.747ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 set deactivating.
277982023-09-22T23:14:34.747ZINFOcrucible-pantry (dropshot): request completed latency_us = 1348 local_addr = 127.0.0.1:61933 method = DELETE remote_addr = 127.0.0.1:58570 req_id = b4571eef-b144-43ac-ae81-3e5e28b550cb response_code = 204 uri = /crucible/pantry/0/volume/d6736968-99ba-4ebf-b4ff-15afb0904fed
27799 Sep 22 23:14:34.938 DEBG IO Write 1012 has deps [JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
27800 Sep 22 23:14:34.938 DEBG up_ds_listen was notified
27801 Sep 22 23:14:34.938 DEBG up_ds_listen process 1012
27802 Sep 22 23:14:34.938 DEBG [A] ack job 1012:13, : downstairs
27803 Sep 22 23:14:34.938 DEBG up_ds_listen checked 1 jobs, back to waiting
27804 Sep 22 23:14:35.015 INFO Checking if live repair is needed
27805 Sep 22 23:14:35.015 INFO No Live Repair required at this time
27806 Sep 22 23:14:35.331 DEBG IO Write 1013 has deps [JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
27807 Sep 22 23:14:35.331 DEBG up_ds_listen was notified
27808 Sep 22 23:14:35.331 DEBG up_ds_listen process 1013
27809 Sep 22 23:14:35.331 DEBG [A] ack job 1013:14, : downstairs
27810 Sep 22 23:14:35.331 DEBG up_ds_listen checked 1 jobs, back to waiting
27811 Sep 22 23:14:35.477 DEBG IO Flush 1014 has deps [JobId(1013), JobId(1012), JobId(1011)]
27812 Sep 22 23:14:35.478 INFO [lossy] sleeping 1 second
27813 Sep 22 23:14:35.509 DEBG Write :1000 deps:[] res:true
27814 Sep 22 23:14:35.510 WARN returning error on write!
27815 Sep 22 23:14:35.510 DEBG Write :1000 deps:[] res:false
27816 Sep 22 23:14:35.540 DEBG Write :1000 deps:[] res:true
278172023-09-22T23:14:35.648ZINFOcrucible-pantry (dropshot): request completed latency_us = 252 local_addr = 127.0.0.1:33566 method = GET remote_addr = 127.0.0.1:49478 req_id = 3e70b85a-273e-4e67-9d0c-8ea791124386 response_code = 200 uri = /crucible/pantry/0/job/a52bd47d-76c9-4928-9712-218eb57bfeb7/is_finished
278182023-09-22T23:14:35.649ZERROcrucible-pantry (datafile): job a52bd47d-76c9-4928-9712-218eb57bfeb7 failed with sha256 digest mismatch! expected 00000000000000000000000000000000000000000000000000000000f5b32221, saw 319d678f093c43502ca360911d52b475dea7fa6dcd962150c84fff18f5b32221
278192023-09-22T23:14:35.649ZINFOcrucible-pantry (dropshot): request completed latency_us = 225 local_addr = 127.0.0.1:33566 method = GET remote_addr = 127.0.0.1:49478 req_id = 81f6f686-93b7-4ef4-bd40-9057ebcdbb21 response_code = 200 uri = /crucible/pantry/0/job/a52bd47d-76c9-4928-9712-218eb57bfeb7/ok
278202023-09-22T23:14:35.650ZINFOcrucible-pantry (datafile): detach removing entry for volume 1e088c74-6c1b-4846-90a3-1bfe70bc4f7f
278212023-09-22T23:14:35.650ZINFOcrucible-pantry (datafile): detaching volume 1e088c74-6c1b-4846-90a3-1bfe70bc4f7f
27822 Sep 22 23:14:35.650 DEBG Flush :1008 extent_limit None deps:[] res:true f:5 g:1
27823 Sep 22 23:14:35.650 DEBG Flush :1008 extent_limit None deps:[] res:true f:5 g:1
27824 Sep 22 23:14:35.650 DEBG Flush :1008 extent_limit None deps:[] res:true f:5 g:1
278252023-09-22T23:14:35.651ZINFOcrucible-pantry (datafile): Request to deactivate this guest
278262023-09-22T23:14:35.651ZINFOcrucible-pantry (datafile): 982ad9a6-37b1-4b89-8804-8ba0c1300cac set deactivating.
278272023-09-22T23:14:35.651ZINFOcrucible-pantry (dropshot): request completed latency_us = 1472 local_addr = 127.0.0.1:33566 method = DELETE remote_addr = 127.0.0.1:49478 req_id = cccaebf3-3153-477b-8d2b-af0f03a4a07c response_code = 204 uri = /crucible/pantry/0/volume/1e088c74-6c1b-4846-90a3-1bfe70bc4f7f
27828 test test::test_pantry_import_from_url_ovmf_bad_digest ... ok
27829 Sep 22 23:14:35.882 DEBG IO Write 1015 has deps [JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
27830 Sep 22 23:14:35.882 DEBG up_ds_listen was notified
27831 Sep 22 23:14:35.882 DEBG up_ds_listen process 1015
27832 Sep 22 23:14:35.883 DEBG [A] ack job 1015:16, : downstairs
27833 Sep 22 23:14:35.883 DEBG up_ds_listen checked 1 jobs, back to waiting
27834 Sep 22 23:14:35.939 INFO Request to deactivate this guest
27835 Sep 22 23:14:35.939 INFO 521f2004-29de-4d41-9494-447a49569d29 set deactivating.
27836 Sep 22 23:14:35.939 DEBG No work, no need to flush, return OK
27837 note: configured to log to "/dev/stdout"
278382023-09-22T23:14:35.941ZINFOcrucible-pantry (dropshot): listening local_addr = 127.0.0.1:49411
278392023-09-22T23:14:35.941ZINFOcrucible-pantry: listen IP: 127.0.0.1:49411
278402023-09-22T23:14:36.007ZINFOcrucible-pantry (dropshot): accepted connection local_addr = 127.0.0.1:49411 remote_addr = 127.0.0.1:43906
278412023-09-22T23:14:36.007ZINFOcrucible-pantry (datafile): no entry exists for volume fc5b0c26-5d3b-433c-b382-b2a002415eea, constructing...
278422023-09-22T23:14:36.008ZINFOcrucible-pantry (datafile): Upstairs starts
278432023-09-22T23:14:36.008ZINFOcrucible-pantry (datafile): Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
278442023-09-22T23:14:36.008ZINFOcrucible-pantry (datafile): Upstairs <-> Downstairs Message Version: 4
278452023-09-22T23:14:36.008ZINFOcrucible-pantry (datafile): Crucible stats registered with UUID: 521f2004-29de-4d41-9494-447a49569d29
278462023-09-22T23:14:36.008ZINFOcrucible-pantry (datafile): Crucible 521f2004-29de-4d41-9494-447a49569d29 has session id: aa90d023-19be-44f8-b8a2-74eb98aeb980
278472023-09-22T23:14:36.008ZINFOcrucible-pantry (datafile): [0] connecting to 127.0.0.1:63639 looper = 0
278482023-09-22T23:14:36.008ZINFOcrucible-pantry (datafile): [1] connecting to 127.0.0.1:43949 looper = 1
278492023-09-22T23:14:36.008ZINFOcrucible-pantry (datafile): [2] connecting to 127.0.0.1:63175 looper = 2
278502023-09-22T23:14:36.008ZINFOcrucible-pantry (datafile): up_listen starts task = up_listen
278512023-09-22T23:14:36.008ZINFOcrucible-pantry (datafile): Wait for all three downstairs to come online
278522023-09-22T23:14:36.008ZINFOcrucible-pantry (datafile): Flush timeout: 0.5
278532023-09-22T23:14:36.073ZINFOcrucible-pantry (datafile): [0] 521f2004-29de-4d41-9494-447a49569d29 looper connected looper = 0
278542023-09-22T23:14:36.073ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:63639 in state New
278552023-09-22T23:14:36.073ZINFOcrucible-pantry (datafile): [1] 521f2004-29de-4d41-9494-447a49569d29 looper connected looper = 1
278562023-09-22T23:14:36.073ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:43949 in state New
278572023-09-22T23:14:36.073ZINFOcrucible-pantry (datafile): [2] 521f2004-29de-4d41-9494-447a49569d29 looper connected looper = 2
278582023-09-22T23:14:36.073ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:63175 in state New
27859 Sep 22 23:14:36.073 INFO accepted connection from 127.0.0.1:33714, task: main
27860 Sep 22 23:14:36.073 INFO accepted connection from 127.0.0.1:36045, task: main
27861 Sep 22 23:14:36.073 INFO accepted connection from 127.0.0.1:62745, task: main
27862 Sep 22 23:14:36.074 INFO Connection request from 521f2004-29de-4d41-9494-447a49569d29 with version 4, task: proc
27863 Sep 22 23:14:36.074 INFO upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } connected, version 4, task: proc
27864 Sep 22 23:14:36.074 INFO Connection request from 521f2004-29de-4d41-9494-447a49569d29 with version 4, task: proc
27865 Sep 22 23:14:36.074 INFO upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } connected, version 4, task: proc
27866 Sep 22 23:14:36.074 INFO Connection request from 521f2004-29de-4d41-9494-447a49569d29 with version 4, task: proc
27867 Sep 22 23:14:36.074 INFO upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } connected, version 4, task: proc
278682023-09-22T23:14:36.074ZINFOcrucible-pantry (datafile): [0] 521f2004-29de-4d41-9494-447a49569d29 (1e7e774e-f305-4cf5-a9d4-7b0b96e564fc) New New New ds_transition to WaitActive
278692023-09-22T23:14:36.074ZINFOcrucible-pantry (datafile): [0] Transition from New to WaitActive
278702023-09-22T23:14:36.074ZINFOcrucible-pantry (datafile): [1] 521f2004-29de-4d41-9494-447a49569d29 (1e7e774e-f305-4cf5-a9d4-7b0b96e564fc) WaitActive New New ds_transition to WaitActive
278712023-09-22T23:14:36.074ZINFOcrucible-pantry (datafile): [1] Transition from New to WaitActive
278722023-09-22T23:14:36.074ZINFOcrucible-pantry (datafile): [2] 521f2004-29de-4d41-9494-447a49569d29 (1e7e774e-f305-4cf5-a9d4-7b0b96e564fc) WaitActive WaitActive New ds_transition to WaitActive
278732023-09-22T23:14:36.074ZINFOcrucible-pantry (datafile): [2] Transition from New to WaitActive
27874 Sep 22 23:14:36.127 INFO Upstairs starts
27875 Sep 22 23:14:36.127 INFO Crucible Version: BuildInfo {
27876 version: "0.0.1",
27877 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
27878 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
27879 git_branch: "main",
27880 rustc_semver: "1.70.0",
27881 rustc_channel: "stable",
27882 rustc_host_triple: "x86_64-unknown-illumos",
27883 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
27884 cargo_triple: "x86_64-unknown-illumos",
27885 debug: true,
27886 opt_level: 0,
27887 }
27888 Sep 22 23:14:36.128 INFO Upstairs <-> Downstairs Message Version: 4
27889 Sep 22 23:14:36.128 INFO Crucible stats registered with UUID: 0d1229ab-bd36-49b6-95e1-d6287586b840
27890 Sep 22 23:14:36.128 INFO Crucible 0d1229ab-bd36-49b6-95e1-d6287586b840 has session id: b6e522d3-fd64-4f28-884d-5ed051ece380
27891 Sep 22 23:14:36.128 INFO [0] connecting to 127.0.0.1:38920, looper: 0
27892 Sep 22 23:14:36.128 INFO [1] connecting to 127.0.0.1:44435, looper: 1
27893 Sep 22 23:14:36.128 INFO [2] connecting to 127.0.0.1:51108, looper: 2
27894 Sep 22 23:14:36.128 INFO up_listen starts, task: up_listen
27895 Sep 22 23:14:36.128 INFO Wait for all three downstairs to come online
27896 Sep 22 23:14:36.128 INFO Flush timeout: 0.5
27897 Sep 22 23:14:36.128 INFO [0] 0d1229ab-bd36-49b6-95e1-d6287586b840 looper connected, looper: 0
27898 Sep 22 23:14:36.128 INFO [0] Proc runs for 127.0.0.1:38920 in state New
27899 Sep 22 23:14:36.128 INFO [1] 0d1229ab-bd36-49b6-95e1-d6287586b840 looper connected, looper: 1
27900 Sep 22 23:14:36.128 INFO [1] Proc runs for 127.0.0.1:44435 in state New
27901 Sep 22 23:14:36.128 INFO [2] 0d1229ab-bd36-49b6-95e1-d6287586b840 looper connected, looper: 2
27902 Sep 22 23:14:36.128 INFO [2] Proc runs for 127.0.0.1:51108 in state New
27903 Sep 22 23:14:36.128 INFO accepted connection from 127.0.0.1:33103, task: main
27904 Sep 22 23:14:36.128 INFO accepted connection from 127.0.0.1:50432, task: main
27905 Sep 22 23:14:36.128 INFO accepted connection from 127.0.0.1:35052, task: main
27906 Sep 22 23:14:36.129 INFO Connection request from 0d1229ab-bd36-49b6-95e1-d6287586b840 with version 4, task: proc
27907 Sep 22 23:14:36.129 INFO upstairs UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 } connected, version 4, task: proc
27908 Sep 22 23:14:36.129 INFO Connection request from 0d1229ab-bd36-49b6-95e1-d6287586b840 with version 4, task: proc
27909 Sep 22 23:14:36.129 INFO upstairs UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 } connected, version 4, task: proc
27910 Sep 22 23:14:36.129 INFO Connection request from 0d1229ab-bd36-49b6-95e1-d6287586b840 with version 4, task: proc
27911 Sep 22 23:14:36.129 INFO upstairs UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 } connected, version 4, task: proc
27912 Sep 22 23:14:36.129 INFO [0] 0d1229ab-bd36-49b6-95e1-d6287586b840 (950a06ad-9a63-4ae1-a8e0-14de8b7aaece) New New New ds_transition to WaitActive
27913 Sep 22 23:14:36.129 INFO [0] Transition from New to WaitActive
27914 Sep 22 23:14:36.129 INFO [1] 0d1229ab-bd36-49b6-95e1-d6287586b840 (950a06ad-9a63-4ae1-a8e0-14de8b7aaece) WaitActive New New ds_transition to WaitActive
27915 Sep 22 23:14:36.129 INFO [1] Transition from New to WaitActive
27916 Sep 22 23:14:36.129 INFO [2] 0d1229ab-bd36-49b6-95e1-d6287586b840 (950a06ad-9a63-4ae1-a8e0-14de8b7aaece) WaitActive WaitActive New ds_transition to WaitActive
27917 Sep 22 23:14:36.129 INFO [2] Transition from New to WaitActive
27918 The guest has requested activation
27919 Sep 22 23:14:36.129 INFO 0d1229ab-bd36-49b6-95e1-d6287586b840 active request set
27920 Sep 22 23:14:36.129 INFO [0] received activate with gen 2
27921 Sep 22 23:14:36.129 INFO [0] client got ds_active_rx, promote! session 950a06ad-9a63-4ae1-a8e0-14de8b7aaece
27922 Sep 22 23:14:36.130 INFO [1] received activate with gen 2
27923 Sep 22 23:14:36.130 INFO [1] client got ds_active_rx, promote! session 950a06ad-9a63-4ae1-a8e0-14de8b7aaece
27924 Sep 22 23:14:36.130 INFO [2] received activate with gen 2
27925 Sep 22 23:14:36.130 INFO [2] client got ds_active_rx, promote! session 950a06ad-9a63-4ae1-a8e0-14de8b7aaece
27926 Sep 22 23:14:36.130 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } to UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 }
27927 Sep 22 23:14:36.130 WARN Signaling to UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } thread that UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 } is being promoted (read-write)
27928 Sep 22 23:14:36.130 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } to UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 }
27929 Sep 22 23:14:36.130 WARN Signaling to UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } thread that UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 } is being promoted (read-write)
27930 Sep 22 23:14:36.130 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } to UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 }
27931 Sep 22 23:14:36.130 WARN Signaling to UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } thread that UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 } is being promoted (read-write)
27932 Sep 22 23:14:36.130 WARN Another upstairs UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 }, task: main
27933 Sep 22 23:14:36.130 INFO UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 } is now active (read-write)
27934 Sep 22 23:14:36.130 WARN Another upstairs UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 }, task: main
27935 Sep 22 23:14:36.131 INFO UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 } is now active (read-write)
27936 Sep 22 23:14:36.131 WARN Another upstairs UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 }, task: main
27937 Sep 22 23:14:36.131 INFO UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece, gen: 2 } is now active (read-write)
27938 Sep 22 23:14:36.131 INFO connection (127.0.0.1:59183): all done
27939 Sep 22 23:14:36.131 INFO connection (127.0.0.1:49569): all done
27940 Sep 22 23:14:36.131 INFO connection (127.0.0.1:64659): all done
279412023-09-22T23:14:36.131ZERROcrucible-pantry (datafile): [0] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) cmd_loop saw YouAreNoLongerActive 0d1229ab-bd36-49b6-95e1-d6287586b840 950a06ad-9a63-4ae1-a8e0-14de8b7aaece 2
279422023-09-22T23:14:36.131ZINFOcrucible-pantry (datafile): [0] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) Active Active Active ds_transition to Disabled
279432023-09-22T23:14:36.131ZINFOcrucible-pantry (datafile): [0] Transition from Active to Disabled
279442023-09-22T23:14:36.131ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 set inactive, session 16060f7a-9986-4db1-95c8-74c4346b949d
279452023-09-22T23:14:36.131ZERROcrucible-pantry (datafile): 127.0.0.1:38920: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 0
279462023-09-22T23:14:36.131ZINFOcrucible-pantry (datafile): [0] 0d1229ab-bd36-49b6-95e1-d6287586b840 Gone missing, transition from Disabled to Disconnected
279472023-09-22T23:14:36.131ZINFOcrucible-pantry (datafile): [0] 0d1229ab-bd36-49b6-95e1-d6287586b840 connection to 127.0.0.1:38920 closed looper = 0
279482023-09-22T23:14:36.131ZERROcrucible-pantry (datafile): [1] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) cmd_loop saw YouAreNoLongerActive 0d1229ab-bd36-49b6-95e1-d6287586b840 950a06ad-9a63-4ae1-a8e0-14de8b7aaece 2
279492023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [1] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) Disconnected Active Active ds_transition to Disabled
279502023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [1] Transition from Active to Disabled
279512023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 set inactive, session 16060f7a-9986-4db1-95c8-74c4346b949d
279522023-09-22T23:14:36.132ZERROcrucible-pantry (datafile): 127.0.0.1:44435: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 1
279532023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [1] 0d1229ab-bd36-49b6-95e1-d6287586b840 Gone missing, transition from Disabled to Disconnected
279542023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [1] 0d1229ab-bd36-49b6-95e1-d6287586b840 connection to 127.0.0.1:44435 closed looper = 1
279552023-09-22T23:14:36.132ZERROcrucible-pantry (datafile): [2] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) cmd_loop saw YouAreNoLongerActive 0d1229ab-bd36-49b6-95e1-d6287586b840 950a06ad-9a63-4ae1-a8e0-14de8b7aaece 2
279562023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [2] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) Disconnected Disconnected Active ds_transition to Disabled
279572023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [2] Transition from Active to Disabled
279582023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 set inactive, session 16060f7a-9986-4db1-95c8-74c4346b949d
279592023-09-22T23:14:36.132ZERROcrucible-pantry (datafile): 127.0.0.1:51108: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1 looper = 2
279602023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [2] 0d1229ab-bd36-49b6-95e1-d6287586b840 Gone missing, transition from Disabled to Disconnected
279612023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [2] 0d1229ab-bd36-49b6-95e1-d6287586b840 connection to 127.0.0.1:51108 closed looper = 2
279622023-09-22T23:14:36.132ZWARNcrucible-pantry (datafile): [0] pm_task rx.recv() is None
279632023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:38920 task reports connection:false
279642023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 Disconnected Disconnected Disconnected
279652023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:38920 task reports offline
27966 Sep 22 23:14:36.132 INFO [0] downstairs client at 127.0.0.1:38920 has UUID b38dafac-2da7-4558-aae1-959c564193b2
279672023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:44435 task reports connection:false
27968 Sep 22 23:14:36.132 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: b38dafac-2da7-4558-aae1-959c564193b2, encrypted: true, database_read_version: 1, database_write_version: 1 }
279692023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 Disconnected Disconnected Disconnected
279702023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:44435 task reports offline
27971 Sep 22 23:14:36.132 INFO 0d1229ab-bd36-49b6-95e1-d6287586b840 WaitActive WaitActive WaitActive
279722023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:51108 task reports connection:false
279732023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): 0d1229ab-bd36-49b6-95e1-d6287586b840 Disconnected Disconnected Disconnected
27974 Sep 22 23:14:36.132 INFO [1] downstairs client at 127.0.0.1:44435 has UUID 14399b3b-6e6e-4995-afe0-74c1441f0833
27975 Sep 22 23:14:36.132 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 14399b3b-6e6e-4995-afe0-74c1441f0833, encrypted: true, database_read_version: 1, database_write_version: 1 }
279762023-09-22T23:14:36.132ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:51108 task reports offline
279772023-09-22T23:14:36.132ZWARNcrucible-pantry (datafile): [1] pm_task rx.recv() is None
27978 Sep 22 23:14:36.132 INFO 0d1229ab-bd36-49b6-95e1-d6287586b840 WaitActive WaitActive WaitActive
279792023-09-22T23:14:36.132ZWARNcrucible-pantry (datafile): [2] pm_task rx.recv() is None
27980 Sep 22 23:14:36.132 INFO [2] downstairs client at 127.0.0.1:51108 has UUID 9da41dd6-4cdf-4b03-8df1-d13ee59384a3
27981 Sep 22 23:14:36.132 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 9da41dd6-4cdf-4b03-8df1-d13ee59384a3, encrypted: true, database_read_version: 1, database_write_version: 1 }
27982 Sep 22 23:14:36.132 INFO 0d1229ab-bd36-49b6-95e1-d6287586b840 WaitActive WaitActive WaitActive
27983 Sep 22 23:14:36.140 INFO Current flush_numbers [0..12]: [1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0]
27984 Sep 22 23:14:36.141 INFO Current flush_numbers [0..12]: [1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0]
27985 Sep 22 23:14:36.142 INFO Current flush_numbers [0..12]: [1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0]
27986 Sep 22 23:14:36.154 INFO Downstairs has completed Negotiation, task: proc
27987 Sep 22 23:14:36.155 INFO Downstairs has completed Negotiation, task: proc
27988 Sep 22 23:14:36.156 INFO Downstairs has completed Negotiation, task: proc
27989 Sep 22 23:14:36.156 INFO [0] 0d1229ab-bd36-49b6-95e1-d6287586b840 (950a06ad-9a63-4ae1-a8e0-14de8b7aaece) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
27990 Sep 22 23:14:36.156 INFO [0] Transition from WaitActive to WaitQuorum
27991 Sep 22 23:14:36.156 WARN [0] new RM replaced this: None
27992 Sep 22 23:14:36.157 INFO [0] Starts reconcile loop
27993 Sep 22 23:14:36.157 INFO [1] 0d1229ab-bd36-49b6-95e1-d6287586b840 (950a06ad-9a63-4ae1-a8e0-14de8b7aaece) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
27994 Sep 22 23:14:36.157 INFO [1] Transition from WaitActive to WaitQuorum
27995 Sep 22 23:14:36.157 WARN [1] new RM replaced this: None
27996 Sep 22 23:14:36.157 INFO [1] Starts reconcile loop
27997 Sep 22 23:14:36.157 INFO [2] 0d1229ab-bd36-49b6-95e1-d6287586b840 (950a06ad-9a63-4ae1-a8e0-14de8b7aaece) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
27998 Sep 22 23:14:36.157 INFO [2] Transition from WaitActive to WaitQuorum
27999 Sep 22 23:14:36.157 WARN [2] new RM replaced this: None
28000 Sep 22 23:14:36.157 INFO [2] Starts reconcile loop
28001 Sep 22 23:14:36.157 INFO [0] 127.0.0.1:38920 task reports connection:true
28002 Sep 22 23:14:36.157 INFO 0d1229ab-bd36-49b6-95e1-d6287586b840 WaitQuorum WaitQuorum WaitQuorum
28003 Sep 22 23:14:36.157 INFO [0]R flush_numbers[0..12]: [1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0]
28004 Sep 22 23:14:36.157 INFO [0]R generation[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]
28005 Sep 22 23:14:36.157 INFO [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
28006 Sep 22 23:14:36.157 INFO [1]R flush_numbers[0..12]: [1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0]
28007 Sep 22 23:14:36.157 INFO [1]R generation[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]
28008 Sep 22 23:14:36.157 INFO [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
28009 Sep 22 23:14:36.157 INFO [2]R flush_numbers[0..12]: [1, 1, 2, 2, 3, 3, 4, 4, 0, 0, 0, 0]
28010 Sep 22 23:14:36.157 INFO [2]R generation[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0]
28011 Sep 22 23:14:36.157 INFO [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
28012 Sep 22 23:14:36.157 INFO Max found gen is 2
28013 Sep 22 23:14:36.157 INFO Generation requested: 2 >= found:2
28014 Sep 22 23:14:36.157 INFO Next flush: 5
28015 Sep 22 23:14:36.157 INFO All extents match
28016 Sep 22 23:14:36.157 INFO No downstairs repair required
28017 Sep 22 23:14:36.157 INFO No initial repair work was required
28018 Sep 22 23:14:36.157 INFO Set Downstairs and Upstairs active
28019 Sep 22 23:14:36.157 INFO 0d1229ab-bd36-49b6-95e1-d6287586b840 is now active with session: 950a06ad-9a63-4ae1-a8e0-14de8b7aaece
28020 Sep 22 23:14:36.157 INFO 0d1229ab-bd36-49b6-95e1-d6287586b840 Set Active after no repair
28021 Sep 22 23:14:36.157 INFO Notify all downstairs, region set compare is done.
28022 Sep 22 23:14:36.157 INFO Set check for repair
28023 Sep 22 23:14:36.157 INFO [1] 127.0.0.1:44435 task reports connection:true
28024 Sep 22 23:14:36.158 INFO 0d1229ab-bd36-49b6-95e1-d6287586b840 Active Active Active
28025 Sep 22 23:14:36.158 INFO Set check for repair
28026 Sep 22 23:14:36.158 INFO [2] 127.0.0.1:51108 task reports connection:true
28027 Sep 22 23:14:36.158 INFO 0d1229ab-bd36-49b6-95e1-d6287586b840 Active Active Active
28028 Sep 22 23:14:36.158 INFO Set check for repair
28029 Sep 22 23:14:36.158 INFO [0] received reconcile message
28030 Sep 22 23:14:36.158 INFO [0] All repairs completed, exit
28031 Sep 22 23:14:36.158 INFO [0] Starts cmd_loop
28032 Sep 22 23:14:36.158 INFO [1] received reconcile message
28033 Sep 22 23:14:36.158 INFO [1] All repairs completed, exit
28034 Sep 22 23:14:36.158 INFO [1] Starts cmd_loop
28035 Sep 22 23:14:36.158 INFO [2] received reconcile message
28036 Sep 22 23:14:36.158 INFO [2] All repairs completed, exit
28037 Sep 22 23:14:36.158 INFO [2] Starts cmd_loop
28038 The guest has finished waiting for activation
28039 Sep 22 23:14:36.160 DEBG IO Read 1000 has deps []
28040 Sep 22 23:14:36.197 DEBG Read :1000 deps:[] res:true
28041 Sep 22 23:14:36.228 DEBG IO Write 1016 has deps [JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
28042 Sep 22 23:14:36.228 DEBG up_ds_listen was notified
28043 Sep 22 23:14:36.228 DEBG up_ds_listen process 1016
28044 Sep 22 23:14:36.229 DEBG [A] ack job 1016:17, : downstairs
28045 Sep 22 23:14:36.229 DEBG up_ds_listen checked 1 jobs, back to waiting
28046 Sep 22 23:14:36.229 DEBG IO Flush 1017 has deps [JobId(1016), JobId(1015), JobId(1014)]
28047 Sep 22 23:14:36.253 DEBG Read :1000 deps:[] res:true
28048 Sep 22 23:14:36.312 DEBG Read :1000 deps:[] res:true
280492023-09-22T23:14:36.349ZINFOcrucible-pantry (datafile): volume fc5b0c26-5d3b-433c-b382-b2a002415eea constructed ok
28050 The guest has requested activation
280512023-09-22T23:14:36.349ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 active request set
280522023-09-22T23:14:36.349ZINFOcrucible-pantry (datafile): [0] received activate with gen 2
280532023-09-22T23:14:36.349ZINFOcrucible-pantry (datafile): [0] client got ds_active_rx, promote! session 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc
280542023-09-22T23:14:36.349ZINFOcrucible-pantry (datafile): [1] received activate with gen 2
280552023-09-22T23:14:36.349ZINFOcrucible-pantry (datafile): [1] client got ds_active_rx, promote! session 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc
28056 Sep 22 23:14:36.349 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } to UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 }
280572023-09-22T23:14:36.349ZINFOcrucible-pantry (datafile): [2] received activate with gen 2
28058 Sep 22 23:14:36.349 WARN Signaling to UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } thread that UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } is being promoted (read-write)
280592023-09-22T23:14:36.349ZINFOcrucible-pantry (datafile): [2] client got ds_active_rx, promote! session 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc
28060 Sep 22 23:14:36.349 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } to UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 }
28061 Sep 22 23:14:36.349 WARN Signaling to UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } thread that UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } is being promoted (read-write)
28062 Sep 22 23:14:36.349 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } to UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 }
28063 Sep 22 23:14:36.349 WARN Signaling to UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } thread that UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } is being promoted (read-write)
28064 Sep 22 23:14:36.350 WARN Another upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 }, task: main
28065 Sep 22 23:14:36.350 INFO UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } is now active (read-write)
28066 Sep 22 23:14:36.350 WARN Another upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 }, task: main
28067 Sep 22 23:14:36.350 INFO UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } is now active (read-write)
28068 Sep 22 23:14:36.350 WARN Another upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 }, task: main
28069 Sep 22 23:14:36.350 INFO UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } is now active (read-write)
28070 Sep 22 23:14:36.350 INFO connection (127.0.0.1:52189): all done
28071 Sep 22 23:14:36.350 INFO connection (127.0.0.1:54318): all done
28072 Sep 22 23:14:36.350 INFO connection (127.0.0.1:37634): all done
28073 Sep 22 23:14:36.350 ERRO [0] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) cmd_loop saw YouAreNoLongerActive 521f2004-29de-4d41-9494-447a49569d29 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc 2
28074 Sep 22 23:14:36.350 INFO [0] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) Active Active Active ds_transition to Disabled
28075 Sep 22 23:14:36.351 INFO [0] Transition from Active to Disabled
28076 Sep 22 23:14:36.351 INFO 521f2004-29de-4d41-9494-447a49569d29 set inactive, session ca92c31b-eb28-41e5-843d-26b8bd24de02
28077 Sep 22 23:14:36.351 ERRO 127.0.0.1:63639: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1, looper: 0
28078 Sep 22 23:14:36.351 INFO [0] 521f2004-29de-4d41-9494-447a49569d29 Gone missing, transition from Disabled to Disconnected
28079 Sep 22 23:14:36.351 INFO [0] 521f2004-29de-4d41-9494-447a49569d29 connection to 127.0.0.1:63639 closed, looper: 0
28080 Sep 22 23:14:36.351 ERRO [1] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) cmd_loop saw YouAreNoLongerActive 521f2004-29de-4d41-9494-447a49569d29 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc 2
28081 Sep 22 23:14:36.351 INFO [1] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) Disconnected Active Active ds_transition to Disabled
28082 Sep 22 23:14:36.351 INFO [1] Transition from Active to Disabled
28083 Sep 22 23:14:36.351 INFO 521f2004-29de-4d41-9494-447a49569d29 set inactive, session ca92c31b-eb28-41e5-843d-26b8bd24de02
28084 Sep 22 23:14:36.351 ERRO 127.0.0.1:43949: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1, looper: 1
28085 Sep 22 23:14:36.351 INFO [1] 521f2004-29de-4d41-9494-447a49569d29 Gone missing, transition from Disabled to Disconnected
28086 Sep 22 23:14:36.351 INFO [1] 521f2004-29de-4d41-9494-447a49569d29 connection to 127.0.0.1:43949 closed, looper: 1
28087 Sep 22 23:14:36.351 ERRO [2] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) cmd_loop saw YouAreNoLongerActive 521f2004-29de-4d41-9494-447a49569d29 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc 2
28088 Sep 22 23:14:36.351 INFO [2] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) Disconnected Disconnected Active ds_transition to Disabled
28089 Sep 22 23:14:36.351 INFO [2] Transition from Active to Disabled
28090 Sep 22 23:14:36.351 INFO 521f2004-29de-4d41-9494-447a49569d29 set inactive, session ca92c31b-eb28-41e5-843d-26b8bd24de02
28091 Sep 22 23:14:36.351 ERRO 127.0.0.1:63175: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1, looper: 2
28092 Sep 22 23:14:36.351 INFO [2] 521f2004-29de-4d41-9494-447a49569d29 Gone missing, transition from Disabled to Disconnected
28093 Sep 22 23:14:36.351 INFO [2] 521f2004-29de-4d41-9494-447a49569d29 connection to 127.0.0.1:63175 closed, looper: 2
28094 Sep 22 23:14:36.351 WARN [0] pm_task rx.recv() is None
28095 Sep 22 23:14:36.351 INFO [0] 127.0.0.1:63639 task reports connection:false
28096 Sep 22 23:14:36.351 INFO 521f2004-29de-4d41-9494-447a49569d29 Disconnected Disconnected Disconnected
28097 Sep 22 23:14:36.351 INFO [0] 127.0.0.1:63639 task reports offline
28098 Sep 22 23:14:36.351 INFO [1] 127.0.0.1:43949 task reports connection:false
28099 Sep 22 23:14:36.351 INFO 521f2004-29de-4d41-9494-447a49569d29 Disconnected Disconnected Disconnected
28100 Sep 22 23:14:36.351 INFO [1] 127.0.0.1:43949 task reports offline
28101 Sep 22 23:14:36.351 INFO [2] 127.0.0.1:63175 task reports connection:false
28102 Sep 22 23:14:36.351 INFO 521f2004-29de-4d41-9494-447a49569d29 Disconnected Disconnected Disconnected
28103 Sep 22 23:14:36.351 INFO [2] 127.0.0.1:63175 task reports offline
28104 Sep 22 23:14:36.351 WARN [1] pm_task rx.recv() is None
28105 Sep 22 23:14:36.351 WARN [2] pm_task rx.recv() is None
281062023-09-22T23:14:36.352ZINFOcrucible-pantry (datafile): [0] downstairs client at 127.0.0.1:63639 has UUID bafba53d-f7d4-4132-95f6-92b6a45487a9
281072023-09-22T23:14:36.352ZINFOcrucible-pantry (datafile): [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: bafba53d-f7d4-4132-95f6-92b6a45487a9, encrypted: true, database_read_version: 1, database_write_version: 1 }
281082023-09-22T23:14:36.352ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 WaitActive WaitActive WaitActive
281092023-09-22T23:14:36.352ZINFOcrucible-pantry (datafile): [1] downstairs client at 127.0.0.1:43949 has UUID 1e7a075d-e51e-4361-bdf9-32f916f89647
281102023-09-22T23:14:36.352ZINFOcrucible-pantry (datafile): [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 1e7a075d-e51e-4361-bdf9-32f916f89647, encrypted: true, database_read_version: 1, database_write_version: 1 }
281112023-09-22T23:14:36.352ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 WaitActive WaitActive WaitActive
281122023-09-22T23:14:36.352ZINFOcrucible-pantry (datafile): [2] downstairs client at 127.0.0.1:63175 has UUID b89ddae9-a405-4d4a-865d-e95f1fd0a715
281132023-09-22T23:14:36.352ZINFOcrucible-pantry (datafile): [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: b89ddae9-a405-4d4a-865d-e95f1fd0a715, encrypted: true, database_read_version: 1, database_write_version: 1 }
281142023-09-22T23:14:36.352ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 WaitActive WaitActive WaitActive
28115 Sep 22 23:14:36.360 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
28116 Sep 22 23:14:36.361 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
28117 Sep 22 23:14:36.362 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
28118 Sep 22 23:14:36.375 INFO Downstairs has completed Negotiation, task: proc
28119 Sep 22 23:14:36.376 INFO Downstairs has completed Negotiation, task: proc
28120 Sep 22 23:14:36.376 INFO Downstairs has completed Negotiation, task: proc
281212023-09-22T23:14:36.377ZINFOcrucible-pantry (datafile): [0] 521f2004-29de-4d41-9494-447a49569d29 (1e7e774e-f305-4cf5-a9d4-7b0b96e564fc) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
281222023-09-22T23:14:36.377ZINFOcrucible-pantry (datafile): [0] Transition from WaitActive to WaitQuorum
281232023-09-22T23:14:36.377ZWARNcrucible-pantry (datafile): [0] new RM replaced this: None
281242023-09-22T23:14:36.377ZINFOcrucible-pantry (datafile): [0] Starts reconcile loop
281252023-09-22T23:14:36.377ZINFOcrucible-pantry (datafile): [1] 521f2004-29de-4d41-9494-447a49569d29 (1e7e774e-f305-4cf5-a9d4-7b0b96e564fc) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
281262023-09-22T23:14:36.377ZINFOcrucible-pantry (datafile): [1] Transition from WaitActive to WaitQuorum
281272023-09-22T23:14:36.377ZWARNcrucible-pantry (datafile): [1] new RM replaced this: None
281282023-09-22T23:14:36.377ZINFOcrucible-pantry (datafile): [1] Starts reconcile loop
281292023-09-22T23:14:36.377ZINFOcrucible-pantry (datafile): [2] 521f2004-29de-4d41-9494-447a49569d29 (1e7e774e-f305-4cf5-a9d4-7b0b96e564fc) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
281302023-09-22T23:14:36.377ZINFOcrucible-pantry (datafile): [2] Transition from WaitActive to WaitQuorum
281312023-09-22T23:14:36.377ZWARNcrucible-pantry (datafile): [2] new RM replaced this: None
281322023-09-22T23:14:36.377ZINFOcrucible-pantry (datafile): [2] Starts reconcile loop
281332023-09-22T23:14:36.377ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:63639 task reports connection:true
281342023-09-22T23:14:36.377ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 WaitQuorum WaitQuorum WaitQuorum
281352023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
281362023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
28137 The guest has finished waiting for activation
281382023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
281392023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): [1]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
281402023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): [1]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
281412023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
281422023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): [2]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
281432023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): [2]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
281442023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
281452023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): Max found gen is 1
281462023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): Generation requested: 2 >= found:1
281472023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): Next flush: 1
281482023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): All extents match
281492023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): No downstairs repair required
281502023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): No initial repair work was required
281512023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): Set Downstairs and Upstairs active
281522023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 is now active with session: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc
281532023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 Set Active after no repair
281542023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): Notify all downstairs, region set compare is done.
281552023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): Set check for repair
281562023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:43949 task reports connection:true
281572023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 Active Active Active
281582023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): Set check for repair
281592023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:63175 task reports connection:true
281602023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 Active Active Active
281612023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): Set check for repair
281622023-09-22T23:14:36.378ZINFOcrucible-pantry (datafile): [0] received reconcile message
281632023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): [0] All repairs completed, exit
281642023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): [0] Starts cmd_loop
281652023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): [1] received reconcile message
281662023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): [1] All repairs completed, exit
281672023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): [1] Starts cmd_loop
281682023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): [2] received reconcile message
281692023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): [2] All repairs completed, exit
281702023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): [2] Starts cmd_loop
281712023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): volume fc5b0c26-5d3b-433c-b382-b2a002415eea activated ok
281722023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): volume fc5b0c26-5d3b-433c-b382-b2a002415eea constructed and inserted ok
281732023-09-22T23:14:36.379ZINFOcrucible-pantry (dropshot): request completed latency_us = 370911 local_addr = 127.0.0.1:49411 method = POST remote_addr = 127.0.0.1:43906 req_id = 6e1960e8-b6da-4b37-898f-cb0ed7d8b107 response_code = 200 uri = /crucible/pantry/0/volume/fc5b0c26-5d3b-433c-b382-b2a002415eea
281742023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): Scrub check for fc5b0c26-5d3b-433c-b382-b2a002415eea
281752023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): Scrub for fc5b0c26-5d3b-433c-b382-b2a002415eea begins
281762023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): Scrub with total_size:1966080 block_size:512
281772023-09-22T23:14:36.379ZINFOcrucible-pantry (datafile): Scrubs from block 0 to 3840 in (256) 131072 size IOs pm:0
281782023-09-22T23:14:36.379ZINFOcrucible-pantry (dropshot): request completed latency_us = 491 local_addr = 127.0.0.1:49411 method = POST remote_addr = 127.0.0.1:43906 req_id = 86c01c82-da45-4c1a-ba27-6bcbb1a7137e response_code = 200 uri = /crucible/pantry/0/volume/fc5b0c26-5d3b-433c-b382-b2a002415eea/scrub
281792023-09-22T23:14:36.380ZINFOcrucible-pantry (dropshot): request completed latency_us = 166 local_addr = 127.0.0.1:49411 method = GET remote_addr = 127.0.0.1:43906 req_id = 0fcc339e-b13c-47ab-b5fc-4d74dab524ed response_code = 200 uri = /crucible/pantry/0/job/9582738c-4762-4d3d-9423-661cc66a6ac0/is_finished
28180 Sep 22 23:14:36.564 DEBG IO Write 1018 has deps [JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
28181 Sep 22 23:14:36.564 DEBG up_ds_listen was notified
28182 Sep 22 23:14:36.564 DEBG up_ds_listen process 1018
28183 Sep 22 23:14:36.564 DEBG [A] ack job 1018:19, : downstairs
28184 Sep 22 23:14:36.564 DEBG up_ds_listen checked 1 jobs, back to waiting
28185 Sep 22 23:14:36.596 DEBG Write :1000 deps:[] res:true
281862023-09-22T23:14:36.671ZINFOcrucible-pantry (datafile): Scrub at offset 256/3840 sp:256
281872023-09-22T23:14:36.900ZINFOcrucible-pantry (datafile): Scrub at offset 512/3840 sp:512
28188 Sep 22 23:14:36.930 DEBG IO Write 1019 has deps [JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
28189 Sep 22 23:14:36.930 DEBG up_ds_listen was notified
28190 Sep 22 23:14:36.930 DEBG up_ds_listen process 1019
28191 Sep 22 23:14:36.930 DEBG [A] ack job 1019:20, : downstairs
28192 Sep 22 23:14:36.930 DEBG up_ds_listen checked 1 jobs, back to waiting
28193 Sep 22 23:14:36.930 DEBG IO Flush 1020 has deps [JobId(1019), JobId(1018), JobId(1017)]
28194 Sep 22 23:14:37.015 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:2
28195 Sep 22 23:14:37.016 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:2
28196 Sep 22 23:14:37.016 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:2
281972023-09-22T23:14:37.129ZINFOcrucible-pantry (datafile): Scrub at offset 768/3840 sp:768
28198 Sep 22 23:14:37.267 DEBG [0] Read AckReady 1000, : downstairs
28199 Sep 22 23:14:37.271 DEBG IO Write 1021 has deps [JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
28200 Sep 22 23:14:37.271 DEBG up_ds_listen was notified
28201 Sep 22 23:14:37.271 DEBG up_ds_listen process 1021
28202 Sep 22 23:14:37.271 DEBG [A] ack job 1021:22, : downstairs
28203 Sep 22 23:14:37.271 DEBG up_ds_listen checked 1 jobs, back to waiting
28204 Sep 22 23:14:37.362 INFO [0] 521f2004-29de-4d41-9494-447a49569d29 looper connected, looper: 0
282052023-09-22T23:14:37.362ZINFOcrucible-pantry (datafile): Scrub at offset 1024/3840 sp:1024
28206 Sep 22 23:14:37.363 INFO [0] Proc runs for 127.0.0.1:63639 in state Disconnected
28207 Sep 22 23:14:37.363 INFO [1] 521f2004-29de-4d41-9494-447a49569d29 looper connected, looper: 1
28208 Sep 22 23:14:37.363 INFO [1] Proc runs for 127.0.0.1:43949 in state Disconnected
28209 Sep 22 23:14:37.363 INFO [2] 521f2004-29de-4d41-9494-447a49569d29 looper connected, looper: 2
28210 Sep 22 23:14:37.363 INFO [2] Proc runs for 127.0.0.1:63175 in state Disconnected
28211 Sep 22 23:14:37.363 INFO accepted connection from 127.0.0.1:57238, task: main
28212 Sep 22 23:14:37.363 INFO accepted connection from 127.0.0.1:59756, task: main
28213 Sep 22 23:14:37.363 INFO accepted connection from 127.0.0.1:38472, task: main
28214 Sep 22 23:14:37.363 INFO Connection request from 521f2004-29de-4d41-9494-447a49569d29 with version 4, task: proc
28215 Sep 22 23:14:37.363 INFO upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } connected, version 4, task: proc
28216 Sep 22 23:14:37.363 INFO Connection request from 521f2004-29de-4d41-9494-447a49569d29 with version 4, task: proc
28217 Sep 22 23:14:37.363 INFO upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } connected, version 4, task: proc
28218 Sep 22 23:14:37.363 INFO Connection request from 521f2004-29de-4d41-9494-447a49569d29 with version 4, task: proc
28219 Sep 22 23:14:37.363 INFO upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: ca92c31b-eb28-41e5-843d-26b8bd24de02, gen: 1 } connected, version 4, task: proc
28220 Sep 22 23:14:37.363 INFO [0] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) Disconnected Disconnected Disconnected ds_transition to WaitActive
28221 Sep 22 23:14:37.363 INFO [0] Transition from Disconnected to WaitActive
28222 Sep 22 23:14:37.363 INFO [1] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) WaitActive Disconnected Disconnected ds_transition to WaitActive
28223 Sep 22 23:14:37.363 INFO [1] Transition from Disconnected to WaitActive
28224 Sep 22 23:14:37.363 INFO [2] 521f2004-29de-4d41-9494-447a49569d29 (ca92c31b-eb28-41e5-843d-26b8bd24de02) WaitActive WaitActive Disconnected ds_transition to WaitActive
28225 Sep 22 23:14:37.363 INFO [2] Transition from Disconnected to WaitActive
282262023-09-22T23:14:37.379ZINFOcrucible-pantry (datafile): Checking if live repair is needed
282272023-09-22T23:14:37.379ZINFOcrucible-pantry (datafile): No Live Repair required at this time
282282023-09-22T23:14:37.382ZINFOcrucible-pantry (dropshot): request completed latency_us = 215 local_addr = 127.0.0.1:49411 method = GET remote_addr = 127.0.0.1:43906 req_id = dd4bfebc-ba03-4174-b9cf-3f9167bfc939 response_code = 200 uri = /crucible/pantry/0/job/9582738c-4762-4d3d-9423-661cc66a6ac0/is_finished
28229 Sep 22 23:14:37.516 DEBG Flush :1005 extent_limit None deps:[JobId(1004), JobId(1003)] res:true f:2 g:2
28230 Sep 22 23:14:37.516 DEBG Flush :1005 extent_limit None deps:[JobId(1004), JobId(1003)] res:true f:2 g:2
28231 Sep 22 23:14:37.517 DEBG Flush :1005 extent_limit None deps:[JobId(1004), JobId(1003)] res:true f:2 g:2
282322023-09-22T23:14:37.614ZINFOcrucible-pantry (datafile): Scrub at offset 1280/3840 sp:1280
28233 Sep 22 23:14:37.620 DEBG IO Write 1022 has deps [JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
28234 Sep 22 23:14:37.620 DEBG up_ds_listen was notified
28235 Sep 22 23:14:37.620 DEBG up_ds_listen process 1022
28236 Sep 22 23:14:37.620 DEBG [A] ack job 1022:23, : downstairs
28237 Sep 22 23:14:37.620 DEBG up_ds_listen checked 1 jobs, back to waiting
28238 Sep 22 23:14:37.621 DEBG IO Flush 1023 has deps [JobId(1022), JobId(1021), JobId(1020)]
282392023-09-22T23:14:37.843ZINFOcrucible-pantry (datafile): Scrub at offset 1536/3840 sp:1536
28240 Sep 22 23:14:37.900 DEBG [1] Read already AckReady 1000, : downstairs
28241 Sep 22 23:14:37.961 DEBG IO Write 1024 has deps [JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
28242 Sep 22 23:14:37.961 DEBG up_ds_listen was notified
28243 Sep 22 23:14:37.961 DEBG up_ds_listen process 1024
28244 Sep 22 23:14:37.961 DEBG [A] ack job 1024:25, : downstairs
28245 Sep 22 23:14:37.961 DEBG up_ds_listen checked 1 jobs, back to waiting
28246 Sep 22 23:14:38.089 DEBG Flush :1009 extent_limit None deps:[JobId(1008), JobId(1007), JobId(1006)] res:true f:3 g:2
28247 Sep 22 23:14:38.089 DEBG Flush :1009 extent_limit None deps:[JobId(1008), JobId(1007), JobId(1006)] res:true f:3 g:2
28248 Sep 22 23:14:38.089 DEBG Flush :1009 extent_limit None deps:[JobId(1008), JobId(1007), JobId(1006)] res:true f:3 g:2
282492023-09-22T23:14:38.090ZINFOcrucible-pantry (datafile): Scrub at offset 1792/3840 sp:1792
28250 Sep 22 23:14:38.304 DEBG IO Write 1025 has deps [JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
28251 Sep 22 23:14:38.304 DEBG up_ds_listen was notified
28252 Sep 22 23:14:38.304 DEBG up_ds_listen process 1025
28253 Sep 22 23:14:38.304 DEBG [A] ack job 1025:26, : downstairs
28254 Sep 22 23:14:38.304 DEBG up_ds_listen checked 1 jobs, back to waiting
28255 Sep 22 23:14:38.304 DEBG IO Flush 1026 has deps [JobId(1025), JobId(1024), JobId(1023)]
282562023-09-22T23:14:38.316ZINFOcrucible-pantry (datafile): Scrub at offset 2048/3840 sp:2048
282572023-09-22T23:14:38.384ZINFOcrucible-pantry (dropshot): request completed latency_us = 234 local_addr = 127.0.0.1:49411 method = GET remote_addr = 127.0.0.1:43906 req_id = 81dcca62-e53d-4efa-866c-1484734c6836 response_code = 200 uri = /crucible/pantry/0/job/9582738c-4762-4d3d-9423-661cc66a6ac0/is_finished
28258 Sep 22 23:14:38.522 DEBG [2] Read already AckReady 1000, : downstairs
28259 Sep 22 23:14:38.527 DEBG up_ds_listen was notified
28260 Sep 22 23:14:38.527 DEBG up_ds_listen process 1000
28261 Sep 22 23:14:38.527 DEBG [A] ack job 1000:1, : downstairs
282622023-09-22T23:14:38.545ZINFOcrucible-pantry (datafile): Scrub at offset 2304/3840 sp:2304
28263 Sep 22 23:14:38.559 DEBG Flush :1012 extent_limit None deps:[JobId(1011), JobId(1010)] res:true f:4 g:2
28264 Sep 22 23:14:38.559 DEBG Flush :1012 extent_limit None deps:[JobId(1011), JobId(1010)] res:true f:4 g:2
28265 Sep 22 23:14:38.559 DEBG Flush :1012 extent_limit None deps:[JobId(1011), JobId(1010)] res:true f:4 g:2
28266 Sep 22 23:14:38.627 DEBG up_ds_listen checked 1 jobs, back to waiting
28267 0 512 ok
28268 512 1024 ok
28269 1024 1536 ok
28270 1536 2048 ok
28271 2048 2560 ok
28272 2560 3072 ok
28273 3072 3584 ok
28274 3584 4096 ok
28275 4096 4608 ok
28276 4608 5120 ok
28277 5120 5632 ok
28278 5632 6144 ok
28279 6144 6656 ok
28280 6656 7168 ok
28281 7168 7680 ok
28282 7680 8192 ok
28283 8192 8704 ok
28284 8704 9216 ok
28285 9216 9728 ok
28286 9728 10240 ok
28287 10240 10752 ok
28288 10752 11264 ok
28289 11264 11776 ok
28290 11776 12288 ok
28291 12288 12800 ok
28292 12800 13312 ok
28293 13312 13824 ok
28294 13824 14336 ok
28295 14336 14848 ok
28296 14848 15360 ok
28297 15360 15872 ok
28298 15872 16384 ok
28299 16384 16896 ok
28300 16896 17408 ok
28301 17408 17920 ok
28302 17920 18432 ok
28303 18432 18944 ok
28304 18944 19456 ok
28305 19456 19968 ok
28306 19968 20480 ok
28307 20480 20992 ok
28308 20992 21504 ok
28309 21504 22016 ok
28310 22016 22528 ok
28311 22528 23040 ok
28312 23040 23552 ok
28313 23552 24064 ok
28314 24064 24576 ok
28315 24576 25088 ok
28316 25088 25600 ok
28317 25600 26112 ok
28318 26112 26624 ok
28319 26624 27136 ok
28320 27136 27648 ok
28321 27648 28160 ok
28322 28160 28672 ok
28323 28672 29184 ok
28324 29184 29696 ok
28325 29696 30208 ok
28326 30208 30720 ok
28327 30720 31232 ok
28328 31232 31744 ok
28329 31744 32256 ok
28330 32256 32768 ok
28331 32768 33280 ok
28332 33280 33792 ok
28333 33792 34304 ok
28334 34304 34816 ok
28335 34816 35328 ok
28336 35328 35840 ok
28337 35840 36352 ok
28338 36352 36864 ok
28339 36864 37376 ok
28340 37376 37888 ok
28341 37888 38400 ok
28342 38400 38912 ok
28343 38912 39424 ok
28344 39424 39936 ok
28345 39936 40448 ok
28346 40448 40960 ok
28347 40960 41472 ok
28348 41472 41984 ok
28349 41984 42496 ok
28350 42496 43008 ok
28351 43008 43520 ok
28352 43520 44032 ok
28353 44032 44544 ok
28354 44544 45056 ok
28355 45056 45568 ok
28356 45568 46080 ok
28357 46080 46592 ok
28358 46592 47104 ok
28359 47104 47616 ok
28360 47616 48128 ok
28361 48128 48640 ok
28362 48640 49152 ok
28363 49152 49664 ok
28364 49664 50176 ok
28365 50176 50688 ok
28366 50688 51200 ok
28367 51200 51712 ok
28368 51712 52224 ok
28369 52224 52736 ok
28370 52736 53248 ok
28371 53248 53760 ok
28372 53760 54272 ok
28373 54272 54784 ok
28374 54784 55296 ok
28375 55296 55808 ok
28376 55808 56320 ok
28377 56320 56832 ok
28378 56832 57344 ok
28379 57344 57856 ok
28380 57856 58368 ok
28381 58368 58880 ok
28382 58880 59392 ok
28383 59392 59904 ok
28384 59904 60416 ok
28385 60416 60928 ok
28386 Sep 22 23:14:38.630 INFO Checking if live repair is needed
28387 Sep 22 23:14:38.630 INFO No Live Repair required at this time
28388 Sep 22 23:14:38.630 DEBG IO Flush 1001 has deps [JobId(1000)]
28389 60928 61440 ok
28390 61440 61952 ok
28391 61952 62464 ok
28392 62464 62976 ok
28393 62976 63488 ok
28394 63488 64000 ok
28395 64000 64512 ok
28396 64512 65024 ok
28397 65024 65536 ok
28398 65536 66048 ok
28399 66048 66560 ok
28400 66560 67072 ok
28401 67072 67584 ok
28402 67584 68096 ok
28403 68096 68608 ok
28404 68608 69120 ok
28405 69120 69632 ok
28406 69632 70144 ok
28407 70144 70656 ok
28408 70656 71168 ok
28409 71168 71680 ok
28410 71680 72192 ok
28411 72192 72704 ok
28412 72704 73216 ok
28413 73216 73728 ok
28414 73728 74240 ok
28415 74240 74752 ok
28416 74752 75264 ok
28417 75264 75776 ok
28418 75776 76288 ok
28419 76288 76800 ok
28420 76800 77312 ok
28421 77312 77824 ok
28422 77824 78336 ok
28423 78336 78848 ok
28424 78848 79360 ok
28425 79360 79872 ok
28426 79872 80384 ok
28427 80384 80896 ok
28428 80896 81408 ok
28429 81408 81920 ok
28430 81920 82432 ok
28431 82432 82944 ok
28432 82944 83456 ok
28433 83456 83968 ok
28434 83968 84480 ok
28435 84480 84992 ok
28436 84992 85504 ok
28437 85504 86016 ok
28438 86016 86528 ok
28439 86528 87040 ok
28440 87040 87552 ok
28441 87552 88064 ok
28442 88064 88576 ok
28443 88576 89088 ok
28444 89088 89600 ok
28445 89600 90112 ok
28446 90112 90624 ok
28447 90624 91136 ok
28448 91136 91648 ok
28449 91648 92160 ok
28450 92160 92672 ok
28451 92672 93184 ok
28452 93184 93696 ok
28453 93696 94208 ok
28454 94208 94720 ok
28455 94720 95232 ok
28456 95232 95744 ok
28457 95744 96256 ok
28458 96256 96768 ok
28459 96768 97280 ok
28460 97280 97792 ok
28461 97792 98304 ok
28462 98304 98816 ok
28463 98816 99328 ok
28464 99328 99840 ok
28465 99840 100352 ok
28466 100352 100864 ok
28467 100864 101376 ok
28468 101376 101888 ok
28469 101888 102400 ok
28470 102400 102912 ok
28471 102912 103424 ok
28472 103424 103936 ok
28473 103936 104448 ok
28474 104448 104960 ok
28475 104960 105472 ok
28476 105472 105984 ok
28477 105984 106496 ok
28478 106496 107008 ok
28479 107008 107520 ok
28480 107520 108032 ok
28481 108032 108544 ok
28482 108544 109056 ok
28483 109056 109568 ok
28484 109568 110080 ok
28485 110080 110592 ok
28486 110592 111104 ok
28487 111104 111616 ok
28488 111616 112128 ok
28489 112128 112640 ok
28490 112640 113152 ok
28491 113152 113664 ok
28492 113664 114176 ok
28493 114176 114688 ok
28494 114688 115200 ok
28495 115200 115712 ok
28496 115712 116224 ok
28497 116224 116736 ok
28498 116736 117248 ok
28499 117248 117760 ok
28500 117760 118272 ok
28501 118272 118784 ok
28502 118784 119296 ok
28503 119296 119808 ok
28504 119808 120320 ok
28505 120320 120832 ok
28506 120832 121344 ok
28507 121344 121856 ok
28508 121856 122368 ok
28509 122368 122880 ok
28510 122880 123392 ok
28511 123392 123904 ok
28512 123904 124416 ok
28513 124416 124928 ok
28514 124928 125440 ok
28515 125440 125952 ok
28516 125952 126464 ok
285172023-09-22T23:14:38.631ZINFOcrucible-pantry (datafile): [0] 0d1229ab-bd36-49b6-95e1-d6287586b840 looper connected looper = 0
285182023-09-22T23:14:38.631ZINFOcrucible-pantry (datafile): [0] Proc runs for 127.0.0.1:38920 in state Disconnected
285192023-09-22T23:14:38.631ZINFOcrucible-pantry (datafile): [1] 0d1229ab-bd36-49b6-95e1-d6287586b840 looper connected looper = 1
28520 Sep 22 23:14:38.631 INFO accepted connection from 127.0.0.1:42804, task: main
285212023-09-22T23:14:38.631ZINFOcrucible-pantry (datafile): [1] Proc runs for 127.0.0.1:44435 in state Disconnected
28522 Sep 22 23:14:38.631 INFO accepted connection from 127.0.0.1:34565, task: main
285232023-09-22T23:14:38.631ZINFOcrucible-pantry (datafile): [2] 0d1229ab-bd36-49b6-95e1-d6287586b840 looper connected looper = 2
285242023-09-22T23:14:38.631ZINFOcrucible-pantry (datafile): [2] Proc runs for 127.0.0.1:51108 in state Disconnected
28525 Sep 22 23:14:38.631 INFO accepted connection from 127.0.0.1:42753, task: main
28526 Sep 22 23:14:38.631 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:5 g:2
28527 Sep 22 23:14:38.631 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:5 g:2
28528 Sep 22 23:14:38.632 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:5 g:2
28529 126464 126976 ok
28530 126976 127488 ok
28531 127488 128000 ok
28532 128000 128512 ok
28533 128512 129024 ok
28534 129024 129536 ok
28535 129536 130048 ok
28536 130048 130560 ok
28537 130560 131072 ok
28538 131072 131584 ok
28539 131584 132096 ok
28540 132096 132608 ok
28541 132608 133120 ok
28542 133120 133632 ok
28543 133632 134144 ok
28544 134144 134656 ok
28545 134656 135168 ok
28546 135168 135680 ok
28547 135680 136192 ok
28548 136192 136704 ok
28549 136704 137216 ok
28550 137216 137728 ok
28551 137728 138240 ok
28552 138240 138752 ok
28553 138752 139264 ok
28554 139264 139776 ok
28555 139776 140288 ok
28556 140288 140800 ok
28557 140800 141312 ok
28558 141312 141824 ok
28559 141824 142336 ok
28560 142336 142848 ok
28561 142848 143360 ok
28562 143360 143872 ok
28563 143872 144384 ok
28564 144384 144896 ok
28565 144896 145408 ok
28566 145408 145920 ok
28567 145920 146432 ok
28568 146432 146944 ok
28569 146944 147456 ok
28570 147456 147968 ok
28571 147968 148480 ok
28572 148480 148992 ok
28573 148992 149504 ok
28574 149504 150016 ok
28575 150016 150528 ok
28576 150528 151040 ok
28577 151040 151552 ok
28578 151552 152064 ok
28579 152064 152576 ok
28580 152576 153088 ok
28581 153088 153600 ok
28582 153600 154112 ok
28583 154112 154624 ok
28584 154624 155136 ok
28585 155136 155648 ok
28586 155648 156160 ok
28587 156160 156672 ok
28588 156672 157184 ok
28589 157184 157696 ok
28590 157696 158208 ok
28591 158208 158720 ok
28592 158720 159232 ok
28593 159232 159744 ok
28594 159744 160256 ok
28595 160256 160768 ok
28596 160768 161280 ok
28597 161280 161792 ok
28598 161792 162304 ok
28599 162304 162816 ok
28600 162816 163328 ok
28601 163328 163840 ok
28602 163840 164352 ok
28603 164352 164864 ok
28604 164864 165376 ok
28605 165376 165888 ok
28606 165888 166400 ok
28607 166400 166912 ok
28608 166912 167424 ok
28609 167424 167936 ok
28610 167936 168448 ok
28611 168448 168960 ok
28612 168960 169472 ok
28613 169472 169984 ok
28614 169984 170496 ok
28615 170496 171008 ok
28616 171008 171520 ok
28617 171520 172032 ok
28618 172032 172544 ok
28619 172544 173056 ok
28620 173056 173568 ok
28621 173568 174080 ok
28622 174080 174592 ok
28623 174592 175104 ok
28624 175104 175616 ok
28625 175616 176128 ok
28626 176128 176640 ok
28627 176640 177152 ok
28628 177152 177664 ok
28629 177664 178176 ok
28630 178176 178688 ok
28631 178688 179200 ok
28632 179200 179712 ok
28633 179712 180224 ok
28634 180224 180736 ok
28635 180736 181248 ok
28636 181248 181760 ok
28637 181760 182272 ok
28638 182272 182784 ok
28639 182784 183296 ok
28640 183296 183808 ok
28641 183808 184320 ok
28642 184320 184832 ok
28643 184832 185344 ok
28644 185344 185856 ok
28645 185856 186368 ok
28646 186368 186880 ok
28647 186880 187392 ok
28648 187392 187904 ok
28649 187904 188416 ok
28650 188416 188928 ok
28651 188928 189440 ok
28652 189440 189952 ok
28653 189952 190464 ok
28654 190464 190976 ok
28655 190976 191488 ok
28656 191488 192000 ok
28657 Sep 22 23:14:38.632 INFO Connection request from 0d1229ab-bd36-49b6-95e1-d6287586b840 with version 4, task: proc
28658 Sep 22 23:14:38.632 INFO upstairs UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } connected, version 4, task: proc
28659 Sep 22 23:14:38.632 INFO Connection request from 0d1229ab-bd36-49b6-95e1-d6287586b840 with version 4, task: proc
28660 Sep 22 23:14:38.632 INFO upstairs UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } connected, version 4, task: proc
28661 Sep 22 23:14:38.632 INFO Connection request from 0d1229ab-bd36-49b6-95e1-d6287586b840 with version 4, task: proc
28662 Sep 22 23:14:38.633 INFO upstairs UpstairsConnection { upstairs_id: 0d1229ab-bd36-49b6-95e1-d6287586b840, session_id: 16060f7a-9986-4db1-95c8-74c4346b949d, gen: 1 } connected, version 4, task: proc
28663 Sep 22 23:14:38.633 DEBG up_ds_listen was notified
28664 Sep 22 23:14:38.633 DEBG up_ds_listen process 1001
28665 Sep 22 23:14:38.633 DEBG [A] ack job 1001:2, : downstairs
28666 Sep 22 23:14:38.633 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
28667 Sep 22 23:14:38.633 DEBG up_ds_listen checked 1 jobs, back to waiting
28668 192000 192512 ok
28669 192512 193024 ok
28670 193024 193536 ok
28671 193536 194048 ok
28672 194048 194560 ok
28673 194560 195072 ok
28674 195072 195584 ok
28675 195584 196096 ok
28676 196096 196608 ok
28677 196608 197120 ok
28678 197120 197632 ok
28679 197632 198144 ok
28680 198144 198656 ok
28681 198656 199168 ok
28682 199168 199680 ok
28683 199680 200192 ok
28684 200192 200704 ok
28685 200704 201216 ok
28686 201216 201728 ok
28687 201728 202240 ok
28688 202240 202752 ok
28689 202752 203264 ok
28690 203264 203776 ok
28691 203776 204288 ok
28692 204288 204800 ok
28693 204800 205312 ok
28694 205312 205824 ok
28695 205824 206336 ok
28696 206336 206848 ok
28697 206848 207360 ok
28698 207360 207872 ok
28699 207872 208384 ok
28700 208384 208896 ok
28701 208896 209408 ok
28702 209408 209920 ok
28703 209920 210432 ok
28704 210432 210944 ok
28705 210944 211456 ok
28706 211456 211968 ok
28707 211968 212480 ok
28708 212480 212992 ok
28709 212992 213504 ok
28710 213504 214016 ok
28711 214016 214528 ok
28712 214528 215040 ok
28713 215040 215552 ok
28714 215552 216064 ok
28715 216064 216576 ok
28716 216576 217088 ok
28717 217088 217600 ok
28718 217600 218112 ok
28719 218112 218624 ok
28720 218624 219136 ok
28721 219136 219648 ok
28722 219648 220160 ok
28723 220160 220672 ok
28724 220672 221184 ok
28725 221184 221696 ok
28726 221696 222208 ok
28727 222208 222720 ok
28728 222720 223232 ok
28729 223232 223744 ok
28730 223744 224256 ok
28731 224256 224768 ok
28732 224768 225280 ok
28733 225280 225792 ok
28734 225792 226304 ok
28735 226304 226816 ok
28736 226816 227328 ok
28737 227328 227840 ok
28738 227840 228352 ok
28739 228352 228864 ok
28740 228864 229376 ok
28741 229376 229888 ok
28742 229888 230400 ok
28743 230400 230912 ok
28744 230912 231424 ok
28745 231424 231936 ok
28746 231936 232448 ok
28747 232448 232960 ok
28748 232960 233472 ok
28749 233472 233984 ok
28750 233984 234496 ok
28751 234496 235008 ok
28752 235008 235520 ok
28753 235520 236032 ok
28754 236032 236544 ok
28755 236544 237056 ok
28756 237056 237568 ok
28757 237568 238080 ok
28758 238080 238592 ok
28759 238592 239104 ok
28760 239104 239616 ok
28761 239616 240128 ok
28762 240128 240640 ok
28763 240640 241152 ok
28764 241152 241664 ok
28765 241664 242176 ok
28766 242176 242688 ok
28767 242688 243200 ok
28768 243200 243712 ok
28769 243712 244224 ok
28770 244224 244736 ok
28771 244736 245248 ok
28772 245248 245760 ok
28773 245760 246272 ok
28774 246272 246784 ok
28775 246784 247296 ok
28776 247296 247808 ok
28777 247808 248320 ok
28778 248320 248832 ok
28779 248832 249344 ok
28780 249344 249856 ok
28781 249856 250368 ok
28782 250368 250880 ok
28783 250880 251392 ok
28784 251392 251904 ok
28785 251904 252416 ok
28786 252416 252928 ok
28787 252928 253440 ok
28788 253440 253952 ok
28789 253952 254464 ok
28790 254464 254976 ok
28791 254976 255488 ok
28792 255488 256000 ok
28793 256000 256512 ok
28794 256512 257024 ok
28795 257024 257536 ok
287962023-09-22T23:14:38.633ZINFOcrucible-pantry (datafile): [0] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) Disconnected Disconnected Disconnected ds_transition to WaitActive
287972023-09-22T23:14:38.634ZINFOcrucible-pantry (datafile): [0] Transition from Disconnected to WaitActive
287982023-09-22T23:14:38.634ZINFOcrucible-pantry (datafile): [1] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) WaitActive Disconnected Disconnected ds_transition to WaitActive
28799 257536 258048 ok
28800 258048 258560 ok
28801 258560 259072 ok
28802 259072{"msg":"[1] Transition from Disconnected to WaitActive","v":0,"name":"crucible-pantry","level":30,"time":"2023-09-22T23:14:38.634062843Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"component":"datafile"}
28803 259584 ok
28804 259584 260096 ok
28805 260096 260608 ok
28806 260608 261120 ok
288072023-09-22T23:14:38.634ZINFOcrucible-pantry (datafile): [2] 0d1229ab-bd36-49b6-95e1-d6287586b840 (16060f7a-9986-4db1-95c8-74c4346b949d) WaitActive WaitActive Disconnected ds_transition to WaitActive
28808 261120 261632 ok
28809 261632 262144 ok
28810 262144 262656 ok
28811 262656 263168 ok
28812 263168 263680 ok
28813 263680 264192{"msg":"[2] Transition from Disconnected to WaitActive","v":0,"name":"crucible-pantry","level":30,"time":"2023-09-22T23:14:38.634161966Z","hostname":"ip-10-150-1-74.us-west-2.compute.internal","pid":4301,"component":"datafile"}
28814 ok
28815 264192 264704 ok
28816 264704 265216 ok
28817 265216 265728 ok
28818 265728 266240 ok
28819 266240 266752 ok
28820 266752 267264 ok
28821 267264 267776 ok
28822 267776 268288 ok
28823 268288 268800 ok
28824 268800 269312 ok
28825 269312 269824 ok
28826 269824 270336 ok
28827 270336 270848 ok
28828 270848 271360 ok
28829 271360 271872 ok
28830 271872 272384 ok
28831 272384 272896 ok
28832 272896 273408 ok
28833 273408 273920 ok
28834 273920 274432 ok
28835 274432 274944 ok
28836 274944 275456 ok
28837 275456 275968 ok
28838 275968 276480 ok
28839 276480 276992 ok
28840 276992 277504 ok
28841 277504 278016 ok
28842 278016 278528 ok
28843 278528 279040 ok
28844 279040 279552 ok
28845 279552 280064 ok
28846 280064 280576 ok
28847 280576 281088 ok
28848 281088 281600 ok
28849 281600 282112 ok
28850 282112 282624 ok
28851 282624 283136 ok
28852 283136 283648 ok
28853 283648 284160 ok
28854 284160 284672 ok
28855 284672 285184 ok
28856 285184 285696 ok
28857 285696 286208 ok
28858 286208 286720 ok
28859 286720 287232 ok
28860 287232 287744 ok
28861 287744 288256 ok
28862 288256 288768 ok
28863 288768 289280 ok
28864 289280 289792 ok
28865 289792 290304 ok
28866 290304 290816 ok
28867 290816 291328 ok
28868 291328 291840 ok
28869 291840 292352 ok
28870 292352 292864 ok
28871 292864 293376 ok
28872 293376 293888 ok
28873 293888 294400 ok
28874 294400 294912 ok
28875 294912 295424 ok
28876 295424 295936 ok
28877 295936 296448 ok
28878 296448 296960 ok
28879 296960 297472 ok
28880 297472 297984 ok
28881 297984 298496 ok
28882 298496 299008 ok
28883 299008 299520 ok
28884 299520 300032 ok
28885 300032 300544 ok
28886 300544 301056 ok
28887 301056 301568 ok
28888 301568 302080 ok
28889 302080 302592 ok
28890 302592 303104 ok
28891 303104 303616 ok
28892 303616 304128 ok
28893 304128 304640 ok
28894 304640 305152 ok
28895 305152 305664 ok
28896 305664 306176 ok
28897 306176 306688 ok
28898 306688 307200 ok
28899 307200 307712 ok
28900 307712 308224 ok
28901 308224 308736 ok
28902 308736 309248 ok
28903 309248 309760 ok
28904 309760 310272 ok
28905 310272 310784 ok
28906 310784 311296 ok
28907 311296 311808 ok
28908 311808 312320 ok
28909 312320 312832 ok
28910 312832 313344 ok
28911 313344 313856 ok
28912 313856 314368 ok
28913 314368 314880 ok
28914 314880 315392 ok
28915 315392 315904 ok
28916 315904 316416 ok
28917 316416 316928 ok
28918 316928 317440 ok
28919 317440 317952 ok
28920 317952 318464 ok
28921 318464 318976 ok
28922 318976 319488 ok
28923 319488 320000 ok
28924 320000 320512 ok
28925 320512 321024 ok
28926 321024 321536 ok
28927 321536 322048 ok
28928 322048 322560 ok
28929 322560 323072 ok
28930 323072 323584 ok
28931 323584 324096 ok
28932 324096 324608 ok
28933 324608 325120 ok
28934 325120 325632 ok
28935 325632 326144 ok
28936 326144 326656 ok
28937 326656 327168 ok
28938 327168 327680 ok
28939 327680 328192 ok
28940 328192 328704 ok
28941 328704 329216 ok
28942 329216 329728 ok
28943 329728 330240 ok
28944 330240 330752 ok
28945 330752 331264 ok
28946 331264 331776 ok
28947 331776 332288 ok
28948 332288 332800 ok
28949 332800 333312 ok
28950 333312 333824 ok
28951 333824 334336 ok
28952 334336 334848 ok
28953 334848 335360 ok
28954 335360 335872 ok
28955 335872 336384 ok
28956 336384 336896 ok
28957 336896 337408 ok
28958 337408 337920 ok
28959 337920 338432 ok
28960 338432 338944 ok
28961 338944 339456 ok
28962 339456 339968 ok
28963 339968 340480 ok
28964 340480 340992 ok
28965 340992 341504 ok
28966 341504 342016 ok
28967 342016 342528 ok
28968 342528 343040 ok
28969 343040 343552 ok
28970 343552 344064 ok
28971 344064 344576 ok
28972 344576 345088 ok
28973 345088 345600 ok
28974 345600 346112 ok
28975 346112 346624 ok
28976 346624 347136 ok
28977 347136 347648 ok
28978 347648 348160 ok
28979 348160 348672 ok
28980 348672 349184 ok
28981 349184 349696 ok
28982 349696 350208 ok
28983 350208 350720 ok
28984 350720 351232 ok
28985 351232 351744 ok
28986 351744 352256 ok
28987 352256 352768 ok
28988 352768 353280 ok
28989 353280 353792 ok
28990 353792 354304 ok
28991 354304 354816 ok
28992 354816 355328 ok
28993 355328 355840 ok
28994 355840 356352 ok
28995 356352 356864 ok
28996 356864 357376 ok
28997 357376 357888 ok
28998 357888 358400 ok
28999 358400 358912 ok
29000 358912 359424 ok
29001 359424 359936 ok
29002 359936 360448 ok
29003 360448 360960 ok
29004 360960 361472 ok
29005 361472 361984 ok
29006 361984 362496 ok
29007 362496 363008 ok
29008 363008 363520 ok
29009 363520 364032 ok
29010 364032 364544 ok
29011 364544 365056 ok
29012 365056 365568 ok
29013 365568 366080 ok
29014 366080 366592 ok
29015 366592 367104 ok
29016 367104 367616 ok
29017 367616 368128 ok
29018 368128 368640 ok
29019 368640 369152 ok
29020 369152 369664 ok
29021 369664 370176 ok
29022 370176 370688 ok
29023 370688 371200 ok
29024 371200 371712 ok
29025 371712 372224 ok
29026 372224 372736 ok
29027 372736 373248 ok
29028 373248 373760 ok
29029 373760 374272 ok
29030 374272 374784 ok
29031 374784 375296 ok
29032 375296 375808 ok
29033 375808 376320 ok
29034 376320 376832 ok
29035 376832 377344 ok
29036 377344 377856 ok
29037 377856 378368 ok
29038 378368 378880 ok
29039 378880 379392 ok
29040 379392 379904 ok
29041 379904 380416 ok
29042 380416 380928 ok
29043 380928 381440 ok
29044 381440 381952 ok
29045 381952 382464 ok
29046 382464 382976 ok
29047 382976 383488 ok
29048 383488 384000 ok
29049 384000 384512 ok
29050 384512 385024 ok
29051 385024 385536 ok
29052 385536 386048 ok
29053 386048 386560 ok
29054 386560 387072 ok
29055 387072 387584 ok
29056 387584 388096 ok
29057 388096 388608 ok
29058 388608 389120 ok
29059 389120 389632 ok
29060 389632 390144 ok
29061 390144 390656 ok
29062 390656 391168 ok
29063 391168 391680 ok
29064 391680 392192 ok
29065 392192 392704 ok
29066 392704 393216 ok
29067 393216 393728 ok
29068 393728 394240 ok
29069 394240 394752 ok
29070 394752 395264 ok
29071 395264 395776 ok
29072 395776 396288 ok
29073 396288 396800 ok
29074 396800 397312 ok
29075 397312 397824 ok
29076 397824 398336 ok
29077 398336 398848 ok
29078 398848 399360 ok
29079 399360 399872 ok
29080 399872 400384 ok
29081 400384 400896 ok
29082 400896 401408 ok
29083 401408 401920 ok
29084 401920 402432 ok
29085 402432 402944 ok
29086 402944 403456 ok
29087 403456 403968 ok
29088 403968 404480 ok
29089 404480 404992 ok
29090 404992 405504 ok
29091 405504 406016 ok
29092 406016 406528 ok
29093 406528 407040 ok
29094 407040 407552 ok
29095 407552 408064 ok
29096 408064 408576 ok
29097 408576 409088 ok
29098 409088 409600 ok
29099 409600 410112 ok
29100 410112 410624 ok
29101 410624 411136 ok
29102 411136 411648 ok
29103 411648 412160 ok
29104 412160 412672 ok
29105 412672 413184 ok
29106 413184 413696 ok
29107 413696 414208 ok
29108 414208 414720 ok
29109 414720 415232 ok
29110 415232 415744 ok
29111 415744 416256 ok
29112 416256 416768 ok
29113 416768 417280 ok
29114 417280 417792 ok
29115 417792 418304 ok
29116 418304 418816 ok
29117 418816 419328 ok
29118 419328 419840 ok
29119 419840 420352 ok
29120 420352 420864 ok
29121 420864 421376 ok
29122 421376 421888 ok
29123 421888 422400 ok
29124 422400 422912 ok
29125 422912 423424 ok
29126 423424 423936 ok
29127 423936 424448 ok
29128 424448 424960 ok
29129 424960 425472 ok
29130 425472 425984 ok
29131 425984 426496 ok
29132 426496 427008 ok
29133 427008 427520 ok
29134 427520 428032 ok
29135 428032 428544 ok
29136 428544 429056 ok
29137 429056 429568 ok
29138 429568 430080 ok
29139 430080 430592 ok
29140 430592 431104 ok
29141 431104 431616 ok
29142 431616 432128 ok
29143 432128 432640 ok
29144 432640 433152 ok
29145 433152 433664 ok
29146 433664 434176 ok
29147 434176 434688 ok
29148 434688 435200 ok
29149 435200 435712 ok
29150 435712 436224 ok
29151 436224 436736 ok
29152 436736 437248 ok
29153 437248 437760 ok
29154 437760 438272 ok
29155 438272 438784 ok
29156 438784 439296 ok
29157 439296 439808 ok
29158 439808 440320 ok
29159 440320 440832 ok
29160 440832 441344 ok
29161 441344 441856 ok
29162 441856 442368 ok
29163 442368 442880 ok
29164 442880 443392 ok
29165 443392 443904 ok
29166 443904 444416 ok
29167 444416 444928 ok
29168 444928 445440 ok
29169 445440 445952 ok
29170 445952 446464 ok
29171 446464 446976 ok
29172 446976 447488 ok
29173 447488 448000 ok
29174 448000 448512 ok
29175 448512 449024 ok
29176 449024 449536 ok
29177 449536 450048 ok
29178 450048 450560 ok
29179 450560 451072 ok
29180 451072 451584 ok
29181 451584 452096 ok
29182 452096 452608 ok
29183 452608 453120 ok
29184 453120 453632 ok
29185 453632 454144 ok
29186 454144 454656 ok
29187 454656 455168 ok
29188 455168 455680 ok
29189 455680 456192 ok
29190 456192 456704 ok
29191 456704 457216 ok
29192 457216 457728 ok
29193 457728 458240 ok
29194 458240 458752 ok
29195 458752 459264 ok
29196 459264 459776 ok
29197 459776 460288 ok
29198 460288 460800 ok
29199 460800 461312 ok
29200 461312 461824 ok
29201 461824 462336 ok
29202 462336 462848 ok
29203 462848 463360 ok
29204 463360 463872 ok
29205 463872 464384 ok
29206 464384 464896 ok
29207 464896 465408 ok
29208 465408 465920 ok
29209 465920 466432 ok
29210 466432 466944 ok
29211 466944 467456 ok
29212 467456 467968 ok
29213 467968 468480 ok
29214 468480 468992 ok
29215 468992 469504 ok
29216 469504 470016 ok
29217 470016 470528 ok
29218 470528 471040 ok
29219 471040 471552 ok
29220 471552 472064 ok
29221 472064 472576 ok
29222 472576 473088 ok
29223 473088 473600 ok
29224 473600 474112 ok
29225 474112 474624 ok
29226 474624 475136 ok
29227 475136 475648 ok
29228 475648 476160 ok
29229 476160 476672 ok
29230 476672 477184 ok
29231 477184 477696 ok
29232 477696 478208 ok
29233 478208 478720 ok
29234 478720 479232 ok
29235 479232 479744 ok
29236 479744 480256 ok
29237 480256 480768 ok
29238 480768 481280 ok
29239 481280 481792 ok
29240 481792 482304 ok
29241 482304 482816 ok
29242 482816 483328 ok
29243 483328 483840 ok
29244 483840 484352 ok
29245 484352 484864 ok
29246 484864 485376 ok
29247 485376 485888 ok
29248 485888 486400 ok
29249 486400 486912 ok
29250 486912 487424 ok
29251 487424 487936 ok
29252 487936 488448 ok
29253 488448 488960 ok
29254 488960 489472 ok
29255 489472 489984 ok
29256 489984 490496 ok
29257 490496 491008 ok
29258 491008 491520 ok
29259 491520 492032 ok
29260 492032 492544 ok
29261 492544 493056 ok
29262 493056 493568 ok
29263 493568 494080 ok
29264 494080 494592 ok
29265 494592 495104 ok
29266 495104 495616 ok
29267 495616 496128 ok
29268 496128 496640 ok
29269 496640 497152 ok
29270 497152 497664 ok
29271 497664 498176 ok
29272 498176 498688 ok
29273 498688 499200 ok
29274 499200 499712 ok
29275 499712 500224 ok
29276 500224 500736 ok
29277 500736 501248 ok
29278 501248 501760 ok
29279 501760 502272 ok
29280 502272 502784 ok
29281 502784 503296 ok
29282 503296 503808 ok
29283 503808 504320 ok
29284 504320 504832 ok
29285 504832 505344 ok
29286 505344 505856 ok
29287 505856 506368 ok
29288 506368 506880 ok
29289 506880 507392 ok
29290 507392 507904 ok
29291 507904 508416 ok
29292 508416 508928 ok
29293 508928 509440 ok
29294 509440 509952 ok
29295 509952 510464 ok
29296 510464 510976 ok
29297 510976 511488 ok
29298 511488 512000 ok
29299 512000 512512 ok
29300 512512 513024 ok
29301 513024 513536 ok
29302 513536 514048 ok
29303 514048 514560 ok
29304 514560 515072 ok
29305 515072 515584 ok
29306 515584 516096 ok
29307 516096 516608 ok
29308 516608 517120 ok
29309 517120 517632 ok
29310 517632 518144 ok
29311 518144 518656 ok
29312 518656 519168 ok
29313 519168 519680 ok
29314 519680 520192 ok
29315 520192 520704 ok
29316 520704 521216 ok
29317 521216 521728 ok
29318 521728 522240 ok
29319 522240 522752 ok
29320 522752 523264 ok
29321 523264 523776 ok
29322 523776 524288 ok
29323 524288 524800 ok
29324 524800 525312 ok
29325 525312 525824 ok
29326 525824 526336 ok
29327 526336 526848 ok
29328 526848 527360 ok
29329 527360 527872 ok
29330 527872 528384 ok
29331 528384 528896 ok
29332 528896 529408 ok
29333 529408 529920 ok
29334 529920 530432 ok
29335 530432 530944 ok
29336 530944 531456 ok
29337 531456 531968 ok
29338 531968 532480 ok
29339 532480 532992 ok
29340 532992 533504 ok
29341 533504 534016 ok
29342 534016 534528 ok
29343 534528 535040 ok
29344 535040 535552 ok
29345 535552 536064 ok
29346 536064 536576 ok
29347 536576 537088 ok
29348 537088 537600 ok
29349 537600 538112 ok
29350 538112 538624 ok
29351 538624 539136 ok
29352 539136 539648 ok
29353 539648 540160 ok
29354 540160 540672 ok
29355 540672 541184 ok
29356 541184 541696 ok
29357 541696 542208 ok
29358 542208 542720 ok
29359 542720 543232 ok
29360 543232 543744 ok
29361 543744 544256 ok
29362 544256 544768 ok
29363 544768 545280 ok
29364 545280 545792 ok
29365 545792 546304 ok
29366 546304 546816 ok
29367 546816 547328 ok
29368 547328 547840 ok
29369 547840 548352 ok
29370 548352 548864 ok
29371 548864 549376 ok
29372 549376 549888 ok
29373 549888 550400 ok
29374 550400 550912 ok
29375 550912 551424 ok
29376 551424 551936 ok
29377 551936 552448 ok
29378 552448 552960 ok
29379 552960 553472 ok
29380 553472 553984 ok
29381 553984 554496 ok
29382 554496 555008 ok
29383 555008 555520 ok
29384 555520 556032 ok
29385 556032 556544 ok
29386 556544 557056 ok
29387 557056 557568 ok
29388 557568 558080 ok
29389 558080 558592 ok
29390 558592 559104 ok
29391 559104 559616 ok
29392 559616 560128 ok
29393 560128 560640 ok
29394 560640 561152 ok
29395 561152 561664 ok
29396 561664 562176 ok
29397 562176 562688 ok
29398 562688 563200 ok
29399 563200 563712 ok
29400 563712 564224 ok
29401 564224 564736 ok
29402 564736 565248 ok
29403 565248 565760 ok
29404 565760 566272 ok
29405 566272 566784 ok
29406 566784 567296 ok
29407 567296 567808 ok
29408 567808 568320 ok
29409 568320 568832 ok
29410 568832 569344 ok
29411 569344 569856 ok
29412 569856 570368 ok
29413 570368 570880 ok
29414 570880 571392 ok
29415 571392 571904 ok
29416 571904 572416 ok
29417 572416 572928 ok
29418 572928 573440 ok
29419 573440 573952 ok
29420 573952 574464 ok
29421 574464 574976 ok
29422 574976 575488 ok
29423 575488 576000 ok
29424 576000 576512 ok
29425 576512 577024 ok
29426 577024 577536 ok
29427 577536 578048 ok
29428 578048 578560 ok
29429 578560 579072 ok
29430 579072 579584 ok
29431 579584 580096 ok
29432 580096 580608 ok
29433 580608 581120 ok
29434 581120 581632 ok
29435 581632 582144 ok
29436 582144 582656 ok
29437 582656 583168 ok
29438 583168 583680 ok
29439 583680 584192 ok
29440 584192 584704 ok
29441 584704 585216 ok
29442 585216 585728 ok
29443 585728 586240 ok
29444 586240 586752 ok
29445 586752 587264 ok
29446 587264 587776 ok
29447 587776 588288 ok
29448 588288 588800 ok
29449 588800 589312 ok
29450 589312 589824 ok
29451 589824 590336 ok
29452 590336 590848 ok
29453 590848 591360 ok
29454 591360 591872 ok
29455 591872 592384 ok
29456 592384 592896 ok
29457 592896 593408 ok
29458 593408 593920 ok
29459 593920 594432 ok
29460 594432 594944 ok
29461 594944 595456 ok
29462 595456 595968 ok
29463 595968 596480 ok
29464 596480 596992 ok
29465 596992 597504 ok
29466 597504 598016 ok
29467 598016 598528 ok
29468 598528 599040 ok
29469 599040 599552 ok
29470 599552 600064 ok
29471 600064 600576 ok
29472 600576 601088 ok
29473 601088 601600 ok
29474 601600 602112 ok
29475 602112 602624 ok
29476 602624 603136 ok
29477 603136 603648 ok
29478 603648 604160 ok
29479 604160 604672 ok
29480 604672 605184 ok
29481 605184 605696 ok
29482 605696 606208 ok
29483 606208 606720 ok
29484 606720 607232 ok
29485 607232 607744 ok
29486 607744 608256 ok
29487 608256 608768 ok
29488 608768 609280 ok
29489 609280 609792 ok
29490 609792 610304 ok
29491 610304 610816 ok
29492 610816 611328 ok
29493 611328 611840 ok
29494 611840 612352 ok
29495 612352 612864 ok
29496 612864 613376 ok
29497 613376 613888 ok
29498 613888 614400 ok
29499 614400 614912 ok
29500 614912 615424 ok
29501 615424 615936 ok
29502 615936 616448 ok
29503 616448 616960 ok
29504 616960 617472 ok
29505 617472 617984 ok
29506 617984 618496 ok
29507 618496 619008 ok
29508 619008 619520 ok
29509 619520 620032 ok
29510 620032 620544 ok
29511 620544 621056 ok
29512 621056 621568 ok
29513 621568 622080 ok
29514 622080 622592 ok
29515 622592 623104 ok
29516 623104 623616 ok
29517 623616 624128 ok
29518 624128 624640 ok
29519 624640 625152 ok
29520 625152 625664 ok
29521 625664 626176 ok
29522 626176 626688 ok
29523 626688 627200 ok
29524 627200 627712 ok
29525 627712 628224 ok
29526 628224 628736 ok
29527 628736 629248 ok
29528 629248 629760 ok
29529 629760 630272 ok
29530 630272 630784 ok
29531 630784 631296 ok
29532 631296 631808 ok
29533 631808 632320 ok
29534 632320 632832 ok
29535 632832 633344 ok
29536 633344 633856 ok
29537 633856 634368 ok
29538 634368 634880 ok
29539 634880 635392 ok
29540 635392 635904 ok
29541 635904 636416 ok
29542 636416 636928 ok
29543 636928 637440 ok
29544 637440 637952 ok
29545 637952 638464 ok
29546 638464 638976 ok
29547 638976 639488 ok
29548 639488 640000 ok
29549 640000 640512 ok
29550 640512 641024 ok
29551 641024 641536 ok
29552 641536 642048 ok
29553 642048 642560 ok
29554 642560 643072 ok
29555 643072 643584 ok
29556 643584 644096 ok
29557 644096 644608 ok
29558 644608 645120 ok
29559 645120 645632 ok
29560 645632 646144 ok
29561 646144 646656 ok
29562 646656 647168 ok
29563 647168 647680 ok
29564 647680 648192 ok
29565 648192 648704 ok
29566 648704 649216 ok
29567 649216 649728 ok
29568 649728 650240 ok
29569 650240 650752 ok
29570 650752 651264 ok
29571 651264 651776 ok
29572 651776 652288 ok
29573 652288 652800 ok
29574 652800 653312 ok
29575 653312 653824 ok
29576 653824 654336 ok
29577 654336 654848 ok
29578 654848 655360 ok
29579 655360 655872 ok
29580 655872 656384 ok
29581 656384 656896 ok
29582 656896 657408 ok
29583 657408 657920 ok
29584 657920 658432 ok
29585 658432 658944 ok
29586 658944 659456 ok
29587 659456 659968 ok
29588 659968 660480 ok
29589 660480 660992 ok
29590 660992 661504 ok
29591 661504 662016 ok
29592 662016 662528 ok
29593 662528 663040 ok
29594 663040 663552 ok
29595 663552 664064 ok
29596 664064 664576 ok
29597 664576 665088 ok
29598 665088 665600 ok
29599 665600 666112 ok
29600 666112 666624 ok
29601 666624 667136 ok
29602 667136 667648 ok
29603 667648 668160 ok
29604 668160 668672 ok
29605 668672 669184 ok
29606 669184 669696 ok
29607 669696 670208 ok
29608 670208 670720 ok
29609 670720 671232 ok
29610 671232 671744 ok
29611 671744 672256 ok
29612 672256 672768 ok
29613 672768 673280 ok
29614 673280 673792 ok
29615 673792 674304 ok
29616 674304 674816 ok
29617 674816 675328 ok
29618 675328 675840 ok
29619 675840 676352 ok
29620 676352 676864 ok
29621 676864 677376 ok
29622 677376 677888 ok
29623 677888 678400 ok
29624 678400 678912 ok
29625 678912 679424 ok
29626 679424 679936 ok
29627 679936 680448 ok
29628 680448 680960 ok
29629 680960 681472 ok
29630 681472 681984 ok
29631 681984 682496 ok
29632 682496 683008 ok
29633 683008 683520 ok
29634 683520 684032 ok
29635 684032 684544 ok
29636 684544 685056 ok
29637 685056 685568 ok
29638 685568 686080 ok
29639 686080 686592 ok
29640 686592 687104 ok
29641 687104 687616 ok
29642 687616 688128 ok
29643 688128 688640 ok
29644 688640 689152 ok
29645 689152 689664 ok
29646 689664 690176 ok
29647 690176 690688 ok
29648 690688 691200 ok
29649 691200 691712 ok
29650 691712 692224 ok
29651 692224 692736 ok
29652 692736 693248 ok
29653 693248 693760 ok
29654 693760 694272 ok
29655 694272 694784 ok
29656 694784 695296 ok
29657 695296 695808 ok
29658 695808 696320 ok
29659 696320 696832 ok
29660 696832 697344 ok
29661 697344 697856 ok
29662 697856 698368 ok
29663 698368 698880 ok
29664 698880 699392 ok
29665 699392 699904 ok
29666 699904 700416 ok
29667 700416 700928 ok
29668 700928 701440 ok
29669 701440 701952 ok
29670 701952 702464 ok
29671 702464 702976 ok
29672 702976 703488 ok
29673 703488 704000 ok
29674 704000 704512 ok
29675 704512 705024 ok
29676 705024 705536 ok
29677 705536 706048 ok
29678 706048 706560 ok
29679 706560 707072 ok
29680 707072 707584 ok
29681 707584 708096 ok
29682 708096 708608 ok
29683 708608 709120 ok
29684 709120 709632 ok
29685 709632 710144 ok
29686 710144 710656 ok
29687 710656 711168 ok
29688 711168 711680 ok
29689 711680 712192 ok
29690 712192 712704 ok
29691 712704 713216 ok
29692 713216 713728 ok
29693 713728 714240 ok
29694 714240 714752 ok
29695 714752 715264 ok
29696 715264 715776 ok
29697 715776 716288 ok
29698 716288 716800 ok
29699 716800 717312 ok
29700 717312 717824 ok
29701 717824 718336 ok
29702 718336 718848 ok
29703 718848 719360 ok
29704 719360 719872 ok
29705 719872 720384 ok
29706 720384 720896 ok
29707 720896 721408 ok
29708 721408 721920 ok
29709 721920 722432 ok
29710 722432 722944 ok
29711 722944 723456 ok
29712 723456 723968 ok
29713 723968 724480 ok
29714 724480 724992 ok
29715 724992 725504 ok
29716 725504 726016 ok
29717 726016 726528 ok
29718 726528 727040 ok
29719 727040 727552 ok
29720 727552 728064 ok
29721 728064 728576 ok
29722 728576 729088 ok
29723 729088 729600 ok
29724 729600 730112 ok
29725 730112 730624 ok
29726 730624 731136 ok
29727 731136 731648 ok
29728 731648 732160 ok
29729 732160 732672 ok
29730 732672 733184 ok
29731 733184 733696 ok
29732 733696 734208 ok
29733 734208 734720 ok
29734 734720 735232 ok
29735 735232 735744 ok
29736 735744 736256 ok
29737 736256 736768 ok
29738 736768 737280 ok
29739 737280 737792 ok
29740 737792 738304 ok
29741 738304 738816 ok
29742 738816 739328 ok
29743 739328 739840 ok
29744 739840 740352 ok
29745 740352 740864 ok
29746 740864 741376 ok
29747 741376 741888 ok
29748 741888 742400 ok
29749 742400 742912 ok
29750 742912 743424 ok
29751 743424 743936 ok
29752 743936 744448 ok
29753 744448 744960 ok
29754 744960 745472 ok
29755 745472 745984 ok
29756 745984 746496 ok
29757 746496 747008 ok
29758 747008 747520 ok
29759 747520 748032 ok
29760 748032 748544 ok
29761 748544 749056 ok
29762 749056 749568 ok
29763 749568 750080 ok
29764 750080 750592 ok
29765 750592 751104 ok
29766 751104 751616 ok
29767 751616 752128 ok
29768 752128 752640 ok
29769 752640 753152 ok
29770 753152 753664 ok
29771 753664 754176 ok
29772 754176 754688 ok
29773 754688 755200 ok
29774 755200 755712 ok
29775 755712 756224 ok
29776 756224 756736 ok
29777 756736 757248 ok
29778 757248 757760 ok
29779 757760 758272 ok
29780 758272 758784 ok
29781 758784 759296 ok
29782 759296 759808 ok
29783 759808 760320 ok
29784 760320 760832 ok
29785 760832 761344 ok
29786 761344 761856 ok
29787 761856 762368 ok
29788 762368 762880 ok
29789 762880 763392 ok
29790 763392 763904 ok
29791 763904 764416 ok
29792 764416 764928 ok
29793 764928 765440 ok
29794 765440 765952 ok
29795 765952 766464 ok
29796 766464 766976 ok
29797 766976 767488 ok
29798 767488 768000 ok
29799 768000 768512 ok
29800 768512 769024 ok
29801 769024 769536 ok
29802 769536 770048 ok
29803 770048 770560 ok
29804 770560 771072 ok
29805 771072 771584 ok
29806 771584 772096 ok
29807 772096 772608 ok
29808 772608 773120 ok
29809 773120 773632 ok
29810 773632 774144 ok
29811 774144 774656 ok
29812 774656 775168 ok
29813 775168 775680 ok
29814 775680 776192 ok
29815 776192 776704 ok
29816 776704 777216 ok
29817 777216 777728 ok
29818 777728 778240 ok
29819 778240 778752 ok
29820 778752 779264 ok
29821 779264 779776 ok
29822 779776 780288 ok
29823 780288 780800 ok
29824 780800 781312 ok
29825 781312 781824 ok
29826 781824 782336 ok
29827 782336 782848 ok
29828 782848 783360 ok
29829 783360 783872 ok
29830 783872 784384 ok
29831 784384 784896 ok
29832 784896 785408 ok
29833 785408 785920 ok
29834 785920 786432 ok
29835 786432 786944 ok
29836 786944 787456 ok
29837 787456 787968 ok
29838 787968 788480 ok
29839 788480 788992 ok
29840 788992 789504 ok
29841 789504 790016 ok
29842 790016 790528 ok
29843 790528 791040 ok
29844 791040 791552 ok
29845 791552 792064 ok
29846 792064 792576 ok
29847 792576 793088 ok
29848 793088 793600 ok
29849 793600 794112 ok
29850 794112 794624 ok
29851 794624 795136 ok
29852 795136 795648 ok
29853 795648 796160 ok
29854 796160 796672 ok
29855 796672 797184 ok
29856 797184 797696 ok
29857 797696 798208 ok
29858 798208 798720 ok
29859 798720 799232 ok
29860 799232 799744 ok
29861 799744 800256 ok
29862 800256 800768 ok
29863 800768 801280 ok
29864 801280 801792 ok
29865 801792 802304 ok
29866 802304 802816 ok
29867 802816 803328 ok
29868 803328 803840 ok
29869 803840 804352 ok
29870 804352 804864 ok
29871 804864 805376 ok
29872 805376 805888 ok
29873 805888 806400 ok
29874 806400 806912 ok
29875 806912 807424 ok
29876 807424 807936 ok
29877 807936 808448 ok
29878 808448 808960 ok
29879 808960 809472 ok
29880 809472 809984 ok
29881 809984 810496 ok
29882 810496 811008 ok
29883 811008 811520 ok
29884 811520 812032 ok
29885 812032 812544 ok
29886 812544 813056 ok
29887 813056 813568 ok
29888 813568 814080 ok
29889 814080 814592 ok
29890 814592 815104 ok
29891 815104 815616 ok
29892 815616 816128 ok
29893 816128 816640 ok
29894 816640 817152 ok
29895 817152 817664 ok
29896 817664 818176 ok
29897 818176 818688 ok
29898 818688 819200 ok
29899 819200 819712 ok
29900 819712 820224 ok
29901 820224 820736 ok
29902 820736 821248 ok
29903 821248 821760 ok
29904 821760 822272 ok
29905 822272 822784 ok
29906 822784 823296 ok
29907 823296 823808 ok
29908 823808 824320 ok
29909 824320 824832 ok
29910 824832 825344 ok
29911 825344 825856 ok
29912 825856 826368 ok
29913 826368 826880 ok
29914 826880 827392 ok
29915 827392 827904 ok
29916 827904 828416 ok
29917 828416 828928 ok
29918 828928 829440 ok
29919 829440 829952 ok
29920 829952 830464 ok
29921 830464 830976 ok
29922 830976 831488 ok
29923 831488 832000 ok
29924 832000 832512 ok
29925 832512 833024 ok
29926 833024 833536 ok
29927 833536 834048 ok
29928 834048 834560 ok
29929 834560 835072 ok
29930 835072 835584 ok
29931 835584 836096 ok
29932 836096 836608 ok
29933 836608 837120 ok
29934 837120 837632 ok
29935 837632 838144 ok
29936 838144 838656 ok
29937 838656 839168 ok
29938 839168 839680 ok
29939 839680 840192 ok
29940 840192 840704 ok
29941 840704 841216 ok
29942 841216 841728 ok
29943 841728 842240 ok
29944 842240 842752 ok
29945 842752 843264 ok
29946 843264 843776 ok
29947 843776 844288 ok
29948 844288 844800 ok
29949 844800 845312 ok
29950 845312 845824 ok
29951 845824 846336 ok
29952 846336 846848 ok
29953 846848 847360 ok
29954 847360 847872 ok
29955 847872 848384 ok
29956 848384 848896 ok
29957 848896 849408 ok
29958 849408 849920 ok
29959 849920 850432 ok
29960 850432 850944 ok
29961 850944 851456 ok
29962 851456 851968 ok
29963 851968 852480 ok
29964 852480 852992 ok
29965 852992 853504 ok
29966 853504 854016 ok
29967 854016 854528 ok
29968 854528 855040 ok
29969 855040 855552 ok
29970 855552 856064 ok
29971 856064 856576 ok
29972 856576 857088 ok
29973 857088 857600 ok
29974 857600 858112 ok
29975 858112 858624 ok
29976 858624 859136 ok
29977 859136 859648 ok
29978 859648 860160 ok
29979 860160 860672 ok
29980 860672 861184 ok
29981 861184 861696 ok
29982 861696 862208 ok
29983 862208 862720 ok
29984 862720 863232 ok
29985 863232 863744 ok
29986 863744 864256 ok
29987 864256 864768 ok
29988 864768 865280 ok
29989 865280 865792 ok
29990 865792 866304 ok
29991 866304 866816 ok
29992 866816 867328 ok
29993 867328 867840 ok
29994 867840 868352 ok
29995 868352 868864 ok
29996 868864 869376 ok
29997 869376 869888 ok
29998 869888 870400 ok
29999 870400 870912 ok
30000 870912 871424 ok
30001 871424 871936 ok
30002 871936 872448 ok
30003 872448 872960 ok
30004 872960 873472 ok
30005 873472 873984 ok
30006 873984 874496 ok
30007 874496 875008 ok
30008 875008 875520 ok
30009 875520 876032 ok
30010 876032 876544 ok
30011 876544 877056 ok
30012 877056 877568 ok
30013 877568 878080 ok
30014 878080 878592 ok
30015 878592 879104 ok
30016 879104 879616 ok
30017 879616 880128 ok
30018 880128 880640 ok
30019 880640 881152 ok
30020 881152 881664 ok
30021 881664 882176 ok
30022 882176 882688 ok
30023 882688 883200 ok
30024 883200 883712 ok
30025 883712 884224 ok
30026 884224 884736 ok
30027 884736 885248 ok
30028 885248 885760 ok
30029 885760 886272 ok
30030 886272 886784 ok
30031 886784 887296 ok
30032 887296 887808 ok
30033 887808 888320 ok
30034 888320 888832 ok
30035 888832 889344 ok
30036 889344 889856 ok
30037 889856 890368 ok
30038 890368 890880 ok
30039 890880 891392 ok
30040 891392 891904 ok
30041 891904 892416 ok
30042 892416 892928 ok
30043 892928 893440 ok
30044 893440 893952 ok
30045 893952 894464 ok
30046 894464 894976 ok
30047 894976 895488 ok
30048 895488 896000 ok
30049 896000 896512 ok
30050 896512 897024 ok
30051 897024 897536 ok
30052 897536 898048 ok
30053 898048 898560 ok
30054 898560 899072 ok
30055 899072 899584 ok
30056 899584 900096 ok
30057 900096 900608 ok
30058 900608 901120 ok
30059 901120 901632 ok
30060 901632 902144 ok
30061 902144 902656 ok
30062 902656 903168 ok
30063 903168 903680 ok
30064 903680 904192 ok
30065 904192 904704 ok
30066 904704 905216 ok
30067 905216 905728 ok
30068 905728 906240 ok
30069 906240 906752 ok
30070 906752 907264 ok
30071 907264 907776 ok
30072 907776 908288 ok
30073 908288 908800 ok
30074 908800 909312 ok
30075 909312 909824 ok
30076 909824 910336 ok
30077 910336 910848 ok
30078 910848 911360 ok
30079 911360 911872 ok
30080 911872 912384 ok
30081 912384 912896 ok
30082 912896 913408 ok
30083 913408 913920 ok
30084 913920 914432 ok
30085 914432 914944 ok
30086 914944 915456 ok
30087 915456 915968 ok
30088 915968 916480 ok
30089 916480 916992 ok
30090 916992 917504 ok
30091 917504 918016 ok
30092 918016 918528 ok
30093 918528 919040 ok
30094 919040 919552 ok
30095 919552 920064 ok
30096 920064 920576 ok
30097 920576 921088 ok
30098 921088 921600 ok
30099 921600 922112 ok
30100 922112 922624 ok
30101 922624 923136 ok
30102 923136 923648 ok
30103 923648 924160 ok
30104 924160 924672 ok
30105 924672 925184 ok
30106 925184 925696 ok
30107 925696 926208 ok
30108 926208 926720 ok
30109 926720 927232 ok
30110 927232 927744 ok
30111 927744 928256 ok
30112 928256 928768 ok
30113 928768 929280 ok
30114 929280 929792 ok
30115 929792 930304 ok
30116 930304 930816 ok
30117 930816 931328 ok
30118 931328 931840 ok
30119 931840 932352 ok
30120 932352 932864 ok
30121 932864 933376 ok
30122 933376 933888 ok
30123 933888 934400 ok
30124 934400 934912 ok
30125 934912 935424 ok
30126 935424 935936 ok
30127 935936 936448 ok
30128 936448 936960 ok
30129 936960 937472 ok
30130 937472 937984 ok
30131 937984 938496 ok
30132 938496 939008 ok
30133 939008 939520 ok
30134 939520 940032 ok
30135 940032 940544 ok
30136 940544 941056 ok
30137 941056 941568 ok
30138 941568 942080 ok
30139 942080 942592 ok
30140 942592 943104 ok
30141 943104 943616 ok
30142 943616 944128 ok
30143 944128 944640 ok
30144 944640 945152 ok
30145 945152 945664 ok
30146 945664 946176 ok
30147 946176 946688 ok
30148 946688 947200 ok
30149 947200 947712 ok
30150 947712 948224 ok
30151 948224 948736 ok
30152 948736 949248 ok
30153 949248 949760 ok
30154 949760 950272 ok
30155 950272 950784 ok
30156 950784 951296 ok
30157 951296 951808 ok
30158 951808 952320 ok
30159 952320 952832 ok
30160 952832 953344 ok
30161 953344 953856 ok
30162 953856 954368 ok
30163 954368 954880 ok
30164 954880 955392 ok
30165 955392 955904 ok
30166 955904 956416 ok
30167 956416 956928 ok
30168 956928 957440 ok
30169 957440 957952 ok
30170 957952 958464 ok
30171 958464 958976 ok
30172 958976 959488 ok
30173 959488 960000 ok
30174 960000 960512 ok
30175 960512 961024 ok
30176 961024 961536 ok
30177 961536 962048 ok
30178 962048 962560 ok
30179 962560 963072 ok
30180 963072 963584 ok
30181 963584 964096 ok
30182 964096 964608 ok
30183 964608 965120 ok
30184 965120 965632 ok
30185 965632 966144 ok
30186 966144 966656 ok
30187 966656 967168 ok
30188 967168 967680 ok
30189 967680 968192 ok
30190 968192 968704 ok
30191 968704 969216 ok
30192 969216 969728 ok
30193 969728 970240 ok
30194 970240 970752 ok
30195 970752 971264 ok
30196 971264 971776 ok
30197 971776 972288 ok
30198 972288 972800 ok
30199 972800 973312 ok
30200 973312 973824 ok
30201 973824 974336 ok
30202 974336 974848 ok
30203 974848 975360 ok
30204 975360 975872 ok
30205 975872 976384 ok
30206 976384 976896 ok
30207 976896 977408 ok
30208 977408 977920 ok
30209 977920 978432 ok
30210 978432 978944 ok
30211 978944 979456 ok
30212 979456 979968 ok
30213 979968 980480 ok
30214 980480 980992 ok
30215 980992 981504 ok
30216 981504 982016 ok
30217 982016 982528 ok
30218 982528 983040 ok
30219 983040 983552 ok
30220 983552 984064 ok
30221 984064 984576 ok
30222 984576 985088 ok
30223 985088 985600 ok
30224 985600 986112 ok
30225 986112 986624 ok
30226 986624 987136 ok
30227 987136 987648 ok
30228 987648 988160 ok
30229 988160 988672 ok
30230 988672 989184 ok
30231 989184 989696 ok
30232 989696 990208 ok
30233 990208 990720 ok
30234 990720 991232 ok
30235 991232 991744 ok
30236 991744 992256 ok
30237 992256 992768 ok
30238 992768 993280 ok
30239 993280 993792 ok
30240 993792 994304 ok
30241 994304 994816 ok
30242 994816 995328 ok
30243 995328 995840 ok
30244 995840 996352 ok
30245 996352 996864 ok
30246 996864 997376 ok
30247 997376 997888 ok
30248 997888 998400 ok
30249 998400 998912 ok
30250 998912 999424 ok
30251 999424 999936 ok
30252 999936 1000448 ok
30253 1000448 1000960 ok
30254 1000960 1001472 ok
30255 1001472 1001984 ok
30256 1001984 1002496 ok
30257 1002496 1003008 ok
30258 1003008 1003520 ok
30259 1003520 1004032 ok
30260 1004032 1004544 ok
30261 1004544 1005056 ok
30262 1005056 1005568 ok
30263 1005568 1006080 ok
30264 1006080 1006592 ok
30265 1006592 1007104 ok
30266 1007104 1007616 ok
30267 1007616 1008128 ok
30268 1008128 1008640 ok
30269 1008640 1009152 ok
30270 1009152 1009664 ok
30271 1009664 1010176 ok
30272 1010176 1010688 ok
30273 1010688 1011200 ok
30274 1011200 1011712 ok
30275 1011712 1012224 ok
30276 1012224 1012736 ok
30277 1012736 1013248 ok
30278 1013248 1013760 ok
30279 1013760 1014272 ok
30280 1014272 1014784 ok
30281 1014784 1015296 ok
30282 1015296 1015808 ok
30283 1015808 1016320 ok
30284 1016320 1016832 ok
30285 1016832 1017344 ok
30286 1017344 1017856 ok
30287 1017856 1018368 ok
30288 1018368 1018880 ok
30289 1018880 1019392 ok
30290 1019392 1019904 ok
30291 1019904 1020416 ok
30292 1020416 1020928 ok
30293 1020928 1021440 ok
30294 1021440 1021952 ok
30295 1021952 1022464 ok
30296 1022464 1022976 ok
30297 1022976 1023488 ok
30298 1023488 1024000 ok
30299 1024000 1024512 ok
30300 1024512 1025024 ok
30301 1025024 1025536 ok
30302 1025536 1026048 ok
30303 1026048 1026560 ok
30304 1026560 1027072 ok
30305 1027072 1027584 ok
30306 1027584 1028096 ok
30307 1028096 1028608 ok
30308 1028608 1029120 ok
30309 1029120 1029632 ok
30310 1029632 1030144 ok
30311 1030144 1030656 ok
30312 1030656 1031168 ok
30313 1031168 1031680 ok
30314 1031680 1032192 ok
30315 1032192 1032704 ok
30316 1032704 1033216 ok
30317 1033216 1033728 ok
30318 1033728 1034240 ok
30319 1034240 1034752 ok
30320 1034752 1035264 ok
30321 1035264 1035776 ok
30322 1035776 1036288 ok
30323 1036288 1036800 ok
30324 1036800 1037312 ok
30325 1037312 1037824 ok
30326 1037824 1038336 ok
30327 1038336 1038848 ok
30328 1038848 1039360 ok
30329 1039360 1039872 ok
30330 1039872 1040384 ok
30331 1040384 1040896 ok
30332 1040896 1041408 ok
30333 1041408 1041920 ok
30334 1041920 1042432 ok
30335 1042432 1042944 ok
30336 1042944 1043456 ok
30337 1043456 1043968 ok
30338 1043968 1044480 ok
30339 1044480 1044992 ok
30340 1044992 1045504 ok
30341 1045504 1046016 ok
30342 1046016 1046528 ok
30343 1046528 1047040 ok
30344 1047040 1047552 ok
30345 1047552 1048064 ok
30346 1048064 1048576 ok
30347 1048576 1049088 ok
30348 1049088 1049600 ok
30349 1049600 1050112 ok
30350 1050112 1050624 ok
30351 1050624 1051136 ok
30352 1051136 1051648 ok
30353 1051648 1052160 ok
30354 1052160 1052672 ok
30355 1052672 1053184 ok
30356 1053184 1053696 ok
30357 1053696 1054208 ok
30358 1054208 1054720 ok
30359 1054720 1055232 ok
30360 1055232 1055744 ok
30361 1055744 1056256 ok
30362 1056256 1056768 ok
30363 1056768 1057280 ok
30364 1057280 1057792 ok
30365 1057792 1058304 ok
30366 1058304 1058816 ok
30367 1058816 1059328 ok
30368 1059328 1059840 ok
30369 1059840 1060352 ok
30370 1060352 1060864 ok
30371 1060864 1061376 ok
30372 1061376 1061888 ok
30373 1061888 1062400 ok
30374 1062400 1062912 ok
30375 1062912 1063424 ok
30376 1063424 1063936 ok
30377 1063936 1064448 ok
30378 1064448 1064960 ok
30379 1064960 1065472 ok
30380 1065472 1065984 ok
30381 1065984 1066496 ok
30382 1066496 1067008 ok
30383 1067008 1067520 ok
30384 1067520 1068032 ok
30385 1068032 1068544 ok
30386 1068544 1069056 ok
30387 1069056 1069568 ok
30388 1069568 1070080 ok
30389 1070080 1070592 ok
30390 1070592 1071104 ok
30391 1071104 1071616 ok
30392 1071616 1072128 ok
30393 1072128 1072640 ok
30394 1072640 1073152 ok
30395 1073152 1073664 ok
30396 1073664 1074176 ok
30397 1074176 1074688 ok
30398 1074688 1075200 ok
30399 1075200 1075712 ok
30400 1075712 1076224 ok
30401 1076224 1076736 ok
30402 1076736 1077248 ok
30403 1077248 1077760 ok
30404 1077760 1078272 ok
30405 1078272 1078784 ok
30406 1078784 1079296 ok
30407 1079296 1079808 ok
30408 1079808 1080320 ok
30409 1080320 1080832 ok
30410 1080832 1081344 ok
30411 1081344 1081856 ok
30412 1081856 1082368 ok
30413 1082368 1082880 ok
30414 1082880 1083392 ok
30415 1083392 1083904 ok
30416 1083904 1084416 ok
30417 1084416 1084928 ok
30418 1084928 1085440 ok
30419 1085440 1085952 ok
30420 1085952 1086464 ok
30421 1086464 1086976 ok
30422 1086976 1087488 ok
30423 1087488 1088000 ok
30424 1088000 1088512 ok
30425 1088512 1089024 ok
30426 1089024 1089536 ok
30427 1089536 1090048 ok
30428 1090048 1090560 ok
30429 1090560 1091072 ok
30430 1091072 1091584 ok
30431 1091584 1092096 ok
30432 1092096 1092608 ok
30433 1092608 1093120 ok
30434 1093120 1093632 ok
30435 1093632 1094144 ok
30436 1094144 1094656 ok
30437 1094656 1095168 ok
30438 1095168 1095680 ok
30439 1095680 1096192 ok
30440 1096192 1096704 ok
30441 1096704 1097216 ok
30442 1097216 1097728 ok
30443 1097728 1098240 ok
30444 1098240 1098752 ok
30445 1098752 1099264 ok
30446 1099264 1099776 ok
30447 1099776 1100288 ok
30448 1100288 1100800 ok
30449 1100800 1101312 ok
30450 1101312 1101824 ok
30451 1101824 1102336 ok
30452 1102336 1102848 ok
30453 1102848 1103360 ok
30454 1103360 1103872 ok
30455 1103872 1104384 ok
30456 1104384 1104896 ok
30457 1104896 1105408 ok
30458 1105408 1105920 ok
30459 1105920 1106432 ok
30460 1106432 1106944 ok
30461 1106944 1107456 ok
30462 1107456 1107968 ok
30463 1107968 1108480 ok
30464 1108480 1108992 ok
30465 1108992 1109504 ok
30466 1109504 1110016 ok
30467 1110016 1110528 ok
30468 1110528 1111040 ok
30469 1111040 1111552 ok
30470 1111552 1112064 ok
30471 1112064 1112576 ok
30472 1112576 1113088 ok
30473 1113088 1113600 ok
30474 1113600 1114112 ok
30475 1114112 1114624 ok
30476 1114624 1115136 ok
30477 1115136 1115648 ok
30478 1115648 1116160 ok
30479 1116160 1116672 ok
30480 1116672 1117184 ok
30481 1117184 1117696 ok
30482 1117696 1118208 ok
30483 1118208 1118720 ok
30484 1118720 1119232 ok
30485 1119232 1119744 ok
30486 1119744 1120256 ok
30487 1120256 1120768 ok
30488 1120768 1121280 ok
30489 1121280 1121792 ok
30490 1121792 1122304 ok
30491 1122304 1122816 ok
30492 1122816 1123328 ok
30493 1123328 1123840 ok
30494 1123840 1124352 ok
30495 1124352 1124864 ok
30496 1124864 1125376 ok
30497 1125376 1125888 ok
30498 1125888 1126400 ok
30499 1126400 1126912 ok
30500 1126912 1127424 ok
30501 1127424 1127936 ok
30502 1127936 1128448 ok
30503 1128448 1128960 ok
30504 1128960 1129472 ok
30505 1129472 1129984 ok
30506 1129984 1130496 ok
30507 1130496 1131008 ok
30508 1131008 1131520 ok
30509 1131520 1132032 ok
30510 1132032 1132544 ok
30511 1132544 1133056 ok
30512 1133056 1133568 ok
30513 1133568 1134080 ok
30514 1134080 1134592 ok
30515 1134592 1135104 ok
30516 1135104 1135616 ok
30517 1135616 1136128 ok
30518 1136128 1136640 ok
30519 1136640 1137152 ok
30520 1137152 1137664 ok
30521 1137664 1138176 ok
30522 1138176 1138688 ok
30523 1138688 1139200 ok
30524 1139200 1139712 ok
30525 1139712 1140224 ok
30526 1140224 1140736 ok
30527 1140736 1141248 ok
30528 1141248 1141760 ok
30529 1141760 1142272 ok
30530 1142272 1142784 ok
30531 1142784 1143296 ok
30532 1143296 1143808 ok
30533 1143808 1144320 ok
30534 1144320 1144832 ok
30535 1144832 1145344 ok
30536 1145344 1145856 ok
30537 1145856 1146368 ok
30538 1146368 1146880 ok
30539 1146880 1147392 ok
30540 1147392 1147904 ok
30541 1147904 1148416 ok
30542 1148416 1148928 ok
30543 1148928 1149440 ok
30544 1149440 1149952 ok
30545 1149952 1150464 ok
30546 1150464 1150976 ok
30547 1150976 1151488 ok
30548 1151488 1152000 ok
30549 1152000 1152512 ok
30550 1152512 1153024 ok
30551 1153024 1153536 ok
30552 1153536 1154048 ok
30553 1154048 1154560 ok
30554 1154560 1155072 ok
30555 1155072 1155584 ok
30556 1155584 1156096 ok
30557 1156096 1156608 ok
30558 1156608 1157120 ok
30559 1157120 1157632 ok
30560 1157632 1158144 ok
30561 1158144 1158656 ok
30562 1158656 1159168 ok
30563 1159168 1159680 ok
30564 1159680 1160192 ok
30565 1160192 1160704 ok
30566 1160704 1161216 ok
30567 1161216 1161728 ok
30568 1161728 1162240 ok
30569 1162240 1162752 ok
30570 1162752 1163264 ok
30571 1163264 1163776 ok
30572 1163776 1164288 ok
30573 1164288 1164800 ok
30574 1164800 1165312 ok
30575 1165312 1165824 ok
30576 1165824 1166336 ok
30577 1166336 1166848 ok
30578 1166848 1167360 ok
30579 1167360 1167872 ok
30580 1167872 1168384 ok
30581 1168384 1168896 ok
30582 1168896 1169408 ok
30583 1169408 1169920 ok
30584 1169920 1170432 ok
30585 1170432 1170944 ok
30586 1170944 1171456 ok
30587 1171456 1171968 ok
30588 1171968 1172480 ok
30589 1172480 1172992 ok
30590 1172992 1173504 ok
30591 1173504 1174016 ok
30592 1174016 1174528 ok
30593 1174528 1175040 ok
30594 1175040 1175552 ok
30595 1175552 1176064 ok
30596 1176064 1176576 ok
30597 1176576 1177088 ok
30598 1177088 1177600 ok
30599 1177600 1178112 ok
30600 1178112 1178624 ok
30601 1178624 1179136 ok
30602 1179136 1179648 ok
30603 1179648 1180160 ok
30604 1180160 1180672 ok
30605 1180672 1181184 ok
30606 1181184 1181696 ok
30607 1181696 1182208 ok
30608 1182208 1182720 ok
30609 1182720 1183232 ok
30610 1183232 1183744 ok
30611 1183744 1184256 ok
30612 1184256 1184768 ok
30613 1184768 1185280 ok
30614 1185280 1185792 ok
30615 1185792 1186304 ok
30616 1186304 1186816 ok
30617 1186816 1187328 ok
30618 1187328 1187840 ok
30619 1187840 1188352 ok
30620 1188352 1188864 ok
30621 1188864 1189376 ok
30622 1189376 1189888 ok
30623 1189888 1190400 ok
30624 1190400 1190912 ok
30625 1190912 1191424 ok
30626 1191424 1191936 ok
30627 1191936 1192448 ok
30628 1192448 1192960 ok
30629 1192960 1193472 ok
30630 1193472 1193984 ok
30631 1193984 1194496 ok
30632 1194496 1195008 ok
30633 1195008 1195520 ok
30634 1195520 1196032 ok
30635 1196032 1196544 ok
30636 1196544 1197056 ok
30637 1197056 1197568 ok
30638 1197568 1198080 ok
30639 1198080 1198592 ok
30640 1198592 1199104 ok
30641 1199104 1199616 ok
30642 1199616 1200128 ok
30643 1200128 1200640 ok
30644 1200640 1201152 ok
30645 1201152 1201664 ok
30646 1201664 1202176 ok
30647 1202176 1202688 ok
30648 1202688 1203200 ok
30649 1203200 1203712 ok
30650 1203712 1204224 ok
30651 1204224 1204736 ok
30652 1204736 1205248 ok
30653 1205248 1205760 ok
30654 1205760 1206272 ok
30655 1206272 1206784 ok
30656 1206784 1207296 ok
30657 1207296 1207808 ok
30658 1207808 1208320 ok
30659 1208320 1208832 ok
30660 1208832 1209344 ok
30661 1209344 1209856 ok
30662 1209856 1210368 ok
30663 1210368 1210880 ok
30664 1210880 1211392 ok
30665 1211392 1211904 ok
30666 1211904 1212416 ok
30667 1212416 1212928 ok
30668 1212928 1213440 ok
30669 1213440 1213952 ok
30670 1213952 1214464 ok
30671 1214464 1214976 ok
30672 1214976 1215488 ok
30673 1215488 1216000 ok
30674 1216000 1216512 ok
30675 1216512 1217024 ok
30676 1217024 1217536 ok
30677 1217536 1218048 ok
30678 1218048 1218560 ok
30679 1218560 1219072 ok
30680 1219072 1219584 ok
30681 1219584 1220096 ok
30682 1220096 1220608 ok
30683 1220608 1221120 ok
30684 1221120 1221632 ok
30685 1221632 1222144 ok
30686 1222144 1222656 ok
30687 1222656 1223168 ok
30688 1223168 1223680 ok
30689 1223680 1224192 ok
30690 1224192 1224704 ok
30691 1224704 1225216 ok
30692 1225216 1225728 ok
30693 1225728 1226240 ok
30694 1226240 1226752 ok
30695 1226752 1227264 ok
30696 1227264 1227776 ok
30697 1227776 1228288 ok
30698 1228288 1228800 ok
30699 1228800 1229312 ok
30700 1229312 1229824 ok
30701 1229824 1230336 ok
30702 1230336 1230848 ok
30703 1230848 1231360 ok
30704 1231360 1231872 ok
30705 1231872 1232384 ok
30706 1232384 1232896 ok
30707 1232896 1233408 ok
30708 1233408 1233920 ok
30709 1233920 1234432 ok
30710 1234432 1234944 ok
30711 1234944 1235456 ok
30712 1235456 1235968 ok
30713 1235968 1236480 ok
30714 1236480 1236992 ok
30715 1236992 1237504 ok
30716 1237504 1238016 ok
30717 1238016 1238528 ok
30718 1238528 1239040 ok
30719 1239040 1239552 ok
30720 1239552 1240064 ok
30721 1240064 1240576 ok
30722 1240576 1241088 ok
30723 1241088 1241600 ok
30724 1241600 1242112 ok
30725 1242112 1242624 ok
30726 1242624 1243136 ok
30727 1243136 1243648 ok
30728 1243648 1244160 ok
30729 1244160 1244672 ok
30730 1244672 1245184 ok
30731 1245184 1245696 ok
30732 1245696 1246208 ok
30733 1246208 1246720 ok
30734 1246720 1247232 ok
30735 1247232 1247744 ok
30736 1247744 1248256 ok
30737 1248256 1248768 ok
30738 1248768 1249280 ok
30739 1249280 1249792 ok
30740 1249792 1250304 ok
30741 1250304 1250816 ok
30742 1250816 1251328 ok
30743 1251328 1251840 ok
30744 1251840 1252352 ok
30745 1252352 1252864 ok
30746 1252864 1253376 ok
30747 1253376 1253888 ok
30748 1253888 1254400 ok
30749 1254400 1254912 ok
30750 1254912 1255424 ok
30751 1255424 1255936 ok
30752 1255936 1256448 ok
30753 1256448 1256960 ok
30754 1256960 1257472 ok
30755 1257472 1257984 ok
30756 1257984 1258496 ok
30757 1258496 1259008 ok
30758 1259008 1259520 ok
30759 1259520 1260032 ok
30760 1260032 1260544 ok
30761 1260544 1261056 ok
30762 1261056 1261568 ok
30763 1261568 1262080 ok
30764 1262080 1262592 ok
30765 1262592 1263104 ok
30766 1263104 1263616 ok
30767 1263616 1264128 ok
30768 1264128 1264640 ok
30769 1264640 1265152 ok
30770 1265152 1265664 ok
30771 1265664 1266176 ok
30772 1266176 1266688 ok
30773 1266688 1267200 ok
30774 1267200 1267712 ok
30775 1267712 1268224 ok
30776 1268224 1268736 ok
30777 1268736 1269248 ok
30778 1269248 1269760 ok
30779 1269760 1270272 ok
30780 1270272 1270784 ok
30781 1270784 1271296 ok
30782 1271296 1271808 ok
30783 1271808 1272320 ok
30784 1272320 1272832 ok
30785 1272832 1273344 ok
30786 1273344 1273856 ok
30787 1273856 1274368 ok
30788 1274368 1274880 ok
30789 1274880 1275392 ok
30790 1275392 1275904 ok
30791 1275904 1276416 ok
30792 1276416 1276928 ok
30793 1276928 1277440 ok
30794 1277440 1277952 ok
30795 1277952 1278464 ok
30796 1278464 1278976 ok
30797 1278976 1279488 ok
30798 1279488 1280000 ok
30799 1280000 1280512 ok
30800 1280512 1281024 ok
30801 1281024 1281536 ok
30802 1281536 1282048 ok
30803 1282048 1282560 ok
30804 1282560 1283072 ok
30805 1283072 1283584 ok
30806 1283584 1284096 ok
30807 1284096 1284608 ok
30808 1284608 1285120 ok
30809 1285120 1285632 ok
30810 1285632 1286144 ok
30811 1286144 1286656 ok
30812 1286656 1287168 ok
30813 1287168 1287680 ok
30814 1287680 1288192 ok
30815 1288192 1288704 ok
30816 1288704 1289216 ok
30817 1289216 1289728 ok
30818 1289728 1290240 ok
30819 1290240 1290752 ok
30820 1290752 1291264 ok
30821 1291264 1291776 ok
30822 1291776 1292288 ok
30823 1292288 1292800 ok
30824 1292800 1293312 ok
30825 1293312 1293824 ok
30826 1293824 1294336 ok
30827 1294336 1294848 ok
30828 1294848 1295360 ok
30829 1295360 1295872 ok
30830 1295872 1296384 ok
30831 1296384 1296896 ok
30832 1296896 1297408 ok
30833 1297408 1297920 ok
30834 1297920 1298432 ok
30835 1298432 1298944 ok
30836 1298944 1299456 ok
30837 1299456 1299968 ok
30838 1299968 1300480 ok
30839 1300480 1300992 ok
30840 1300992 1301504 ok
30841 1301504 1302016 ok
30842 1302016 1302528 ok
30843 1302528 1303040 ok
30844 1303040 1303552 ok
30845 1303552 1304064 ok
30846 1304064 1304576 ok
30847 1304576 1305088 ok
30848 1305088 1305600 ok
30849 1305600 1306112 ok
30850 1306112 1306624 ok
30851 1306624 1307136 ok
30852 1307136 1307648 ok
30853 1307648 1308160 ok
30854 1308160 1308672 ok
30855 1308672 1309184 ok
30856 1309184 1309696 ok
30857 1309696 1310208 ok
30858 1310208 1310720 ok
30859 1310720 1311232 ok
30860 1311232 1311744 ok
30861 1311744 1312256 ok
30862 1312256 1312768 ok
30863 1312768 1313280 ok
30864 1313280 1313792 ok
30865 1313792 1314304 ok
30866 1314304 1314816 ok
30867 1314816 1315328 ok
30868 1315328 1315840 ok
30869 1315840 1316352 ok
30870 1316352 1316864 ok
30871 1316864 1317376 ok
30872 1317376 1317888 ok
30873 1317888 1318400 ok
30874 1318400 1318912 ok
30875 1318912 1319424 ok
30876 1319424 1319936 ok
30877 1319936 1320448 ok
30878 1320448 1320960 ok
30879 1320960 1321472 ok
30880 1321472 1321984 ok
30881 1321984 1322496 ok
30882 1322496 1323008 ok
30883 1323008 1323520 ok
30884 1323520 1324032 ok
30885 1324032 1324544 ok
30886 1324544 1325056 ok
30887 1325056 1325568 ok
30888 1325568 1326080 ok
30889 1326080 1326592 ok
30890 1326592 1327104 ok
30891 1327104 1327616 ok
30892 1327616 1328128 ok
30893 1328128 1328640 ok
30894 1328640 1329152 ok
30895 1329152 1329664 ok
30896 1329664 1330176 ok
30897 1330176 1330688 ok
30898 1330688 1331200 ok
30899 1331200 1331712 ok
30900 1331712 1332224 ok
30901 1332224 1332736 ok
30902 1332736 1333248 ok
30903 1333248 1333760 ok
30904 1333760 1334272 ok
30905 1334272 1334784 ok
30906 1334784 1335296 ok
30907 1335296 1335808 ok
30908 1335808 1336320 ok
30909 1336320 1336832 ok
30910 1336832 1337344 ok
30911 1337344 1337856 ok
30912 1337856 1338368 ok
30913 1338368 1338880 ok
30914 1338880 1339392 ok
30915 1339392 1339904 ok
30916 1339904 1340416 ok
30917 1340416 1340928 ok
30918 1340928 1341440 ok
30919 1341440 1341952 ok
30920 1341952 1342464 ok
30921 1342464 1342976 ok
30922 1342976 1343488 ok
30923 1343488 1344000 ok
30924 1344000 1344512 ok
30925 1344512 1345024 ok
30926 1345024 1345536 ok
30927 1345536 1346048 ok
30928 1346048 1346560 ok
30929 1346560 1347072 ok
30930 1347072 1347584 ok
30931 1347584 1348096 ok
30932 1348096 1348608 ok
30933 1348608 1349120 ok
30934 1349120 1349632 ok
30935 1349632 1350144 ok
30936 1350144 1350656 ok
30937 1350656 1351168 ok
30938 1351168 1351680 ok
30939 1351680 1352192 ok
30940 1352192 1352704 ok
30941 1352704 1353216 ok
30942 1353216 1353728 ok
30943 1353728 1354240 ok
30944 1354240 1354752 ok
30945 1354752 1355264 ok
30946 1355264 1355776 ok
30947 1355776 1356288 ok
30948 1356288 1356800 ok
30949 1356800 1357312 ok
30950 1357312 1357824 ok
30951 1357824 1358336 ok
30952 1358336 1358848 ok
30953 1358848 1359360 ok
30954 1359360 1359872 ok
30955 1359872 1360384 ok
30956 1360384 1360896 ok
30957 1360896 1361408 ok
30958 1361408 1361920 ok
30959 1361920 1362432 ok
30960 1362432 1362944 ok
30961 1362944 1363456 ok
30962 1363456 1363968 ok
30963 1363968 1364480 ok
30964 1364480 1364992 ok
30965 1364992 1365504 ok
30966 1365504 1366016 ok
30967 1366016 1366528 ok
30968 1366528 1367040 ok
30969 1367040 1367552 ok
30970 1367552 1368064 ok
30971 1368064 1368576 ok
30972 1368576 1369088 ok
30973 1369088 1369600 ok
30974 1369600 1370112 ok
30975 1370112 1370624 ok
30976 1370624 1371136 ok
30977 1371136 1371648 ok
30978 1371648 1372160 ok
30979 1372160 1372672 ok
30980 1372672 1373184 ok
30981 1373184 1373696 ok
30982 1373696 1374208 ok
30983 1374208 1374720 ok
30984 1374720 1375232 ok
30985 1375232 1375744 ok
30986 1375744 1376256 ok
30987 1376256 1376768 ok
30988 1376768 1377280 ok
30989 1377280 1377792 ok
30990 1377792 1378304 ok
30991 1378304 1378816 ok
30992 1378816 1379328 ok
30993 1379328 1379840 ok
30994 1379840 1380352 ok
30995 1380352 1380864 ok
30996 1380864 1381376 ok
30997 1381376 1381888 ok
30998 1381888 1382400 ok
30999 1382400 1382912 ok
31000 1382912 1383424 ok
31001 1383424 1383936 ok
31002 1383936 1384448 ok
31003 1384448 1384960 ok
31004 1384960 1385472 ok
31005 1385472 1385984 ok
31006 1385984 1386496 ok
31007 1386496 1387008 ok
31008 1387008 1387520 ok
31009 1387520 1388032 ok
31010 1388032 1388544 ok
31011 1388544 1389056 ok
31012 1389056 1389568 ok
31013 1389568 1390080 ok
31014 1390080 1390592 ok
31015 1390592 1391104 ok
31016 1391104 1391616 ok
31017 1391616 1392128 ok
31018 1392128 1392640 ok
31019 1392640 1393152 ok
31020 1393152 1393664 ok
31021 1393664 1394176 ok
31022 1394176 1394688 ok
31023 1394688 1395200 ok
31024 1395200 1395712 ok
31025 1395712 1396224 ok
31026 1396224 1396736 ok
31027 1396736 1397248 ok
31028 1397248 1397760 ok
31029 1397760 1398272 ok
31030 1398272 1398784 ok
31031 1398784 1399296 ok
31032 1399296 1399808 ok
31033 1399808 1400320 ok
31034 1400320 1400832 ok
31035 1400832 1401344 ok
31036 1401344 1401856 ok
31037 1401856 1402368 ok
31038 1402368 1402880 ok
31039 1402880 1403392 ok
31040 1403392 1403904 ok
31041 1403904 1404416 ok
31042 1404416 1404928 ok
31043 1404928 1405440 ok
31044 1405440 1405952 ok
31045 1405952 1406464 ok
31046 1406464 1406976 ok
31047 1406976 1407488 ok
31048 1407488 1408000 ok
31049 1408000 1408512 ok
31050 1408512 1409024 ok
31051 1409024 1409536 ok
31052 1409536 1410048 ok
31053 1410048 1410560 ok
31054 1410560 1411072 ok
31055 1411072 1411584 ok
31056 1411584 1412096 ok
31057 1412096 1412608 ok
31058 1412608 1413120 ok
31059 1413120 1413632 ok
31060 1413632 1414144 ok
31061 1414144 1414656 ok
31062 1414656 1415168 ok
31063 1415168 1415680 ok
31064 1415680 1416192 ok
31065 1416192 1416704 ok
31066 1416704 1417216 ok
31067 1417216 1417728 ok
31068 1417728 1418240 ok
31069 1418240 1418752 ok
31070 1418752 1419264 ok
31071 1419264 1419776 ok
31072 1419776 1420288 ok
31073 1420288 1420800 ok
31074 1420800 1421312 ok
31075 1421312 1421824 ok
31076 1421824 1422336 ok
31077 1422336 1422848 ok
31078 1422848 1423360 ok
31079 1423360 1423872 ok
31080 1423872 1424384 ok
31081 1424384 1424896 ok
31082 1424896 1425408 ok
31083 1425408 1425920 ok
31084 1425920 1426432 ok
31085 1426432 1426944 ok
31086 1426944 1427456 ok
31087 1427456 1427968 ok
31088 1427968 1428480 ok
31089 1428480 1428992 ok
31090 1428992 1429504 ok
31091 1429504 1430016 ok
31092 1430016 1430528 ok
31093 1430528 1431040 ok
31094 1431040 1431552 ok
31095 1431552 1432064 ok
31096 1432064 1432576 ok
31097 1432576 1433088 ok
31098 1433088 1433600 ok
31099 1433600 1434112 ok
31100 1434112 1434624 ok
31101 1434624 1435136 ok
31102 1435136 1435648 ok
31103 1435648 1436160 ok
31104 1436160 1436672 ok
31105 1436672 1437184 ok
31106 1437184 1437696 ok
31107 1437696 1438208 ok
31108 1438208 1438720 ok
31109 1438720 1439232 ok
31110 1439232 1439744 ok
31111 1439744 1440256 ok
31112 1440256 1440768 ok
31113 1440768 1441280 ok
31114 1441280 1441792 ok
31115 1441792 1442304 ok
31116 1442304 1442816 ok
31117 1442816 1443328 ok
31118 1443328 1443840 ok
31119 1443840 1444352 ok
31120 1444352 1444864 ok
31121 1444864 1445376 ok
31122 1445376 1445888 ok
31123 1445888 1446400 ok
31124 1446400 1446912 ok
31125 1446912 1447424 ok
31126 1447424 1447936 ok
31127 1447936 1448448 ok
31128 1448448 1448960 ok
31129 1448960 1449472 ok
31130 1449472 1449984 ok
31131 1449984 1450496 ok
31132 1450496 1451008 ok
31133 1451008 1451520 ok
31134 1451520 1452032 ok
31135 1452032 1452544 ok
31136 1452544 1453056 ok
31137 1453056 1453568 ok
31138 1453568 1454080 ok
31139 1454080 1454592 ok
31140 1454592 1455104 ok
31141 1455104 1455616 ok
31142 1455616 1456128 ok
31143 1456128 1456640 ok
31144 1456640 1457152 ok
31145 1457152 1457664 ok
31146 1457664 1458176 ok
31147 1458176 1458688 ok
31148 1458688 1459200 ok
31149 1459200 1459712 ok
31150 1459712 1460224 ok
31151 1460224 1460736 ok
31152 1460736 1461248 ok
31153 1461248 1461760 ok
31154 1461760 1462272 ok
31155 1462272 1462784 ok
31156 1462784 1463296 ok
31157 1463296 1463808 ok
31158 1463808 1464320 ok
31159 1464320 1464832 ok
31160 1464832 1465344 ok
31161 1465344 1465856 ok
31162 1465856 1466368 ok
31163 1466368 1466880 ok
31164 1466880 1467392 ok
31165 1467392 1467904 ok
31166 1467904 1468416 ok
31167 1468416 1468928 ok
31168 1468928 1469440 ok
31169 1469440 1469952 ok
31170 1469952 1470464 ok
31171 1470464 1470976 ok
31172 1470976 1471488 ok
31173 1471488 1472000 ok
31174 1472000 1472512 ok
31175 1472512 1473024 ok
31176 1473024 1473536 ok
31177 1473536 1474048 ok
31178 1474048 1474560 ok
31179 1474560 1475072 ok
31180 1475072 1475584 ok
31181 1475584 1476096 ok
31182 1476096 1476608 ok
31183 1476608 1477120 ok
31184 1477120 1477632 ok
31185 1477632 1478144 ok
31186 1478144 1478656 ok
31187 1478656 1479168 ok
31188 1479168 1479680 ok
31189 1479680 1480192 ok
31190 1480192 1480704 ok
31191 1480704 1481216 ok
31192 1481216 1481728 ok
31193 1481728 1482240 ok
31194 1482240 1482752 ok
31195 1482752 1483264 ok
31196 1483264 1483776 ok
31197 1483776 1484288 ok
31198 1484288 1484800 ok
31199 1484800 1485312 ok
31200 1485312 1485824 ok
31201 1485824 1486336 ok
31202 1486336 1486848 ok
31203 1486848 1487360 ok
31204 1487360 1487872 ok
31205 1487872 1488384 ok
31206 1488384 1488896 ok
31207 1488896 1489408 ok
31208 1489408 1489920 ok
31209 1489920 1490432 ok
31210 1490432 1490944 ok
31211 1490944 1491456 ok
31212 1491456 1491968 ok
31213 1491968 1492480 ok
31214 1492480 1492992 ok
31215 1492992 1493504 ok
31216 1493504 1494016 ok
31217 1494016 1494528 ok
31218 1494528 1495040 ok
31219 1495040 1495552 ok
31220 1495552 1496064 ok
31221 1496064 1496576 ok
31222 1496576 1497088 ok
31223 1497088 1497600 ok
31224 1497600 1498112 ok
31225 1498112 1498624 ok
31226 1498624 1499136 ok
31227 1499136 1499648 ok
31228 1499648 1500160 ok
31229 1500160 1500672 ok
31230 1500672 1501184 ok
31231 1501184 1501696 ok
31232 1501696 1502208 ok
31233 1502208 1502720 ok
31234 1502720 1503232 ok
31235 1503232 1503744 ok
31236 1503744 1504256 ok
31237 1504256 1504768 ok
31238 1504768 1505280 ok
31239 1505280 1505792 ok
31240 1505792 1506304 ok
31241 1506304 1506816 ok
31242 1506816 1507328 ok
31243 1507328 1507840 ok
31244 1507840 1508352 ok
31245 1508352 1508864 ok
31246 1508864 1509376 ok
31247 1509376 1509888 ok
31248 1509888 1510400 ok
31249 1510400 1510912 ok
31250 1510912 1511424 ok
31251 1511424 1511936 ok
31252 1511936 1512448 ok
31253 1512448 1512960 ok
31254 1512960 1513472 ok
31255 1513472 1513984 ok
31256 1513984 1514496 ok
31257 1514496 1515008 ok
31258 1515008 1515520 ok
31259 1515520 1516032 ok
31260 1516032 1516544 ok
31261 1516544 1517056 ok
31262 1517056 1517568 ok
31263 1517568 1518080 ok
31264 1518080 1518592 ok
31265 1518592 1519104 ok
31266 1519104 1519616 ok
31267 1519616 1520128 ok
31268 1520128 1520640 ok
31269 1520640 1521152 ok
31270 1521152 1521664 ok
31271 1521664 1522176 ok
31272 1522176 1522688 ok
31273 1522688 1523200 ok
31274 1523200 1523712 ok
31275 1523712 1524224 ok
31276 1524224 1524736 ok
31277 1524736 1525248 ok
31278 1525248 1525760 ok
31279 1525760 1526272 ok
31280 1526272 1526784 ok
31281 1526784 1527296 ok
31282 1527296 1527808 ok
31283 1527808 1528320 ok
31284 1528320 1528832 ok
31285 1528832 1529344 ok
31286 1529344 1529856 ok
31287 1529856 1530368 ok
31288 1530368 1530880 ok
31289 1530880 1531392 ok
31290 1531392 1531904 ok
31291 1531904 1532416 ok
31292 1532416 1532928 ok
31293 1532928 1533440 ok
31294 1533440 1533952 ok
31295 1533952 1534464 ok
31296 1534464 1534976 ok
31297 1534976 1535488 ok
31298 1535488 1536000 ok
31299 1536000 1536512 ok
31300 1536512 1537024 ok
31301 1537024 1537536 ok
31302 1537536 1538048 ok
31303 1538048 1538560 ok
31304 1538560 1539072 ok
31305 1539072 1539584 ok
31306 1539584 1540096 ok
31307 1540096 1540608 ok
31308 1540608 1541120 ok
31309 1541120 1541632 ok
31310 1541632 1542144 ok
31311 1542144 1542656 ok
31312 1542656 1543168 ok
31313 1543168 1543680 ok
31314 1543680 1544192 ok
31315 1544192 1544704 ok
31316 1544704 1545216 ok
31317 1545216 1545728 ok
31318 1545728 1546240 ok
31319 1546240 1546752 ok
31320 1546752 1547264 ok
31321 1547264 1547776 ok
31322 1547776 1548288 ok
31323 1548288 1548800 ok
31324 1548800 1549312 ok
31325 1549312 1549824 ok
31326 1549824 1550336 ok
31327 1550336 1550848 ok
31328 1550848 1551360 ok
31329 1551360 1551872 ok
31330 1551872 1552384 ok
31331 1552384 1552896 ok
31332 1552896 1553408 ok
31333 1553408 1553920 ok
31334 1553920 1554432 ok
31335 1554432 1554944 ok
31336 1554944 1555456 ok
31337 1555456 1555968 ok
31338 1555968 1556480 ok
31339 1556480 1556992 ok
31340 1556992 1557504 ok
31341 1557504 1558016 ok
31342 1558016 1558528 ok
31343 1558528 1559040 ok
31344 1559040 1559552 ok
31345 1559552 1560064 ok
31346 1560064 1560576 ok
31347 1560576 1561088 ok
31348 1561088 1561600 ok
31349 1561600 1562112 ok
31350 1562112 1562624 ok
31351 1562624 1563136 ok
31352 1563136 1563648 ok
31353 1563648 1564160 ok
31354 1564160 1564672 ok
31355 1564672 1565184 ok
31356 1565184 1565696 ok
31357 1565696 1566208 ok
31358 1566208 1566720 ok
31359 1566720 1567232 ok
31360 1567232 1567744 ok
31361 1567744 1568256 ok
31362 1568256 1568768 ok
31363 1568768 1569280 ok
31364 1569280 1569792 ok
31365 1569792 1570304 ok
31366 1570304 1570816 ok
31367 1570816 1571328 ok
31368 1571328 1571840 ok
31369 1571840 1572352 ok
31370 1572352 1572864 ok
31371 1572864 1573376 ok
31372 1573376 1573888 ok
31373 1573888 1574400 ok
31374 1574400 1574912 ok
31375 1574912 1575424 ok
31376 1575424 1575936 ok
31377 1575936 1576448 ok
31378 1576448 1576960 ok
31379 1576960 1577472 ok
31380 1577472 1577984 ok
31381 1577984 1578496 ok
31382 1578496 1579008 ok
31383 1579008 1579520 ok
31384 1579520 1580032 ok
31385 1580032 1580544 ok
31386 1580544 1581056 ok
31387 1581056 1581568 ok
31388 1581568 1582080 ok
31389 1582080 1582592 ok
31390 1582592 1583104 ok
31391 1583104 1583616 ok
31392 1583616 1584128 ok
31393 1584128 1584640 ok
31394 1584640 1585152 ok
31395 1585152 1585664 ok
31396 1585664 1586176 ok
31397 1586176 1586688 ok
31398 1586688 1587200 ok
31399 1587200 1587712 ok
31400 1587712 1588224 ok
31401 1588224 1588736 ok
31402 1588736 1589248 ok
31403 1589248 1589760 ok
31404 1589760 1590272 ok
31405 1590272 1590784 ok
31406 1590784 1591296 ok
31407 1591296 1591808 ok
31408 1591808 1592320 ok
31409 1592320 1592832 ok
31410 1592832 1593344 ok
31411 1593344 1593856 ok
31412 1593856 1594368 ok
31413 1594368 1594880 ok
31414 1594880 1595392 ok
31415 1595392 1595904 ok
31416 1595904 1596416 ok
31417 1596416 1596928 ok
31418 1596928 1597440 ok
31419 1597440 1597952 ok
31420 1597952 1598464 ok
31421 1598464 1598976 ok
31422 1598976 1599488 ok
31423 1599488 1600000 ok
31424 1600000 1600512 ok
31425 1600512 1601024 ok
31426 1601024 1601536 ok
31427 1601536 1602048 ok
31428 1602048 1602560 ok
31429 1602560 1603072 ok
31430 1603072 1603584 ok
31431 1603584 1604096 ok
31432 1604096 1604608 ok
31433 1604608 1605120 ok
31434 1605120 1605632 ok
31435 1605632 1606144 ok
31436 1606144 1606656 ok
31437 1606656 1607168 ok
31438 1607168 1607680 ok
31439 1607680 1608192 ok
31440 1608192 1608704 ok
31441 1608704 1609216 ok
31442 1609216 1609728 ok
31443 1609728 1610240 ok
31444 1610240 1610752 ok
31445 1610752 1611264 ok
31446 1611264 1611776 ok
31447 1611776 1612288 ok
31448 1612288 1612800 ok
31449 1612800 1613312 ok
31450 1613312 1613824 ok
31451 1613824 1614336 ok
31452 1614336 1614848 ok
31453 1614848 1615360 ok
31454 1615360 1615872 ok
31455 1615872 1616384 ok
31456 1616384 1616896 ok
31457 1616896 1617408 ok
31458 1617408 1617920 ok
31459 1617920 1618432 ok
31460 1618432 1618944 ok
31461 1618944 1619456 ok
31462 1619456 1619968 ok
31463 1619968 1620480 ok
31464 1620480 1620992 ok
31465 1620992 1621504 ok
31466 1621504 1622016 ok
31467 1622016 1622528 ok
31468 1622528 1623040 ok
31469 1623040 1623552 ok
31470 1623552 1624064 ok
31471 1624064 1624576 ok
31472 1624576 1625088 ok
31473 1625088 1625600 ok
31474 1625600 1626112 ok
31475 1626112 1626624 ok
31476 1626624 1627136 ok
31477 1627136 1627648 ok
31478 1627648 1628160 ok
31479 1628160 1628672 ok
31480 1628672 1629184 ok
31481 1629184 1629696 ok
31482 1629696 1630208 ok
31483 1630208 1630720 ok
31484 1630720 1631232 ok
31485 1631232 1631744 ok
31486 1631744 1632256 ok
31487 1632256 1632768 ok
31488 1632768 1633280 ok
31489 1633280 1633792 ok
31490 1633792 1634304 ok
31491 1634304 1634816 ok
31492 1634816 1635328 ok
31493 1635328 1635840 ok
31494 1635840 1636352 ok
31495 1636352 1636864 ok
31496 1636864 1637376 ok
31497 1637376 1637888 ok
31498 1637888 1638400 ok
31499 1638400 1638912 ok
31500 1638912 1639424 ok
31501 1639424 1639936 ok
31502 1639936 1640448 ok
31503 1640448 1640960 ok
31504 1640960 1641472 ok
31505 1641472 1641984 ok
31506 1641984 1642496 ok
31507 1642496 1643008 ok
31508 1643008 1643520 ok
31509 1643520 1644032 ok
31510 1644032 1644544 ok
31511 1644544 1645056 ok
31512 1645056 1645568 ok
31513 1645568 1646080 ok
31514 1646080 1646592 ok
31515 1646592 1647104 ok
31516 1647104 1647616 ok
31517 1647616 1648128 ok
31518 1648128 1648640 ok
31519 1648640 1649152 ok
31520 1649152 1649664 ok
31521 1649664 1650176 ok
31522 1650176 1650688 ok
31523 1650688 1651200 ok
31524 1651200 1651712 ok
31525 1651712 1652224 ok
31526 1652224 1652736 ok
31527 1652736 1653248 ok
31528 1653248 1653760 ok
31529 1653760 1654272 ok
31530 1654272 1654784 ok
31531 1654784 1655296 ok
31532 1655296 1655808 ok
31533 1655808 1656320 ok
31534 1656320 1656832 ok
31535 1656832 1657344 ok
31536 1657344 1657856 ok
31537 1657856 1658368 ok
31538 1658368 1658880 ok
31539 1658880 1659392 ok
31540 1659392 1659904 ok
31541 1659904 1660416 ok
31542 1660416 1660928 ok
31543 1660928 1661440 ok
31544 1661440 1661952 ok
31545 1661952 1662464 ok
31546 1662464 1662976 ok
31547 1662976 1663488 ok
31548 1663488 1664000 ok
31549 1664000 1664512 ok
31550 1664512 1665024 ok
31551 1665024 1665536 ok
31552 1665536 1666048 ok
31553 1666048 1666560 ok
31554 1666560 1667072 ok
31555 1667072 1667584 ok
31556 1667584 1668096 ok
31557 1668096 1668608 ok
31558 1668608 1669120 ok
31559 1669120 1669632 ok
31560 1669632 1670144 ok
31561 1670144 1670656 ok
31562 1670656 1671168 ok
31563 1671168 1671680 ok
31564 1671680 1672192 ok
31565 1672192 1672704 ok
31566 1672704 1673216 ok
31567 1673216 1673728 ok
31568 1673728 1674240 ok
31569 1674240 1674752 ok
31570 1674752 1675264 ok
31571 1675264 1675776 ok
31572 1675776 1676288 ok
31573 1676288 1676800 ok
31574 1676800 1677312 ok
31575 1677312 1677824 ok
31576 1677824 1678336 ok
31577 1678336 1678848 ok
31578 1678848 1679360 ok
31579 1679360 1679872 ok
31580 1679872 1680384 ok
31581 1680384 1680896 ok
31582 1680896 1681408 ok
31583 1681408 1681920 ok
31584 1681920 1682432 ok
31585 1682432 1682944 ok
31586 1682944 1683456 ok
31587 1683456 1683968 ok
31588 1683968 1684480 ok
31589 1684480 1684992 ok
31590 1684992 1685504 ok
31591 1685504 1686016 ok
31592 1686016 1686528 ok
31593 1686528 1687040 ok
31594 1687040 1687552 ok
31595 1687552 1688064 ok
31596 1688064 1688576 ok
31597 1688576 1689088 ok
31598 1689088 1689600 ok
31599 1689600 1690112 ok
31600 1690112 1690624 ok
31601 1690624 1691136 ok
31602 1691136 1691648 ok
31603 1691648 1692160 ok
31604 1692160 1692672 ok
31605 1692672 1693184 ok
31606 1693184 1693696 ok
31607 1693696 1694208 ok
31608 1694208 1694720 ok
31609 1694720 1695232 ok
31610 1695232 1695744 ok
31611 1695744 1696256 ok
31612 1696256 1696768 ok
31613 1696768 1697280 ok
31614 1697280 1697792 ok
31615 1697792 1698304 ok
31616 1698304 1698816 ok
31617 1698816 1699328 ok
31618 1699328 1699840 ok
31619 1699840 1700352 ok
31620 1700352 1700864 ok
31621 1700864 1701376 ok
31622 1701376 1701888 ok
31623 1701888 1702400 ok
31624 1702400 1702912 ok
31625 1702912 1703424 ok
31626 1703424 1703936 ok
31627 1703936 1704448 ok
31628 1704448 1704960 ok
31629 1704960 1705472 ok
31630 1705472 1705984 ok
31631 1705984 1706496 ok
31632 1706496 1707008 ok
31633 1707008 1707520 ok
31634 1707520 1708032 ok
31635 1708032 1708544 ok
31636 1708544 1709056 ok
31637 1709056 1709568 ok
31638 1709568 1710080 ok
31639 1710080 1710592 ok
31640 1710592 1711104 ok
31641 1711104 1711616 ok
31642 1711616 1712128 ok
31643 1712128 1712640 ok
31644 1712640 1713152 ok
31645 1713152 1713664 ok
31646 1713664 1714176 ok
31647 1714176 1714688 ok
31648 1714688 1715200 ok
31649 1715200 1715712 ok
31650 1715712 1716224 ok
31651 1716224 1716736 ok
31652 1716736 1717248 ok
31653 1717248 1717760 ok
31654 1717760 1718272 ok
31655 1718272 1718784 ok
31656 1718784 1719296 ok
31657 1719296 1719808 ok
31658 1719808 1720320 ok
31659 1720320 1720832 ok
31660 1720832 1721344 ok
31661 1721344 1721856 ok
31662 1721856 1722368 ok
31663 1722368 1722880 ok
31664 1722880 1723392 ok
31665 1723392 1723904 ok
31666 1723904 1724416 ok
31667 1724416 1724928 ok
31668 1724928 1725440 ok
31669 1725440 1725952 ok
31670 1725952 1726464 ok
31671 1726464 1726976 ok
31672 1726976 1727488 ok
31673 1727488 1728000 ok
31674 1728000 1728512 ok
31675 1728512 1729024 ok
31676 1729024 1729536 ok
31677 1729536 1730048 ok
31678 1730048 1730560 ok
31679 1730560 1731072 ok
31680 1731072 1731584 ok
31681 1731584 1732096 ok
31682 1732096 1732608 ok
31683 1732608 1733120 ok
31684 1733120 1733632 ok
31685 1733632 1734144 ok
31686 1734144 1734656 ok
31687 1734656 1735168 ok
31688 1735168 1735680 ok
31689 1735680 1736192 ok
31690 1736192 1736704 ok
31691 1736704 1737216 ok
31692 1737216 1737728 ok
31693 1737728 1738240 ok
31694 1738240 1738752 ok
31695 1738752 1739264 ok
31696 1739264 1739776 ok
31697 1739776 1740288 ok
31698 1740288 1740800 ok
31699 1740800 1741312 ok
31700 1741312 1741824 ok
31701 1741824 1742336 ok
31702 1742336 1742848 ok
31703 1742848 1743360 ok
31704 1743360 1743872 ok
31705 1743872 1744384 ok
31706 1744384 1744896 ok
31707 1744896 1745408 ok
31708 1745408 1745920 ok
31709 1745920 1746432 ok
31710 1746432 1746944 ok
31711 1746944 1747456 ok
31712 1747456 1747968 ok
31713 1747968 1748480 ok
31714 1748480 1748992 ok
31715 1748992 1749504 ok
31716 1749504 1750016 ok
31717 1750016 1750528 ok
31718 1750528 1751040 ok
31719 1751040 1751552 ok
31720 1751552 1752064 ok
31721 1752064 1752576 ok
31722 1752576 1753088 ok
31723 1753088 1753600 ok
31724 1753600 1754112 ok
31725 1754112 1754624 ok
31726 1754624 1755136 ok
31727 1755136 1755648 ok
31728 1755648 1756160 ok
31729 1756160 1756672 ok
31730 1756672 1757184 ok
31731 1757184 1757696 ok
31732 1757696 1758208 ok
31733 1758208 1758720 ok
31734 1758720 1759232 ok
31735 1759232 1759744 ok
31736 1759744 1760256 ok
31737 1760256 1760768 ok
31738 1760768 1761280 ok
31739 1761280 1761792 ok
31740 1761792 1762304 ok
31741 1762304 1762816 ok
31742 1762816 1763328 ok
31743 1763328 1763840 ok
31744 1763840 1764352 ok
31745 1764352 1764864 ok
31746 1764864 1765376 ok
31747 1765376 1765888 ok
31748 1765888 1766400 ok
31749 1766400 1766912 ok
31750 1766912 1767424 ok
31751 1767424 1767936 ok
31752 1767936 1768448 ok
31753 1768448 1768960 ok
31754 1768960 1769472 ok
31755 1769472 1769984 ok
31756 1769984 1770496 ok
31757 1770496 1771008 ok
31758 1771008 1771520 ok
31759 1771520 1772032 ok
31760 1772032 1772544 ok
31761 1772544 1773056 ok
31762 1773056 1773568 ok
31763 1773568 1774080 ok
31764 1774080 1774592 ok
31765 1774592 1775104 ok
31766 1775104 1775616 ok
31767 1775616 1776128 ok
31768 1776128 1776640 ok
31769 1776640 1777152 ok
31770 1777152 1777664 ok
31771 1777664 1778176 ok
31772 1778176 1778688 ok
31773 1778688 1779200 ok
31774 1779200 1779712 ok
31775 1779712 1780224 ok
31776 1780224 1780736 ok
31777 1780736 1781248 ok
31778 1781248 1781760 ok
31779 1781760 1782272 ok
31780 1782272 1782784 ok
31781 1782784 1783296 ok
31782 1783296 1783808 ok
31783 1783808 1784320 ok
31784 1784320 1784832 ok
31785 1784832 1785344 ok
31786 1785344 1785856 ok
31787 1785856 1786368 ok
31788 1786368 1786880 ok
31789 1786880 1787392 ok
31790 1787392 1787904 ok
31791 1787904 1788416 ok
31792 1788416 1788928 ok
31793 1788928 1789440 ok
31794 1789440 1789952 ok
31795 1789952 1790464 ok
31796 1790464 1790976 ok
31797 1790976 1791488 ok
31798 1791488 1792000 ok
31799 1792000 1792512 ok
31800 1792512 1793024 ok
31801 1793024 1793536 ok
31802 1793536 1794048 ok
31803 1794048 1794560 ok
31804 1794560 1795072 ok
31805 1795072 1795584 ok
31806 1795584 1796096 ok
31807 1796096 1796608 ok
31808 1796608 1797120 ok
31809 1797120 1797632 ok
31810 1797632 1798144 ok
31811 1798144 1798656 ok
31812 1798656 1799168 ok
31813 1799168 1799680 ok
31814 1799680 1800192 ok
31815 1800192 1800704 ok
31816 1800704 1801216 ok
31817 1801216 1801728 ok
31818 1801728 1802240 ok
31819 1802240 1802752 ok
31820 1802752 1803264 ok
31821 1803264 1803776 ok
31822 1803776 1804288 ok
31823 1804288 1804800 ok
31824 1804800 1805312 ok
31825 1805312 1805824 ok
31826 1805824 1806336 ok
31827 1806336 1806848 ok
31828 1806848 1807360 ok
31829 1807360 1807872 ok
31830 1807872 1808384 ok
31831 1808384 1808896 ok
31832 1808896 1809408 ok
31833 1809408 1809920 ok
31834 1809920 1810432 ok
31835 1810432 1810944 ok
31836 1810944 1811456 ok
31837 1811456 1811968 ok
31838 1811968 1812480 ok
31839 1812480 1812992 ok
31840 1812992 1813504 ok
31841 1813504 1814016 ok
31842 1814016 1814528 ok
31843 1814528 1815040 ok
31844 1815040 1815552 ok
31845 1815552 1816064 ok
31846 1816064 1816576 ok
31847 1816576 1817088 ok
31848 1817088 1817600 ok
31849 1817600 1818112 ok
31850 1818112 1818624 ok
31851 1818624 1819136 ok
31852 1819136 1819648 ok
31853 1819648 1820160 ok
31854 1820160 1820672 ok
31855 1820672 1821184 ok
31856 1821184 1821696 ok
31857 1821696 1822208 ok
31858 1822208 1822720 ok
31859 1822720 1823232 ok
31860 1823232 1823744 ok
31861 1823744 1824256 ok
31862 1824256 1824768 ok
31863 1824768 1825280 ok
31864 1825280 1825792 ok
31865 1825792 1826304 ok
31866 1826304 1826816 ok
31867 1826816 1827328 ok
31868 1827328 1827840 ok
31869 1827840 1828352 ok
31870 1828352 1828864 ok
31871 1828864 1829376 ok
31872 1829376 1829888 ok
31873 1829888 1830400 ok
31874 1830400 1830912 ok
31875 1830912 1831424 ok
31876 1831424 1831936 ok
31877 1831936 1832448 ok
31878 1832448 1832960 ok
31879 1832960 1833472 ok
31880 1833472 1833984 ok
31881 1833984 1834496 ok
31882 1834496 1835008 ok
31883 1835008 1835520 ok
31884 1835520 1836032 ok
31885 1836032 1836544 ok
31886 1836544 1837056 ok
31887 1837056 1837568 ok
31888 1837568 1838080 ok
31889 1838080 1838592 ok
31890 1838592 1839104 ok
31891 1839104 1839616 ok
31892 1839616 1840128 ok
31893 1840128 1840640 ok
31894 1840640 1841152 ok
31895 1841152 1841664 ok
31896 1841664 1842176 ok
31897 1842176 1842688 ok
31898 1842688 1843200 ok
31899 1843200 1843712 ok
31900 1843712 1844224 ok
31901 1844224 1844736 ok
31902 1844736 1845248 ok
31903 1845248 1845760 ok
31904 1845760 1846272 ok
31905 1846272 1846784 ok
31906 1846784 1847296 ok
31907 1847296 1847808 ok
31908 1847808 1848320 ok
31909 1848320 1848832 ok
31910 1848832 1849344 ok
31911 1849344 1849856 ok
31912 1849856 1850368 ok
31913 1850368 1850880 ok
31914 1850880 1851392 ok
31915 1851392 1851904 ok
31916 1851904 1852416 ok
31917 1852416 1852928 ok
31918 1852928 1853440 ok
31919 1853440 1853952 ok
31920 1853952 1854464 ok
31921 1854464 1854976 ok
31922 1854976 1855488 ok
31923 1855488 1856000 ok
31924 1856000 1856512 ok
31925 1856512 1857024 ok
31926 1857024 1857536 ok
31927 1857536 1858048 ok
31928 1858048 1858560 ok
31929 1858560 1859072 ok
31930 1859072 1859584 ok
31931 1859584 1860096 ok
31932 1860096 1860608 ok
31933 1860608 1861120 ok
31934 1861120 1861632 ok
31935 1861632 1862144 ok
31936 1862144 1862656 ok
31937 1862656 1863168 ok
31938 1863168 1863680 ok
31939 1863680 1864192 ok
31940 1864192 1864704 ok
31941 1864704 1865216 ok
31942 1865216 1865728 ok
31943 1865728 1866240 ok
31944 1866240 1866752 ok
31945 1866752 1867264 ok
31946 1867264 1867776 ok
31947 1867776 1868288 ok
31948 1868288 1868800 ok
31949 1868800 1869312 ok
31950 1869312 1869824 ok
31951 1869824 1870336 ok
31952 1870336 1870848 ok
31953 1870848 1871360 ok
31954 1871360 1871872 ok
31955 1871872 1872384 ok
31956 1872384 1872896 ok
31957 1872896 1873408 ok
31958 1873408 1873920 ok
31959 1873920 1874432 ok
31960 1874432 1874944 ok
31961 1874944 1875456 ok
31962 1875456 1875968 ok
31963 1875968 1876480 ok
31964 1876480 1876992 ok
31965 1876992 1877504 ok
31966 1877504 1878016 ok
31967 1878016 1878528 ok
31968 1878528 1879040 ok
31969 1879040 1879552 ok
31970 1879552 1880064 ok
31971 1880064 1880576 ok
31972 1880576 1881088 ok
31973 1881088 1881600 ok
31974 1881600 1882112 ok
31975 1882112 1882624 ok
31976 1882624 1883136 ok
31977 1883136 1883648 ok
31978 1883648 1884160 ok
31979 1884160 1884672 ok
31980 1884672 1885184 ok
31981 1885184 1885696 ok
31982 1885696 1886208 ok
31983 1886208 1886720 ok
31984 1886720 1887232 ok
31985 1887232 1887744 ok
31986 1887744 1888256 ok
31987 1888256 1888768 ok
31988 1888768 1889280 ok
31989 1889280 1889792 ok
31990 1889792 1890304 ok
31991 1890304 1890816 ok
31992 1890816 1891328 ok
31993 1891328 1891840 ok
31994 1891840 1892352 ok
31995 1892352 1892864 ok
31996 1892864 1893376 ok
31997 1893376 1893888 ok
31998 1893888 1894400 ok
31999 1894400 1894912 ok
32000 1894912 1895424 ok
32001 1895424 1895936 ok
32002 1895936 1896448 ok
32003 1896448 1896960 ok
32004 1896960 1897472 ok
32005 1897472 1897984 ok
32006 1897984 1898496 ok
32007 1898496 1899008 ok
32008 1899008 1899520 ok
32009 1899520 1900032 ok
32010 1900032 1900544 ok
32011 1900544 1901056 ok
32012 1901056 1901568 ok
32013 1901568 1902080 ok
32014 1902080 1902592 ok
32015 1902592 1903104 ok
32016 1903104 1903616 ok
32017 1903616 1904128 ok
32018 1904128 1904640 ok
32019 1904640 1905152 ok
32020 1905152 1905664 ok
32021 1905664 1906176 ok
32022 1906176 1906688 ok
32023 1906688 1907200 ok
32024 1907200 1907712 ok
32025 1907712 1908224 ok
32026 1908224 1908736 ok
32027 1908736 1909248 ok
32028 1909248 1909760 ok
32029 1909760 1910272 ok
32030 1910272 1910784 ok
32031 1910784 1911296 ok
32032 1911296 1911808 ok
32033 1911808 1912320 ok
32034 1912320 1912832 ok
32035 1912832 1913344 ok
32036 1913344 1913856 ok
32037 1913856 1914368 ok
32038 1914368 1914880 ok
32039 1914880 1915392 ok
32040 1915392 1915904 ok
32041 1915904 1916416 ok
32042 1916416 1916928 ok
32043 1916928 1917440 ok
32044 1917440 1917952 ok
32045 1917952 1918464 ok
32046 1918464 1918976 ok
32047 1918976 1919488 ok
32048 1919488 1920000 ok
32049 1920000 1920512 ok
32050 1920512 1921024 ok
32051 1921024 1921536 ok
32052 1921536 1922048 ok
32053 1922048 1922560 ok
32054 1922560 1923072 ok
32055 1923072 1923584 ok
32056 1923584 1924096 ok
32057 1924096 1924608 ok
32058 1924608 1925120 ok
32059 1925120 1925632 ok
32060 1925632 1926144 ok
32061 1926144 1926656 ok
32062 1926656 1927168 ok
32063 1927168 1927680 ok
32064 1927680 1928192 ok
32065 1928192 1928704 ok
32066 1928704 1929216 ok
32067 1929216 1929728 ok
32068 1929728 1930240 ok
32069 1930240 1930752 ok
32070 1930752 1931264 ok
32071 1931264 1931776 ok
32072 1931776 1932288 ok
32073 1932288 1932800 ok
32074 1932800 1933312 ok
32075 1933312 1933824 ok
32076 1933824 1934336 ok
32077 1934336 1934848 ok
32078 1934848 1935360 ok
32079 1935360 1935872 ok
32080 1935872 1936384 ok
32081 1936384 1936896 ok
32082 1936896 1937408 ok
32083 1937408 1937920 ok
32084 1937920 1938432 ok
32085 1938432 1938944 ok
32086 1938944 1939456 ok
32087 1939456 1939968 ok
32088 1939968 1940480 ok
32089 1940480 1940992 ok
32090 1940992 1941504 ok
32091 1941504 1942016 ok
32092 1942016 1942528 ok
32093 1942528 1943040 ok
32094 1943040 1943552 ok
32095 1943552 1944064 ok
32096 1944064 1944576 ok
32097 1944576 1945088 ok
32098 1945088 1945600 ok
32099 1945600 1946112 ok
32100 1946112 1946624 ok
32101 1946624 1947136 ok
32102 1947136 1947648 ok
32103 1947648 1948160 ok
32104 1948160 1948672 ok
32105 1948672 1949184 ok
32106 1949184 1949696 ok
32107 1949696 1950208 ok
32108 1950208 1950720 ok
32109 1950720 1951232 ok
32110 1951232 1951744 ok
32111 1951744 1952256 ok
32112 1952256 1952768 ok
32113 1952768 1953280 ok
32114 1953280 1953792 ok
32115 1953792 1954304 ok
32116 1954304 1954816 ok
32117 1954816 1955328 ok
32118 1955328 1955840 ok
32119 1955840 1956352 ok
32120 1956352 1956864 ok
32121 1956864 1957376 ok
32122 1957376 1957888 ok
32123 1957888 1958400 ok
32124 1958400 1958912 ok
32125 1958912 1959424 ok
32126 1959424 1959936 ok
32127 1959936 1960448 ok
32128 1960448 1960960 ok
32129 1960960 1961472 ok
32130 1961472 1961984 ok
32131 1961984 1962496 ok
32132 1962496 1963008 ok
32133 1963008 1963520 ok
32134 1963520 1964032 ok
32135 1964032 1964544 ok
32136 1964544 1965056 ok
32137 1965056 1965568 ok
32138 1965568 1966080 ok
32139 Sep 22 23:14:38.718 DEBG IO Write 1027 has deps [JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32140 Sep 22 23:14:38.718 DEBG up_ds_listen was notified
32141 Sep 22 23:14:38.719 DEBG up_ds_listen process 1027
32142 Sep 22 23:14:38.719 DEBG [A] ack job 1027:28, : downstairs
32143 Sep 22 23:14:38.719 DEBG up_ds_listen checked 1 jobs, back to waiting
32144 test test::test_pantry_import_from_url_ovmf ... ok
321452023-09-22T23:14:38.781ZINFOcrucible-pantry (datafile): Scrub at offset 2560/3840 sp:2560
32146 Sep 22 23:14:38.897 DEBG Write :1001 deps:[] res:true
32147 Sep 22 23:14:38.928 DEBG Write :1001 deps:[] res:true
32148 Sep 22 23:14:38.929 WARN returning error on flush!
32149 Sep 22 23:14:38.929 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:false f:1 g:1
32150 Sep 22 23:14:38.960 DEBG Write :1001 deps:[] res:true
32151 Sep 22 23:14:38.961 WARN returning error on flush!
32152 Sep 22 23:14:38.961 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:false f:1 g:1
32153 Sep 22 23:14:38.980 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
32154 Sep 22 23:14:38.980 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
32155 Sep 22 23:14:38.980 DEBG Flush :1002 extent_limit None deps:[JobId(1001), JobId(1000)] res:true f:1 g:1
32156 Sep 22 23:14:38.980 INFO [lossy] sleeping 1 second
32157 Sep 22 23:14:38.980 DEBG IO Flush 1028 has deps [JobId(1027), JobId(1026)]
321582023-09-22T23:14:39.019ZINFOcrucible-pantry (datafile): Scrub at offset 2816/3840 sp:2816
32159 Sep 22 23:14:39.061 DEBG Flush :1015 extent_limit None deps:[JobId(1014), JobId(1013)] res:true f:5 g:2
32160 Sep 22 23:14:39.061 DEBG Flush :1015 extent_limit None deps:[JobId(1014), JobId(1013)] res:true f:5 g:2
32161 Sep 22 23:14:39.061 DEBG Flush :1015 extent_limit None deps:[JobId(1014), JobId(1013)] res:true f:5 g:2
321622023-09-22T23:14:39.245ZINFOcrucible-pantry (datafile): Scrub at offset 3072/3840 sp:3072
32163 Sep 22 23:14:39.315 DEBG IO Write 1029 has deps [JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32164 Sep 22 23:14:39.315 DEBG up_ds_listen was notified
32165 Sep 22 23:14:39.315 DEBG up_ds_listen process 1029
32166 Sep 22 23:14:39.315 DEBG [A] ack job 1029:30, : downstairs
32167 Sep 22 23:14:39.315 DEBG up_ds_listen checked 1 jobs, back to waiting
321682023-09-22T23:14:39.441ZINFOcrucible-pantry (dropshot): request completed latency_us = 294 local_addr = 127.0.0.1:49411 method = GET remote_addr = 127.0.0.1:43906 req_id = ac8f04b1-d23a-4521-ad73-8c1868967f89 response_code = 200 uri = /crucible/pantry/0/job/9582738c-4762-4d3d-9423-661cc66a6ac0/is_finished
321692023-09-22T23:14:39.472ZINFOcrucible-pantry (datafile): Scrub at offset 3328/3840 sp:3328
32170 Sep 22 23:14:39.562 DEBG Flush :1018 extent_limit None deps:[JobId(1017), JobId(1016)] res:true f:6 g:2
32171 Sep 22 23:14:39.562 DEBG Flush :1018 extent_limit None deps:[JobId(1017), JobId(1016)] res:true f:6 g:2
32172 Sep 22 23:14:39.562 DEBG Flush :1018 extent_limit None deps:[JobId(1017), JobId(1016)] res:true f:6 g:2
32173 Sep 22 23:14:39.652 DEBG IO Write 1030 has deps [JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32174 Sep 22 23:14:39.652 DEBG up_ds_listen was notified
32175 Sep 22 23:14:39.652 DEBG up_ds_listen process 1030
32176 Sep 22 23:14:39.653 DEBG [A] ack job 1030:31, : downstairs
32177 Sep 22 23:14:39.653 DEBG up_ds_listen checked 1 jobs, back to waiting
32178 Sep 22 23:14:39.653 DEBG IO Flush 1031 has deps [JobId(1030), JobId(1029), JobId(1028)]
321792023-09-22T23:14:39.698ZINFOcrucible-pantry (datafile): Scrub at offset 3584/3840 sp:3584
321802023-09-22T23:14:39.925ZINFOcrucible-pantry (datafile): Scrub at offset 3840/3840 sp:3840
321812023-09-22T23:14:39.926ZINFOcrucible-pantry (datafile): Scrub fc5b0c26-5d3b-433c-b382-b2a002415eea done in 3 seconds. Retries:0 scrub_size:131072 size:3840 pause_milli:0
32182 Sep 22 23:14:39.929 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019)] res:true f:7 g:2
32183 Sep 22 23:14:39.929 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019)] res:true f:7 g:2
32184 Sep 22 23:14:39.929 DEBG Flush :1021 extent_limit None deps:[JobId(1020), JobId(1019)] res:true f:7 g:2
32185 Sep 22 23:14:39.989 DEBG IO Write 1032 has deps [JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32186 Sep 22 23:14:39.989 DEBG up_ds_listen was notified
32187 Sep 22 23:14:39.989 DEBG up_ds_listen process 1032
32188 Sep 22 23:14:39.989 DEBG [A] ack job 1032:33, : downstairs
32189 Sep 22 23:14:39.989 DEBG up_ds_listen checked 1 jobs, back to waiting
32190 Sep 22 23:14:39.990 INFO [lossy] sleeping 1 second
32191 Sep 22 23:14:40.326 DEBG IO Write 1033 has deps [JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32192 Sep 22 23:14:40.326 DEBG up_ds_listen was notified
32193 Sep 22 23:14:40.326 DEBG up_ds_listen process 1033
32194 Sep 22 23:14:40.327 DEBG [A] ack job 1033:34, : downstairs
32195 Sep 22 23:14:40.327 DEBG up_ds_listen checked 1 jobs, back to waiting
32196 Sep 22 23:14:40.327 DEBG IO Flush 1034 has deps [JobId(1033), JobId(1032), JobId(1031)]
321972023-09-22T23:14:40.473ZINFOcrucible-pantry (dropshot): request completed latency_us = 244 local_addr = 127.0.0.1:49411 method = GET remote_addr = 127.0.0.1:43906 req_id = 7c4006b9-5718-4f1e-b45c-a8eea8ded2bb response_code = 200 uri = /crucible/pantry/0/job/9582738c-4762-4d3d-9423-661cc66a6ac0/is_finished
321982023-09-22T23:14:40.474ZINFOcrucible-pantry (dropshot): request completed latency_us = 175 local_addr = 127.0.0.1:49411 method = GET remote_addr = 127.0.0.1:43906 req_id = 05db0b20-31e1-4c60-9e9c-a667babfa6a4 response_code = 200 uri = /crucible/pantry/0/job/9582738c-4762-4d3d-9423-661cc66a6ac0/ok
321992023-09-22T23:14:40.474ZINFOcrucible-pantry (datafile): detach removing entry for volume fc5b0c26-5d3b-433c-b382-b2a002415eea
322002023-09-22T23:14:40.474ZINFOcrucible-pantry (datafile): detaching volume fc5b0c26-5d3b-433c-b382-b2a002415eea
32201 Sep 22 23:14:40.475 DEBG Flush :1022 extent_limit None deps:[] res:true f:8 g:2
32202 Sep 22 23:14:40.475 DEBG Flush :1022 extent_limit None deps:[] res:true f:8 g:2
32203 Sep 22 23:14:40.475 DEBG Flush :1022 extent_limit None deps:[] res:true f:8 g:2
322042023-09-22T23:14:40.476ZINFOcrucible-pantry (datafile): Request to deactivate this guest
322052023-09-22T23:14:40.476ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 set deactivating.
322062023-09-22T23:14:40.478ZINFOcrucible-pantry (dropshot): request completed latency_us = 3422 local_addr = 127.0.0.1:49411 method = DELETE remote_addr = 127.0.0.1:43906 req_id = 96e59cb2-2739-4ad0-9fb4-f89347e35f8b response_code = 204 uri = /crucible/pantry/0/volume/fc5b0c26-5d3b-433c-b382-b2a002415eea
322072023-09-22T23:14:40.478ZINFOcrucible-pantry: Upstairs starts
322082023-09-22T23:14:40.478ZINFOcrucible-pantry: Crucible Version: BuildInfo { version: "0.0.1", git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46", git_commit_timestamp: "2023-09-22T22:51:18.000000000Z", git_branch: "main", rustc_semver: "1.70.0", rustc_channel: "stable", rustc_host_triple: "x86_64-unknown-illumos", rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca", cargo_triple: "x86_64-unknown-illumos", debug: true, opt_level: 0, }
322092023-09-22T23:14:40.478ZINFOcrucible-pantry: Upstairs <-> Downstairs Message Version: 4
322102023-09-22T23:14:40.478ZINFOcrucible-pantry: Crucible stats registered with UUID: 521f2004-29de-4d41-9494-447a49569d29
322112023-09-22T23:14:40.478ZINFOcrucible-pantry: Crucible 521f2004-29de-4d41-9494-447a49569d29 has session id: b89f643a-f10d-4978-b118-fac458329f63
322122023-09-22T23:14:40.478ZINFOcrucible-pantry: [0] connecting to 127.0.0.1:63639 looper = 0
322132023-09-22T23:14:40.478ZINFOcrucible-pantry: [1] connecting to 127.0.0.1:43949 looper = 1
322142023-09-22T23:14:40.479ZINFOcrucible-pantry: [2] connecting to 127.0.0.1:63175 looper = 2
322152023-09-22T23:14:40.479ZINFOcrucible-pantry: up_listen starts task = up_listen
322162023-09-22T23:14:40.479ZINFOcrucible-pantry: Wait for all three downstairs to come online
322172023-09-22T23:14:40.479ZINFOcrucible-pantry: Flush timeout: 0.5
322182023-09-22T23:14:40.479ZINFOcrucible-pantry: [0] 521f2004-29de-4d41-9494-447a49569d29 looper connected looper = 0
322192023-09-22T23:14:40.479ZINFOcrucible-pantry: [0] Proc runs for 127.0.0.1:63639 in state New
322202023-09-22T23:14:40.479ZINFOcrucible-pantry: [1] 521f2004-29de-4d41-9494-447a49569d29 looper connected looper = 1
322212023-09-22T23:14:40.479ZINFOcrucible-pantry: [1] Proc runs for 127.0.0.1:43949 in state New
322222023-09-22T23:14:40.479ZINFOcrucible-pantry: [2] 521f2004-29de-4d41-9494-447a49569d29 looper connected looper = 2
322232023-09-22T23:14:40.479ZINFOcrucible-pantry: [2] Proc runs for 127.0.0.1:63175 in state New
32224 Sep 22 23:14:40.479 INFO accepted connection from 127.0.0.1:51284, task: main
32225 Sep 22 23:14:40.479 INFO accepted connection from 127.0.0.1:54524, task: main
32226 Sep 22 23:14:40.479 INFO accepted connection from 127.0.0.1:49615, task: main
32227 Sep 22 23:14:40.479 INFO Connection request from 521f2004-29de-4d41-9494-447a49569d29 with version 4, task: proc
32228 Sep 22 23:14:40.479 INFO upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 } connected, version 4, task: proc
32229 Sep 22 23:14:40.479 INFO Connection request from 521f2004-29de-4d41-9494-447a49569d29 with version 4, task: proc
32230 Sep 22 23:14:40.479 INFO upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 } connected, version 4, task: proc
32231 Sep 22 23:14:40.479 INFO Connection request from 521f2004-29de-4d41-9494-447a49569d29 with version 4, task: proc
32232 Sep 22 23:14:40.479 INFO upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 } connected, version 4, task: proc
322332023-09-22T23:14:40.480ZINFOcrucible-pantry: [0] 521f2004-29de-4d41-9494-447a49569d29 (92660512-15df-4806-a057-1d8e47122931) New New New ds_transition to WaitActive
322342023-09-22T23:14:40.480ZINFOcrucible-pantry: [0] Transition from New to WaitActive
322352023-09-22T23:14:40.480ZINFOcrucible-pantry: [1] 521f2004-29de-4d41-9494-447a49569d29 (92660512-15df-4806-a057-1d8e47122931) WaitActive New New ds_transition to WaitActive
322362023-09-22T23:14:40.480ZINFOcrucible-pantry: [1] Transition from New to WaitActive
322372023-09-22T23:14:40.480ZINFOcrucible-pantry: [2] 521f2004-29de-4d41-9494-447a49569d29 (92660512-15df-4806-a057-1d8e47122931) WaitActive WaitActive New ds_transition to WaitActive
322382023-09-22T23:14:40.480ZINFOcrucible-pantry: [2] Transition from New to WaitActive
32239 The guest has requested activation
322402023-09-22T23:14:40.480ZINFOcrucible-pantry: 521f2004-29de-4d41-9494-447a49569d29 active request set
322412023-09-22T23:14:40.480ZINFOcrucible-pantry: [0] received activate with gen 3
322422023-09-22T23:14:40.480ZINFOcrucible-pantry: [0] client got ds_active_rx, promote! session 92660512-15df-4806-a057-1d8e47122931
322432023-09-22T23:14:40.480ZINFOcrucible-pantry: [1] received activate with gen 3
322442023-09-22T23:14:40.480ZINFOcrucible-pantry: [1] client got ds_active_rx, promote! session 92660512-15df-4806-a057-1d8e47122931
322452023-09-22T23:14:40.480ZINFOcrucible-pantry: [2] received activate with gen 3
322462023-09-22T23:14:40.480ZINFOcrucible-pantry: [2] client got ds_active_rx, promote! session 92660512-15df-4806-a057-1d8e47122931
32247 Sep 22 23:14:40.480 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } to UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 }
32248 Sep 22 23:14:40.480 WARN Signaling to UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } thread that UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 } is being promoted (read-write)
32249 Sep 22 23:14:40.480 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } to UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 }
32250 Sep 22 23:14:40.480 WARN Signaling to UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } thread that UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 } is being promoted (read-write)
32251 Sep 22 23:14:40.481 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } to UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 }
32252 Sep 22 23:14:40.481 WARN Signaling to UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 } thread that UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 } is being promoted (read-write)
32253 Sep 22 23:14:40.481 WARN Another upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 }, task: main
32254 Sep 22 23:14:40.481 INFO UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 } is now active (read-write)
32255 Sep 22 23:14:40.481 WARN Another upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 }, task: main
32256 Sep 22 23:14:40.481 INFO UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 } is now active (read-write)
32257 Sep 22 23:14:40.481 WARN Another upstairs UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc, gen: 2 }, task: main
32258 Sep 22 23:14:40.481 INFO UpstairsConnection { upstairs_id: 521f2004-29de-4d41-9494-447a49569d29, session_id: 92660512-15df-4806-a057-1d8e47122931, gen: 3 } is now active (read-write)
32259 Sep 22 23:14:40.481 INFO connection (127.0.0.1:33714): all done
32260 Sep 22 23:14:40.481 INFO connection (127.0.0.1:36045): all done
32261 Sep 22 23:14:40.481 INFO connection (127.0.0.1:62745): all done
322622023-09-22T23:14:40.482ZERROcrucible-pantry (datafile): [0] 521f2004-29de-4d41-9494-447a49569d29 (1e7e774e-f305-4cf5-a9d4-7b0b96e564fc) cmd_loop saw YouAreNoLongerActive 521f2004-29de-4d41-9494-447a49569d29 92660512-15df-4806-a057-1d8e47122931 3
322632023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [0] 521f2004-29de-4d41-9494-447a49569d29 (1e7e774e-f305-4cf5-a9d4-7b0b96e564fc) Active Active Active ds_transition to Disabled
322642023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [0] Transition from Active to Disabled
322652023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 set inactive, session 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc
322662023-09-22T23:14:40.482ZERROcrucible-pantry (datafile): 127.0.0.1:63639: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 2 looper = 0
322672023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [0] 521f2004-29de-4d41-9494-447a49569d29 Gone missing, transition from Disabled to Disconnected
322682023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [0] 521f2004-29de-4d41-9494-447a49569d29 connection to 127.0.0.1:63639 closed looper = 0
322692023-09-22T23:14:40.482ZERROcrucible-pantry (datafile): [1] 521f2004-29de-4d41-9494-447a49569d29 (1e7e774e-f305-4cf5-a9d4-7b0b96e564fc) cmd_loop saw YouAreNoLongerActive 521f2004-29de-4d41-9494-447a49569d29 92660512-15df-4806-a057-1d8e47122931 3
322702023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [1] 521f2004-29de-4d41-9494-447a49569d29 (1e7e774e-f305-4cf5-a9d4-7b0b96e564fc) Disconnected Active Active ds_transition to Disabled
322712023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [1] Transition from Active to Disabled
322722023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 set inactive, session 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc
322732023-09-22T23:14:40.482ZERROcrucible-pantry (datafile): 127.0.0.1:43949: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 2 looper = 1
322742023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [1] 521f2004-29de-4d41-9494-447a49569d29 Gone missing, transition from Disabled to Disconnected
322752023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [1] 521f2004-29de-4d41-9494-447a49569d29 connection to 127.0.0.1:43949 closed looper = 1
322762023-09-22T23:14:40.482ZERROcrucible-pantry (datafile): [2] 521f2004-29de-4d41-9494-447a49569d29 (1e7e774e-f305-4cf5-a9d4-7b0b96e564fc) cmd_loop saw YouAreNoLongerActive 521f2004-29de-4d41-9494-447a49569d29 92660512-15df-4806-a057-1d8e47122931 3
322772023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [2] 521f2004-29de-4d41-9494-447a49569d29 (1e7e774e-f305-4cf5-a9d4-7b0b96e564fc) Disconnected Disconnected Active ds_transition to Disabled
322782023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [2] Transition from Active to Disabled
322792023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 set inactive, session 1e7e774e-f305-4cf5-a9d4-7b0b96e564fc
322802023-09-22T23:14:40.482ZERROcrucible-pantry (datafile): 127.0.0.1:63175: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 3 than ours 2 looper = 2
322812023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [2] 521f2004-29de-4d41-9494-447a49569d29 Gone missing, transition from Disabled to Disconnected
322822023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [2] 521f2004-29de-4d41-9494-447a49569d29 connection to 127.0.0.1:63175 closed looper = 2
322832023-09-22T23:14:40.482ZWARNcrucible-pantry (datafile): [0] pm_task rx.recv() is None
322842023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:63639 task reports connection:false
322852023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 Disconnected Disconnected Disconnected
322862023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [0] 127.0.0.1:63639 task reports offline
322872023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:43949 task reports connection:false
322882023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 Disconnected Disconnected Disconnected
322892023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [1] 127.0.0.1:43949 task reports offline
322902023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:63175 task reports connection:false
322912023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): 521f2004-29de-4d41-9494-447a49569d29 Disconnected Disconnected Disconnected
322922023-09-22T23:14:40.482ZINFOcrucible-pantry (datafile): [2] 127.0.0.1:63175 task reports offline
322932023-09-22T23:14:40.482ZWARNcrucible-pantry (datafile): [1] pm_task rx.recv() is None
322942023-09-22T23:14:40.482ZWARNcrucible-pantry (datafile): [2] pm_task rx.recv() is None
322952023-09-22T23:14:40.483ZINFOcrucible-pantry: [0] downstairs client at 127.0.0.1:63639 has UUID bafba53d-f7d4-4132-95f6-92b6a45487a9
322962023-09-22T23:14:40.483ZINFOcrucible-pantry: [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: bafba53d-f7d4-4132-95f6-92b6a45487a9, encrypted: true, database_read_version: 1, database_write_version: 1 }
322972023-09-22T23:14:40.483ZINFOcrucible-pantry: 521f2004-29de-4d41-9494-447a49569d29 WaitActive WaitActive WaitActive
322982023-09-22T23:14:40.483ZINFOcrucible-pantry: [1] downstairs client at 127.0.0.1:43949 has UUID 1e7a075d-e51e-4361-bdf9-32f916f89647
322992023-09-22T23:14:40.483ZINFOcrucible-pantry: [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 1e7a075d-e51e-4361-bdf9-32f916f89647, encrypted: true, database_read_version: 1, database_write_version: 1 }
323002023-09-22T23:14:40.483ZINFOcrucible-pantry: 521f2004-29de-4d41-9494-447a49569d29 WaitActive WaitActive WaitActive
323012023-09-22T23:14:40.483ZINFOcrucible-pantry: [2] downstairs client at 127.0.0.1:63175 has UUID b89ddae9-a405-4d4a-865d-e95f1fd0a715
323022023-09-22T23:14:40.483ZINFOcrucible-pantry: [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: b89ddae9-a405-4d4a-865d-e95f1fd0a715, encrypted: true, database_read_version: 1, database_write_version: 1 }
323032023-09-22T23:14:40.483ZINFOcrucible-pantry: 521f2004-29de-4d41-9494-447a49569d29 WaitActive WaitActive WaitActive
32304 Sep 22 23:14:40.490 INFO Current flush_numbers [0..12]: [1, 2, 3, 4, 5, 6, 7, 7, 0, 0, 0, 0]
32305 Sep 22 23:14:40.491 INFO Current flush_numbers [0..12]: [1, 2, 3, 4, 5, 6, 7, 7, 0, 0, 0, 0]
32306 Sep 22 23:14:40.493 INFO Current flush_numbers [0..12]: [1, 2, 3, 4, 5, 6, 7, 7, 0, 0, 0, 0]
32307 Sep 22 23:14:40.505 INFO Downstairs has completed Negotiation, task: proc
32308 Sep 22 23:14:40.506 INFO Downstairs has completed Negotiation, task: proc
32309 Sep 22 23:14:40.507 INFO Downstairs has completed Negotiation, task: proc
323102023-09-22T23:14:40.507ZINFOcrucible-pantry: [0] 521f2004-29de-4d41-9494-447a49569d29 (92660512-15df-4806-a057-1d8e47122931) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
323112023-09-22T23:14:40.507ZINFOcrucible-pantry: [0] Transition from WaitActive to WaitQuorum
323122023-09-22T23:14:40.507ZWARNcrucible-pantry: [0] new RM replaced this: None
323132023-09-22T23:14:40.507ZINFOcrucible-pantry: [0] Starts reconcile loop
323142023-09-22T23:14:40.507ZINFOcrucible-pantry: [1] 521f2004-29de-4d41-9494-447a49569d29 (92660512-15df-4806-a057-1d8e47122931) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
323152023-09-22T23:14:40.507ZINFOcrucible-pantry: [1] Transition from WaitActive to WaitQuorum
323162023-09-22T23:14:40.507ZWARNcrucible-pantry: [1] new RM replaced this: None
323172023-09-22T23:14:40.508ZINFOcrucible-pantry: [1] Starts reconcile loop
323182023-09-22T23:14:40.508ZINFOcrucible-pantry: [2] 521f2004-29de-4d41-9494-447a49569d29 (92660512-15df-4806-a057-1d8e47122931) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
323192023-09-22T23:14:40.508ZINFOcrucible-pantry: [2] Transition from WaitActive to WaitQuorum
323202023-09-22T23:14:40.508ZWARNcrucible-pantry: [2] new RM replaced this: None
323212023-09-22T23:14:40.508ZINFOcrucible-pantry: [2] Starts reconcile loop
323222023-09-22T23:14:40.508ZINFOcrucible-pantry: [0] 127.0.0.1:63639 task reports connection:true
323232023-09-22T23:14:40.508ZINFOcrucible-pantry: 521f2004-29de-4d41-9494-447a49569d29 WaitQuorum WaitQuorum WaitQuorum
323242023-09-22T23:14:40.508ZINFOcrucible-pantry: [0]R flush_numbers[0..12]: [1, 2, 3, 4, 5, 6, 7, 7, 0, 0, 0, 0]
323252023-09-22T23:14:40.508ZINFOcrucible-pantry: [0]R generation[0..12]: [2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0]
323262023-09-22T23:14:40.508ZINFOcrucible-pantry: [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
323272023-09-22T23:14:40.508ZINFOcrucible-pantry: [1]R flush_numbers[0..12]: [1, 2, 3, 4, 5, 6, 7, 7, 0, 0, 0, 0]
323282023-09-22T23:14:40.508ZINFOcrucible-pantry: [1]R generation[0..12]: [2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0]
323292023-09-22T23:14:40.508ZINFOcrucible-pantry: [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
323302023-09-22T23:14:40.508ZINFOcrucible-pantry: [2]R flush_numbers[0..12]: [1, 2, 3, 4, 5, 6, 7, 7, 0, 0, 0, 0]
323312023-09-22T23:14:40.508ZINFOcrucible-pantry: [2]R generation[0..12]: [2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0]
323322023-09-22T23:14:40.508ZINFOcrucible-pantry: [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
323332023-09-22T23:14:40.508ZINFOcrucible-pantry: Max found gen is 3
323342023-09-22T23:14:40.508ZINFOcrucible-pantry: Generation requested: 3 >= found:3
323352023-09-22T23:14:40.508ZINFOcrucible-pantry: Next flush: 8
323362023-09-22T23:14:40.508ZINFOcrucible-pantry: All extents match
32337 The guest has finished waiting for activation
323382023-09-22T23:14:40.508ZINFOcrucible-pantry: No downstairs repair required
323392023-09-22T23:14:40.508ZINFOcrucible-pantry: No initial repair work was required
323402023-09-22T23:14:40.508ZINFOcrucible-pantry: Set Downstairs and Upstairs active
323412023-09-22T23:14:40.508ZINFOcrucible-pantry: 521f2004-29de-4d41-9494-447a49569d29 is now active with session: 92660512-15df-4806-a057-1d8e47122931
323422023-09-22T23:14:40.508ZINFOcrucible-pantry: 521f2004-29de-4d41-9494-447a49569d29 Set Active after no repair
323432023-09-22T23:14:40.508ZINFOcrucible-pantry: Notify all downstairs, region set compare is done.
323442023-09-22T23:14:40.508ZINFOcrucible-pantry: Set check for repair
323452023-09-22T23:14:40.508ZINFOcrucible-pantry: [1] 127.0.0.1:43949 task reports connection:true
323462023-09-22T23:14:40.508ZINFOcrucible-pantry: 521f2004-29de-4d41-9494-447a49569d29 Active Active Active
323472023-09-22T23:14:40.508ZINFOcrucible-pantry: Set check for repair
323482023-09-22T23:14:40.508ZINFOcrucible-pantry: [2] 127.0.0.1:63175 task reports connection:true
323492023-09-22T23:14:40.508ZINFOcrucible-pantry: 521f2004-29de-4d41-9494-447a49569d29 Active Active Active
323502023-09-22T23:14:40.508ZINFOcrucible-pantry: Set check for repair
323512023-09-22T23:14:40.508ZINFOcrucible-pantry: [0] received reconcile message
323522023-09-22T23:14:40.508ZINFOcrucible-pantry: [0] All repairs completed, exit
323532023-09-22T23:14:40.508ZINFOcrucible-pantry: [0] Starts cmd_loop
323542023-09-22T23:14:40.508ZINFOcrucible-pantry: [1] received reconcile message
323552023-09-22T23:14:40.508ZINFOcrucible-pantry: [1] All repairs completed, exit
323562023-09-22T23:14:40.508ZINFOcrucible-pantry: [1] Starts cmd_loop
323572023-09-22T23:14:40.508ZINFOcrucible-pantry: [2] received reconcile message
323582023-09-22T23:14:40.509ZINFOcrucible-pantry: [2] All repairs completed, exit
323592023-09-22T23:14:40.509ZINFOcrucible-pantry: [2] Starts cmd_loop
32360 Sep 22 23:14:40.548 DEBG Read :1000 deps:[] res:true
32361 Sep 22 23:14:40.606 DEBG Read :1000 deps:[] res:true
32362 Sep 22 23:14:40.664 DEBG Read :1000 deps:[] res:true
32363 Sep 22 23:14:40.666 DEBG IO Write 1035 has deps [JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32364 Sep 22 23:14:40.666 DEBG up_ds_listen was notified
32365 Sep 22 23:14:40.666 DEBG up_ds_listen process 1035
32366 Sep 22 23:14:40.666 DEBG [A] ack job 1035:36, : downstairs
32367 Sep 22 23:14:40.666 DEBG up_ds_listen checked 1 jobs, back to waiting
32368 Sep 22 23:14:41.006 DEBG IO Write 1036 has deps [JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32369 Sep 22 23:14:41.006 DEBG up_ds_listen was notified
32370 Sep 22 23:14:41.006 DEBG up_ds_listen process 1036
32371 Sep 22 23:14:41.007 DEBG [A] ack job 1036:37, : downstairs
32372 Sep 22 23:14:41.007 DEBG up_ds_listen checked 1 jobs, back to waiting
32373 Sep 22 23:14:41.007 DEBG IO Flush 1037 has deps [JobId(1036), JobId(1035), JobId(1034)]
32374 Sep 22 23:14:41.349 DEBG IO Write 1038 has deps [JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32375 Sep 22 23:14:41.349 DEBG up_ds_listen was notified
32376 Sep 22 23:14:41.350 DEBG up_ds_listen process 1038
32377 Sep 22 23:14:41.350 DEBG [A] ack job 1038:39, : downstairs
32378 Sep 22 23:14:41.350 DEBG up_ds_listen checked 1 jobs, back to waiting
32379 Sep 22 23:14:41.692 DEBG IO Write 1039 has deps [JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32380 Sep 22 23:14:41.692 DEBG up_ds_listen was notified
32381 Sep 22 23:14:41.692 DEBG up_ds_listen process 1039
32382 Sep 22 23:14:41.692 DEBG [A] ack job 1039:40, : downstairs
32383 Sep 22 23:14:41.692 DEBG up_ds_listen checked 1 jobs, back to waiting
32384 Sep 22 23:14:41.693 DEBG IO Flush 1040 has deps [JobId(1039), JobId(1038), JobId(1037)]
32385 Sep 22 23:14:42.098 DEBG IO Write 1041 has deps [JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32386 Sep 22 23:14:42.099 DEBG up_ds_listen was notified
32387 Sep 22 23:14:42.099 DEBG up_ds_listen process 1041
32388 Sep 22 23:14:42.099 DEBG [A] ack job 1041:42, : downstairs
32389 Sep 22 23:14:42.099 DEBG up_ds_listen checked 1 jobs, back to waiting
32390 Sep 22 23:14:42.257 INFO [lossy] skipping 1003
32391 Sep 22 23:14:42.288 DEBG Write :1003 deps:[JobId(1002)] res:true
32392 Sep 22 23:14:42.319 DEBG Write :1003 deps:[JobId(1002)] res:true
32393 Sep 22 23:14:42.350 DEBG Write :1003 deps:[JobId(1002)] res:true
32394 Sep 22 23:14:42.351 DEBG IO Flush 1042 has deps [JobId(1041), JobId(1040)]
32395 Sep 22 23:14:42.685 DEBG IO Write 1043 has deps [JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32396 Sep 22 23:14:42.686 DEBG up_ds_listen was notified
32397 Sep 22 23:14:42.686 DEBG up_ds_listen process 1043
32398 Sep 22 23:14:42.686 DEBG [A] ack job 1043:44, : downstairs
32399 Sep 22 23:14:42.686 DEBG up_ds_listen checked 1 jobs, back to waiting
32400 Sep 22 23:14:43.023 DEBG IO Write 1044 has deps [JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32401 Sep 22 23:14:43.024 DEBG up_ds_listen was notified
32402 Sep 22 23:14:43.024 DEBG up_ds_listen process 1044
32403 Sep 22 23:14:43.024 DEBG [A] ack job 1044:45, : downstairs
32404 Sep 22 23:14:43.024 DEBG up_ds_listen checked 1 jobs, back to waiting
32405 Sep 22 23:14:43.024 DEBG IO Flush 1045 has deps [JobId(1044), JobId(1043), JobId(1042)]
32406 test test::test_pantry_scrub ... ok
32407 Sep 22 23:14:43.360 DEBG IO Write 1046 has deps [JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32408 Sep 22 23:14:43.360 DEBG up_ds_listen was notified
32409 Sep 22 23:14:43.361 DEBG up_ds_listen process 1046
32410 Sep 22 23:14:43.361 DEBG [A] ack job 1046:47, : downstairs
32411 Sep 22 23:14:43.361 DEBG up_ds_listen checked 1 jobs, back to waiting
32412 Sep 22 23:14:43.695 DEBG IO Write 1047 has deps [JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32413 Sep 22 23:14:43.696 DEBG up_ds_listen was notified
32414 Sep 22 23:14:43.696 DEBG up_ds_listen process 1047
32415 Sep 22 23:14:43.696 DEBG [A] ack job 1047:48, : downstairs
32416 Sep 22 23:14:43.696 DEBG up_ds_listen checked 1 jobs, back to waiting
32417 Sep 22 23:14:43.696 DEBG IO Flush 1048 has deps [JobId(1047), JobId(1046), JobId(1045)]
32418 Sep 22 23:14:44.031 DEBG IO Write 1049 has deps [JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32419 Sep 22 23:14:44.031 DEBG up_ds_listen was notified
32420 Sep 22 23:14:44.031 DEBG up_ds_listen process 1049
32421 Sep 22 23:14:44.031 DEBG [A] ack job 1049:50, : downstairs
32422 Sep 22 23:14:44.031 DEBG up_ds_listen checked 1 jobs, back to waiting
32423 Sep 22 23:14:44.368 DEBG IO Write 1050 has deps [JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32424 Sep 22 23:14:44.368 DEBG up_ds_listen was notified
32425 Sep 22 23:14:44.368 DEBG up_ds_listen process 1050
32426 Sep 22 23:14:44.368 DEBG [A] ack job 1050:51, : downstairs
32427 Sep 22 23:14:44.368 DEBG up_ds_listen checked 1 jobs, back to waiting
32428 Sep 22 23:14:44.369 DEBG IO Flush 1051 has deps [JobId(1050), JobId(1049), JobId(1048)]
32429 Sep 22 23:14:44.705 DEBG IO Write 1052 has deps [JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32430 Sep 22 23:14:44.705 DEBG up_ds_listen was notified
32431 Sep 22 23:14:44.705 DEBG up_ds_listen process 1052
32432 Sep 22 23:14:44.705 DEBG [A] ack job 1052:53, : downstairs
32433 Sep 22 23:14:44.705 DEBG up_ds_listen checked 1 jobs, back to waiting
32434 Sep 22 23:14:45.042 DEBG IO Write 1053 has deps [JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32435 Sep 22 23:14:45.042 DEBG up_ds_listen was notified
32436 Sep 22 23:14:45.042 DEBG up_ds_listen process 1053
32437 Sep 22 23:14:45.043 DEBG [A] ack job 1053:54, : downstairs
32438 Sep 22 23:14:45.043 DEBG up_ds_listen checked 1 jobs, back to waiting
32439 Sep 22 23:14:45.043 DEBG IO Flush 1054 has deps [JobId(1053), JobId(1052), JobId(1051)]
32440 Sep 22 23:14:45.043 INFO [lossy] sleeping 1 second
32441 Sep 22 23:14:45.043 INFO [lossy] sleeping 1 second
32442 Sep 22 23:14:45.043 INFO [lossy] sleeping 1 second
32443 Sep 22 23:14:45.380 DEBG IO Write 1055 has deps [JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32444 Sep 22 23:14:45.380 DEBG up_ds_listen was notified
32445 Sep 22 23:14:45.380 DEBG up_ds_listen process 1055
32446 Sep 22 23:14:45.380 DEBG [A] ack job 1055:56, : downstairs
32447 Sep 22 23:14:45.380 DEBG up_ds_listen checked 1 jobs, back to waiting
32448 Sep 22 23:14:45.779 DEBG IO Write 1056 has deps [JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32449 Sep 22 23:14:45.779 DEBG up_ds_listen was notified
32450 Sep 22 23:14:45.779 DEBG up_ds_listen process 1056
32451 Sep 22 23:14:45.780 DEBG [A] ack job 1056:57, : downstairs
32452 Sep 22 23:14:45.780 DEBG up_ds_listen checked 1 jobs, back to waiting
32453 Sep 22 23:14:45.937 DEBG IO Flush 1057 has deps [JobId(1056), JobId(1055), JobId(1054)]
32454 Sep 22 23:14:46.275 DEBG IO Write 1058 has deps [JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32455 Sep 22 23:14:46.275 DEBG up_ds_listen was notified
32456 Sep 22 23:14:46.275 DEBG up_ds_listen process 1058
32457 Sep 22 23:14:46.275 DEBG [A] ack job 1058:59, : downstairs
32458 Sep 22 23:14:46.275 DEBG up_ds_listen checked 1 jobs, back to waiting
32459 Sep 22 23:14:46.308 DEBG Write :1004 deps:[JobId(1002)] res:true
32460 Sep 22 23:14:46.339 DEBG Write :1004 deps:[JobId(1002)] res:true
32461 Sep 22 23:14:46.340 WARN returning error on flush!
32462 Sep 22 23:14:46.340 DEBG Flush :1005 extent_limit None deps:[JobId(1004), JobId(1003), JobId(1002)] res:false f:2 g:1
32463 Sep 22 23:14:46.340 WARN returning error on flush!
32464 Sep 22 23:14:46.340 DEBG Flush :1005 extent_limit None deps:[JobId(1004), JobId(1003), JobId(1002)] res:false f:2 g:1
32465 Sep 22 23:14:46.340 INFO [lossy] skipping 1005
32466 Sep 22 23:14:46.370 DEBG Write :1004 deps:[JobId(1002)] res:true
32467 Sep 22 23:14:46.391 DEBG Flush :1005 extent_limit None deps:[JobId(1004), JobId(1003), JobId(1002)] res:true f:2 g:1
32468 Sep 22 23:14:46.391 DEBG Flush :1005 extent_limit None deps:[JobId(1004), JobId(1003), JobId(1002)] res:true f:2 g:1
32469 Sep 22 23:14:46.391 DEBG Flush :1005 extent_limit None deps:[JobId(1004), JobId(1003), JobId(1002)] res:true f:2 g:1
32470 Sep 22 23:14:46.724 DEBG IO Write 1059 has deps [JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32471 Sep 22 23:14:46.725 DEBG up_ds_listen was notified
32472 Sep 22 23:14:46.725 DEBG up_ds_listen process 1059
32473 Sep 22 23:14:46.725 DEBG [A] ack job 1059:60, : downstairs
32474 Sep 22 23:14:46.725 DEBG up_ds_listen checked 1 jobs, back to waiting
32475 Sep 22 23:14:46.725 DEBG IO Flush 1060 has deps [JobId(1059), JobId(1058), JobId(1057)]
32476 Sep 22 23:14:47.063 DEBG IO Write 1061 has deps [JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32477 Sep 22 23:14:47.063 DEBG up_ds_listen was notified
32478 Sep 22 23:14:47.063 DEBG up_ds_listen process 1061
32479 Sep 22 23:14:47.063 DEBG [A] ack job 1061:62, : downstairs
32480 Sep 22 23:14:47.063 DEBG up_ds_listen checked 1 jobs, back to waiting
32481 Sep 22 23:14:47.401 DEBG IO Write 1062 has deps [JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32482 Sep 22 23:14:47.401 DEBG up_ds_listen was notified
32483 Sep 22 23:14:47.401 DEBG up_ds_listen process 1062
32484 Sep 22 23:14:47.401 DEBG [A] ack job 1062:63, : downstairs
32485 Sep 22 23:14:47.401 DEBG up_ds_listen checked 1 jobs, back to waiting
32486 Sep 22 23:14:47.402 DEBG IO Flush 1063 has deps [JobId(1062), JobId(1061), JobId(1060)]
32487 Sep 22 23:14:47.743 DEBG IO Write 1064 has deps [JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32488 Sep 22 23:14:47.743 DEBG up_ds_listen was notified
32489 Sep 22 23:14:47.743 DEBG up_ds_listen process 1064
32490 Sep 22 23:14:47.743 DEBG [A] ack job 1064:65, : downstairs
32491 Sep 22 23:14:47.743 DEBG up_ds_listen checked 1 jobs, back to waiting
32492 Sep 22 23:14:47.817 DEBG IO Write 1000 has deps []
32493 Sep 22 23:14:47.817 INFO Checking if live repair is needed
32494 Sep 22 23:14:47.817 INFO No Live Repair required at this time
32495 Sep 22 23:14:47.817 DEBG up_ds_listen was notified
32496 Sep 22 23:14:47.817 DEBG up_ds_listen process 1000
32497 Sep 22 23:14:47.821 DEBG [A] ack job 1000:1, : downstairs
32498 Sep 22 23:14:47.821 DEBG up_ds_listen checked 1 jobs, back to waiting
32499 Sep 22 23:14:48.087 DEBG IO Write 1065 has deps [JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32500 Sep 22 23:14:48.087 DEBG up_ds_listen was notified
32501 Sep 22 23:14:48.087 DEBG up_ds_listen process 1065
32502 Sep 22 23:14:48.087 DEBG [A] ack job 1065:66, : downstairs
32503 Sep 22 23:14:48.087 DEBG up_ds_listen checked 1 jobs, back to waiting
32504 Sep 22 23:14:48.088 DEBG IO Flush 1066 has deps [JobId(1065), JobId(1064), JobId(1063)]
32505 Sep 22 23:14:48.429 DEBG IO Write 1067 has deps [JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32506 Sep 22 23:14:48.429 DEBG up_ds_listen was notified
32507 Sep 22 23:14:48.429 DEBG up_ds_listen process 1067
32508 Sep 22 23:14:48.429 DEBG [A] ack job 1067:68, : downstairs
32509 Sep 22 23:14:48.429 DEBG up_ds_listen checked 1 jobs, back to waiting
32510 Sep 22 23:14:48.769 DEBG IO Write 1068 has deps [JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32511 Sep 22 23:14:48.769 DEBG up_ds_listen was notified
32512 Sep 22 23:14:48.769 DEBG up_ds_listen process 1068
32513 Sep 22 23:14:48.769 DEBG [A] ack job 1068:69, : downstairs
32514 Sep 22 23:14:48.769 DEBG up_ds_listen checked 1 jobs, back to waiting
32515 Sep 22 23:14:48.770 DEBG IO Flush 1069 has deps [JobId(1068), JobId(1067), JobId(1066)]
32516 Sep 22 23:14:49.171 DEBG IO Write 1070 has deps [JobId(1069), JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)]
32517 Sep 22 23:14:49.171 DEBG up_ds_listen was notified
32518 Sep 22 23:14:49.171 DEBG up_ds_listen process 1070
32519 Sep 22 23:14:49.171 DEBG [A] ack job 1070:71, : downstairs
32520 Sep 22 23:14:49.171 DEBG up_ds_listen checked 1 jobs, back to waiting
32521 Sep 22 23:14:49.356 DEBG Write :1006 deps:[JobId(1005), JobId(1002)] res:true
32522 Sep 22 23:14:49.358 WARN returning error on write!
32523 Sep 22 23:14:49.358 DEBG Write :1006 deps:[JobId(1005), JobId(1002)] res:false
32524 Sep 22 23:14:49.358 INFO [lossy] skipping 1006
32525 Sep 22 23:14:49.359 WARN returning error on write!
32526 Sep 22 23:14:49.359 DEBG Write :1006 deps:[JobId(1005), JobId(1002)] res:false
32527 Sep 22 23:14:49.389 DEBG Write :1006 deps:[JobId(1005), JobId(1002)] res:true
32528 Sep 22 23:14:49.390 INFO [lossy] sleeping 1 second
32529 Sep 22 23:14:49.390 DEBG IO Flush 1071 has deps [JobId(1070), JobId(1069)]
32530 Sep 22 23:14:49.391 DEBG IO Read 1072 has deps [JobId(1071)]
32531 Sep 22 23:14:49.601 INFO [lossy] sleeping 1 second
32532 Sep 22 23:14:49.632 DEBG Write :1007 deps:[JobId(1005), JobId(1002)] res:true
32533 Sep 22 23:14:49.633 INFO [lossy] skipping 1008
32534 Sep 22 23:14:49.633 INFO [lossy] skipping 1008
32535 Sep 22 23:14:49.640 DEBG Flush :1008 extent_limit None deps:[JobId(1007), JobId(1006), JobId(1005)] res:true f:3 g:1
32536 Sep 22 23:14:49.857 INFO [lossy] skipping 1009
32537 Sep 22 23:14:49.888 DEBG Write :1009 deps:[JobId(1008), JobId(1005), JobId(1002)] res:true
32538 Sep 22 23:14:49.891 DEBG IO Flush 1073 has deps [JobId(1072), JobId(1071)]
32539 Sep 22 23:14:50.107 INFO [lossy] sleeping 1 second
32540 Sep 22 23:14:50.174 ERRO [0] job id 1000 saw error GenericError("test error")
32541 Sep 22 23:14:50.174 ERRO [0] job id 1002 saw error GenericError("test error")
32542 Sep 22 23:14:50.174 ERRO [2] job id 1002 saw error GenericError("test error")
32543 Sep 22 23:14:50.174 ERRO [2] job id 1005 saw error GenericError("test error")
32544 Sep 22 23:14:50.174 ERRO [2] job id 1005 saw error GenericError("test error")
32545 Sep 22 23:14:50.174 ERRO [2] job id 1006 saw error GenericError("test error")
32546 Sep 22 23:14:50.174 ERRO [2] job id 1006 saw error GenericError("test error")
32547 Sep 22 23:14:50.174 DEBG up_ds_listen was notified
32548 Sep 22 23:14:50.174 DEBG up_ds_listen process 1002
32549 Sep 22 23:14:50.174 DEBG [A] ack job 1002:3, : downstairs
32550 Sep 22 23:14:50.177 DEBG [rc] retire 1002 clears [JobId(1000), JobId(1001), JobId(1002)], : downstairs
32551 Sep 22 23:14:50.177 DEBG up_ds_listen process 1005
32552 Sep 22 23:14:50.177 DEBG [A] ack job 1005:6, : downstairs
32553 Sep 22 23:14:50.181 DEBG [rc] retire 1005 clears [JobId(1003), JobId(1004), JobId(1005)], : downstairs
32554 Sep 22 23:14:50.181 DEBG up_ds_listen checked 2 jobs, back to waiting
32555 Sep 22 23:14:50.181 DEBG up_ds_listen was notified
32556 Sep 22 23:14:50.181 DEBG up_ds_listen checked 0 jobs, back to waiting
32557 Sep 22 23:14:50.575 DEBG Write :1006 deps:[JobId(1005), JobId(1002)] res:true
32558 Sep 22 23:14:50.576 INFO [lossy] skipping 1007
32559 Sep 22 23:14:50.576 INFO [lossy] skipping 1008
32560 Sep 22 23:14:50.576 INFO [lossy] skipping 1009
32561 Sep 22 23:14:50.576 INFO [lossy] skipping 1010
32562 Sep 22 23:14:50.577 WARN returning error on write!
32563 Sep 22 23:14:50.577 DEBG Write :1007 deps:[JobId(1005), JobId(1002)] res:false
32564 Sep 22 23:14:50.577 INFO [lossy] skipping 1008
32565 Sep 22 23:14:50.577 INFO [lossy] skipping 1009
32566 Sep 22 23:14:50.578 WARN returning error on write!
32567 Sep 22 23:14:50.578 DEBG Write :1007 deps:[JobId(1005), JobId(1002)] res:false
32568 Sep 22 23:14:50.578 INFO [lossy] skipping 1007
32569 Sep 22 23:14:50.608 DEBG Write :1007 deps:[JobId(1005), JobId(1002)] res:true
32570 Sep 22 23:14:50.609 INFO [lossy] skipping 1008
32571 Sep 22 23:14:50.609 INFO [lossy] skipping 1009
32572 Sep 22 23:14:50.609 INFO [lossy] skipping 1011
32573 Sep 22 23:14:50.609 INFO [lossy] skipping 1014
32574 Sep 22 23:14:50.609 INFO [lossy] skipping 1008
32575 Sep 22 23:14:50.609 INFO [lossy] skipping 1008
32576 Sep 22 23:14:50.609 INFO [lossy] skipping 1008
32577 Sep 22 23:14:50.615 DEBG Flush :1008 extent_limit None deps:[JobId(1007), JobId(1006), JobId(1005)] res:true f:3 g:1
32578 Sep 22 23:14:50.616 WARN returning error on write!
32579 Sep 22 23:14:50.616 DEBG Write :1009 deps:[JobId(1008), JobId(1005), JobId(1002)] res:false
32580 Sep 22 23:14:50.617 INFO [lossy] skipping 1010
32581 Sep 22 23:14:50.617 WARN 1011 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
32582 Sep 22 23:14:50.617 WARN 1012 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32583 Sep 22 23:14:50.617 WARN 1013 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32584 Sep 22 23:14:50.647 DEBG Write :1009 deps:[JobId(1008), JobId(1005), JobId(1002)] res:true
32585 Sep 22 23:14:50.678 DEBG Write :1010 deps:[JobId(1008), JobId(1005), JobId(1002)] res:true
32586 Sep 22 23:14:50.679 INFO [lossy] skipping 1011
32587 Sep 22 23:14:50.686 DEBG Flush :1011 extent_limit None deps:[JobId(1010), JobId(1009), JobId(1008)] res:true f:4 g:1
32588 Sep 22 23:14:50.716 DEBG Write :1012 deps:[JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32589 Sep 22 23:14:50.717 INFO [lossy] skipping 1013
32590 Sep 22 23:14:50.717 WARN 1014 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32591 Sep 22 23:14:50.717 INFO [lossy] skipping 1013
32592 Sep 22 23:14:50.748 DEBG Write :1013 deps:[JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32593 Sep 22 23:14:50.755 DEBG Flush :1014 extent_limit None deps:[JobId(1013), JobId(1012), JobId(1011)] res:true f:5 g:1
32594 Sep 22 23:14:50.756 INFO [lossy] sleeping 1 second
32595 Sep 22 23:14:50.786 DEBG Write :1007 deps:[JobId(1005), JobId(1002)] res:true
32596 Sep 22 23:14:50.787 INFO [lossy] skipping 1008
32597 Sep 22 23:14:50.787 INFO [lossy] skipping 1009
32598 Sep 22 23:14:50.787 WARN returning error on flush!
32599 Sep 22 23:14:50.787 DEBG Flush :1008 extent_limit None deps:[JobId(1007), JobId(1006), JobId(1005)] res:false f:3 g:1
32600 Sep 22 23:14:50.787 INFO [lossy] skipping 1008
32601 Sep 22 23:14:50.788 WARN returning error on flush!
32602 Sep 22 23:14:50.788 DEBG Flush :1008 extent_limit None deps:[JobId(1007), JobId(1006), JobId(1005)] res:false f:3 g:1
32603 Sep 22 23:14:50.788 INFO [lossy] skipping 1008
32604 Sep 22 23:14:50.788 INFO [lossy] skipping 1008
32605 Sep 22 23:14:50.788 INFO [lossy] skipping 1008
32606 Sep 22 23:14:50.794 DEBG Flush :1008 extent_limit None deps:[JobId(1007), JobId(1006), JobId(1005)] res:true f:3 g:1
32607 Sep 22 23:14:50.794 INFO [lossy] sleeping 1 second
32608 Sep 22 23:14:51.163 INFO current number of open files limit 65536 is already the maximum
32609 Sep 22 23:14:51.163 INFO Created new region file "/tmp/downstairs-vrx8aK6L/region.json"
32610 Sep 22 23:14:51.241 DEBG Write :1010 deps:[JobId(1008), JobId(1005), JobId(1002)] res:true
32611 Sep 22 23:14:51.249 DEBG Flush :1011 extent_limit None deps:[JobId(1010), JobId(1009), JobId(1008)] res:true f:4 g:1
32612 Sep 22 23:14:51.249 INFO [lossy] skipping 1012
32613 Sep 22 23:14:51.279 DEBG Write :1013 deps:[JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32614 Sep 22 23:14:51.280 INFO [lossy] skipping 1014
32615 Sep 22 23:14:51.280 INFO [lossy] skipping 1015
32616 Sep 22 23:14:51.281 WARN returning error on write!
32617 Sep 22 23:14:51.281 DEBG Write :1012 deps:[JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32618 Sep 22 23:14:51.282 INFO [lossy] skipping 1015
32619 Sep 22 23:14:51.282 WARN returning error on write!
32620 Sep 22 23:14:51.282 DEBG Write :1012 deps:[JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32621 Sep 22 23:14:51.282 INFO [lossy] skipping 1015
32622 Sep 22 23:14:51.313 DEBG Write :1012 deps:[JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32623 Sep 22 23:14:51.314 INFO [lossy] skipping 1015
32624 Sep 22 23:14:51.314 WARN returning error on flush!
32625 Sep 22 23:14:51.314 DEBG Flush :1014 extent_limit None deps:[JobId(1013), JobId(1012), JobId(1011)] res:false f:5 g:1
32626 Sep 22 23:14:51.321 DEBG Flush :1014 extent_limit None deps:[JobId(1013), JobId(1012), JobId(1011)] res:true f:5 g:1
32627 Sep 22 23:14:51.321 INFO [lossy] sleeping 1 second
32628 Sep 22 23:14:51.473 DEBG IO Flush 1001 has deps [JobId(1000)]
32629 Sep 22 23:14:51.473 INFO current number of open files limit 65536 is already the maximum
32630 Sep 22 23:14:51.473 INFO Opened existing region file "/tmp/downstairs-vrx8aK6L/region.json"
32631 Sep 22 23:14:51.473 INFO Database read version 1
32632 Sep 22 23:14:51.473 INFO Database write version 1
32633 Sep 22 23:14:51.525 INFO UUID: 28f50308-941f-4d7f-bd0a-39a00206f2e7
32634 Sep 22 23:14:51.525 INFO Blocks per extent:512 Total Extents: 188
32635 Sep 22 23:14:51.525 INFO Crucible Version: Crucible Version: 0.0.1
32636 Commit SHA: ed48f294784d46ea7d4bb99336918b74358eca46
32637 Commit timestamp: 2023-09-22T22:51:18.000000000Z branch: main
32638 rustc: 1.70.0 stable x86_64-unknown-illumos
32639 Cargo: x86_64-unknown-illumos Debug: true Opt level: 0, task: main
32640 Sep 22 23:14:51.525 INFO Upstairs <-> Downstairs Message Version: 4, task: main
32641 Sep 22 23:14:51.525 INFO Using address: 127.0.0.1:50216, task: main
32642 Sep 22 23:14:51.525 INFO Repair listens on 127.0.0.1:0, task: repair
32643 Sep 22 23:14:51.526 DEBG registered endpoint, path: /extent/{eid}/files, method: GET, local_addr: 127.0.0.1:38873, task: repair
32644 Sep 22 23:14:51.526 DEBG registered endpoint, path: /newextent/{eid}/{file_type}, method: GET, local_addr: 127.0.0.1:38873, task: repair
32645 Sep 22 23:14:51.526 INFO listening, local_addr: 127.0.0.1:38873, task: repair
32646 Sep 22 23:14:51.526 DEBG successfully registered DTrace USDT probes, local_addr: 127.0.0.1:38873, task: repair
32647 Sep 22 23:14:51.526 INFO Using repair address: 127.0.0.1:38873, task: main
32648 Sep 22 23:14:51.526 INFO No SSL acceptor configured, task: main
32649 Sep 22 23:14:51.534 INFO listening on 127.0.0.1:0, task: main
32650 Sep 22 23:14:51.534 WARN 6e5c0f70-fd56-4280-9d20-71288e488216 request to replace downstairs 127.0.0.1:42762 with 127.0.0.1:50216
32651 Sep 22 23:14:51.534 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 found old target: 127.0.0.1:42762 at 0
32652 Sep 22 23:14:51.534 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 replacing old: 127.0.0.1:42762 at 0
32653 Sep 22 23:14:51.534 INFO [0] client skip 2 in process jobs because fault, : downstairs
32654 Sep 22 23:14:51.534 INFO [0] changed 2 jobs to fault skipped, : downstairs
32655 Sep 22 23:14:51.534 INFO [0] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) Active Active Active ds_transition to Replacing
32656 Sep 22 23:14:51.534 INFO [0] Transition from Active to Replacing
32657 Sep 22 23:14:51.788 DEBG Write :1015 deps:[JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32658 Sep 22 23:14:51.790 WARN returning error on write!
32659 Sep 22 23:14:51.790 DEBG Write :1016 deps:[JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32660 Sep 22 23:14:51.790 INFO [lossy] skipping 1019
32661 Sep 22 23:14:51.820 DEBG Write :1016 deps:[JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32662 Sep 22 23:14:51.820 INFO [lossy] skipping 1019
32663 Sep 22 23:14:51.821 INFO [lossy] skipping 1019
32664 Sep 22 23:14:51.821 WARN returning error on flush!
32665 Sep 22 23:14:51.821 DEBG Flush :1017 extent_limit None deps:[JobId(1016), JobId(1015), JobId(1014)] res:false f:6 g:1
32666 Sep 22 23:14:51.821 INFO [lossy] skipping 1018
32667 Sep 22 23:14:51.827 DEBG Flush :1017 extent_limit None deps:[JobId(1016), JobId(1015), JobId(1014)] res:true f:6 g:1
32668 Sep 22 23:14:51.827 INFO [lossy] skipping 1018
32669 Sep 22 23:14:51.857 DEBG Write :1018 deps:[JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32670 Sep 22 23:14:51.859 WARN returning error on write!
32671 Sep 22 23:14:51.859 DEBG Write :1019 deps:[JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32672 Sep 22 23:14:51.859 INFO [lossy] skipping 1020
32673 Sep 22 23:14:51.889 DEBG Write :1019 deps:[JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32674 Sep 22 23:14:51.890 INFO [lossy] skipping 1020
32675 Sep 22 23:14:51.890 WARN returning error on flush!
32676 Sep 22 23:14:51.890 DEBG Flush :1020 extent_limit None deps:[JobId(1019), JobId(1018), JobId(1017)] res:false f:7 g:1
32677 Sep 22 23:14:51.890 INFO [lossy] skipping 1020
32678 Sep 22 23:14:51.896 DEBG Flush :1020 extent_limit None deps:[JobId(1019), JobId(1018), JobId(1017)] res:true f:7 g:1
32679 Sep 22 23:14:51.927 DEBG Write :1009 deps:[JobId(1008), JobId(1005), JobId(1002)] res:true
32680 Sep 22 23:14:51.928 INFO [lossy] skipping 1010
32681 Sep 22 23:14:51.928 INFO [lossy] skipping 1011
32682 Sep 22 23:14:51.928 WARN 1012 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32683 Sep 22 23:14:51.928 INFO [lossy] skipping 1013
32684 Sep 22 23:14:51.928 INFO [lossy] skipping 1017
32685 Sep 22 23:14:51.928 INFO [lossy] skipping 1019
32686 Sep 22 23:14:51.928 INFO [lossy] skipping 1010
32687 Sep 22 23:14:51.928 WARN 1011 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32688 Sep 22 23:14:51.928 INFO [lossy] skipping 1013
32689 Sep 22 23:14:51.928 INFO [lossy] skipping 1017
32690 Sep 22 23:14:51.929 WARN returning error on write!
32691 Sep 22 23:14:51.929 DEBG Write :1010 deps:[JobId(1008), JobId(1005), JobId(1002)] res:false
32692 Sep 22 23:14:51.929 WARN 1013 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32693 Sep 22 23:14:51.930 INFO [lossy] skipping 1017
32694 Sep 22 23:14:51.959 DEBG Write :1010 deps:[JobId(1008), JobId(1005), JobId(1002)] res:true
32695 Sep 22 23:14:51.967 DEBG Flush :1011 extent_limit None deps:[JobId(1010), JobId(1009), JobId(1008)] res:true f:4 g:1
32696 Sep 22 23:14:51.968 WARN returning error on write!
32697 Sep 22 23:14:51.968 DEBG Write :1012 deps:[JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32698 Sep 22 23:14:51.999 DEBG Write :1013 deps:[JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32699 Sep 22 23:14:52.001 WARN 1014 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32700 Sep 22 23:14:52.001 INFO [lossy] skipping 1015
32701 Sep 22 23:14:52.001 WARN 1016 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32702 Sep 22 23:14:52.001 WARN 1018 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
32703 Sep 22 23:14:52.001 WARN 1019 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
32704 Sep 22 23:14:52.001 INFO [lossy] skipping 1020
32705 Sep 22 23:14:52.001 INFO [lossy] skipping 1012
32706 Sep 22 23:14:52.001 WARN 1015 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32707 Sep 22 23:14:52.031 DEBG Write :1012 deps:[JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32708 Sep 22 23:14:52.033 WARN returning error on flush!
32709 Sep 22 23:14:52.033 DEBG Flush :1014 extent_limit None deps:[JobId(1013), JobId(1012), JobId(1011)] res:false f:5 g:1
32710 Sep 22 23:14:52.033 INFO [lossy] skipping 1014
32711 Sep 22 23:14:52.033 INFO [lossy] skipping 1014
32712 Sep 22 23:14:52.039 DEBG Flush :1014 extent_limit None deps:[JobId(1013), JobId(1012), JobId(1011)] res:true f:5 g:1
32713 Sep 22 23:14:52.070 DEBG Write :1015 deps:[JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32714 Sep 22 23:14:52.101 DEBG Write :1016 deps:[JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32715 Sep 22 23:14:52.108 DEBG Flush :1017 extent_limit None deps:[JobId(1016), JobId(1015), JobId(1014)] res:true f:6 g:1
32716 Sep 22 23:14:52.108 INFO [lossy] skipping 1018
32717 Sep 22 23:14:52.138 DEBG Write :1019 deps:[JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32718 Sep 22 23:14:52.139 INFO [lossy] skipping 1020
32719 Sep 22 23:14:52.170 DEBG Write :1018 deps:[JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32720 Sep 22 23:14:52.171 WARN returning error on flush!
32721 Sep 22 23:14:52.171 DEBG Flush :1020 extent_limit None deps:[JobId(1019), JobId(1018), JobId(1017)] res:false f:7 g:1
32722 Sep 22 23:14:52.177 DEBG Flush :1020 extent_limit None deps:[JobId(1019), JobId(1018), JobId(1017)] res:true f:7 g:1
32723 Sep 22 23:14:52.177 INFO [lossy] sleeping 1 second
32724 Sep 22 23:14:52.424 DEBG Write :1021 deps:[JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32725 Sep 22 23:14:52.455 DEBG Write :1015 deps:[JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32726 Sep 22 23:14:52.456 INFO [lossy] skipping 1016
32727 Sep 22 23:14:52.456 WARN 1017 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32728 Sep 22 23:14:52.457 WARN returning error on write!
32729 Sep 22 23:14:52.457 DEBG Write :1016 deps:[JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32730 Sep 22 23:14:52.458 INFO [lossy] skipping 1016
32731 Sep 22 23:14:52.458 WARN returning error on write!
32732 Sep 22 23:14:52.458 DEBG Write :1016 deps:[JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32733 Sep 22 23:14:52.488 DEBG Write :1016 deps:[JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32734 Sep 22 23:14:52.489 INFO [lossy] sleeping 1 second
32735 Sep 22 23:14:52.739 DEBG Write :1022 deps:[JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32736 Sep 22 23:14:52.740 WARN returning error on flush!
32737 Sep 22 23:14:52.740 DEBG Flush :1023 extent_limit None deps:[JobId(1022), JobId(1021), JobId(1020)] res:false f:8 g:1
32738 Sep 22 23:14:52.740 WARN returning error on flush!
32739 Sep 22 23:14:52.740 DEBG Flush :1023 extent_limit None deps:[JobId(1022), JobId(1021), JobId(1020)] res:false f:8 g:1
32740 Sep 22 23:14:52.740 INFO [lossy] skipping 1023
32741 Sep 22 23:14:52.740 INFO [lossy] skipping 1023
32742 Sep 22 23:14:52.747 DEBG Flush :1023 extent_limit None deps:[JobId(1022), JobId(1021), JobId(1020)] res:true f:8 g:1
32743 Sep 22 23:14:52.963 INFO [lossy] skipping 1024
32744 Sep 22 23:14:52.994 DEBG Write :1024 deps:[JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32745 Sep 22 23:14:53.225 INFO [lossy] sleeping 1 second
32746 Sep 22 23:14:53.225 INFO [lossy] skipping 1021
32747 Sep 22 23:14:53.227 WARN returning error on write!
32748 Sep 22 23:14:53.227 DEBG Write :1022 deps:[JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32749 Sep 22 23:14:53.227 INFO [lossy] skipping 1025
32750 Sep 22 23:14:53.258 DEBG Write :1021 deps:[JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32751 Sep 22 23:14:53.289 DEBG Write :1022 deps:[JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32752 Sep 22 23:14:53.289 INFO [lossy] sleeping 1 second
32753 Sep 22 23:14:53.531 DEBG Flush :1017 extent_limit None deps:[JobId(1016), JobId(1015), JobId(1014)] res:true f:6 g:1
32754 Sep 22 23:14:53.561 DEBG Write :1018 deps:[JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32755 Sep 22 23:14:53.562 INFO [lossy] skipping 1019
32756 Sep 22 23:14:53.562 INFO [lossy] skipping 1020
32757 Sep 22 23:14:53.562 WARN 1021 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32758 Sep 22 23:14:53.562 INFO [lossy] skipping 1022
32759 Sep 22 23:14:53.562 INFO [lossy] skipping 1025
32760 Sep 22 23:14:53.562 INFO [lossy] skipping 1028
32761 Sep 22 23:14:53.592 DEBG Write :1019 deps:[JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32762 Sep 22 23:14:53.593 INFO [lossy] skipping 1020
32763 Sep 22 23:14:53.599 DEBG Flush :1020 extent_limit None deps:[JobId(1019), JobId(1018), JobId(1017)] res:true f:7 g:1
32764 Sep 22 23:14:53.629 DEBG Write :1021 deps:[JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32765 Sep 22 23:14:53.660 DEBG Write :1022 deps:[JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32766 Sep 22 23:14:53.668 DEBG Flush :1023 extent_limit None deps:[JobId(1022), JobId(1021), JobId(1020)] res:true f:8 g:1
32767 Sep 22 23:14:53.698 DEBG Write :1024 deps:[JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32768 Sep 22 23:14:53.729 DEBG Write :1025 deps:[JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32769 Sep 22 23:14:53.737 DEBG Flush :1026 extent_limit None deps:[JobId(1025), JobId(1024), JobId(1023)] res:true f:9 g:1
32770 Sep 22 23:14:53.738 WARN returning error on write!
32771 Sep 22 23:14:53.738 DEBG Write :1027 deps:[JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32772 Sep 22 23:14:53.738 INFO [lossy] skipping 1028
32773 Sep 22 23:14:53.738 INFO [lossy] skipping 1027
32774 Sep 22 23:14:53.738 INFO [lossy] skipping 1028
32775 Sep 22 23:14:53.738 INFO [lossy] skipping 1027
32776 Sep 22 23:14:53.738 INFO [lossy] skipping 1028
32777 Sep 22 23:14:53.738 INFO [lossy] skipping 1027
32778 Sep 22 23:14:53.738 INFO [lossy] skipping 1028
32779 Sep 22 23:14:53.738 INFO [lossy] skipping 1027
32780 Sep 22 23:14:53.738 WARN 1028 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32781 Sep 22 23:14:53.768 DEBG Write :1027 deps:[JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32782 Sep 22 23:14:53.770 WARN returning error on flush!
32783 Sep 22 23:14:53.770 DEBG Flush :1028 extent_limit None deps:[JobId(1027), JobId(1026)] res:false f:10 g:1
32784 Sep 22 23:14:53.770 WARN returning error on flush!
32785 Sep 22 23:14:53.770 DEBG Flush :1028 extent_limit None deps:[JobId(1027), JobId(1026)] res:false f:10 g:1
32786 Sep 22 23:14:53.770 INFO [lossy] skipping 1028
32787 Sep 22 23:14:53.773 DEBG Flush :1028 extent_limit None deps:[JobId(1027), JobId(1026)] res:true f:10 g:1
32788 Sep 22 23:14:53.773 INFO [lossy] sleeping 1 second
32789 Sep 22 23:14:54.235 INFO [lossy] skipping 1025
32790 Sep 22 23:14:54.235 INFO [lossy] skipping 1027
32791 Sep 22 23:14:54.235 INFO [lossy] skipping 1028
32792 Sep 22 23:14:54.235 INFO [lossy] skipping 1030
32793 Sep 22 23:14:54.235 INFO [lossy] skipping 1025
32794 Sep 22 23:14:54.235 INFO [lossy] skipping 1025
32795 Sep 22 23:14:54.267 DEBG Write :1025 deps:[JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32796 Sep 22 23:14:54.274 DEBG Flush :1026 extent_limit None deps:[JobId(1025), JobId(1024), JobId(1023)] res:true f:9 g:1
32797 Sep 22 23:14:54.274 INFO [lossy] skipping 1027
32798 Sep 22 23:14:54.274 WARN 1028 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32799 Sep 22 23:14:54.274 WARN 1029 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32800 Sep 22 23:14:54.274 INFO [lossy] skipping 1030
32801 Sep 22 23:14:54.304 DEBG Write :1027 deps:[JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32802 Sep 22 23:14:54.305 WARN 1030 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32803 Sep 22 23:14:54.308 DEBG Flush :1028 extent_limit None deps:[JobId(1027), JobId(1026)] res:true f:10 g:1
32804 Sep 22 23:14:54.339 DEBG Write :1029 deps:[JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32805 Sep 22 23:14:54.340 WARN returning error on write!
32806 Sep 22 23:14:54.341 DEBG Write :1030 deps:[JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32807 Sep 22 23:14:54.341 INFO [lossy] skipping 1031
32808 Sep 22 23:14:54.341 INFO [lossy] skipping 1030
32809 Sep 22 23:14:54.341 WARN 1031 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32810 Sep 22 23:14:54.341 INFO [lossy] skipping 1030
32811 Sep 22 23:14:54.371 DEBG Write :1030 deps:[JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32812 Sep 22 23:14:54.378 DEBG Flush :1031 extent_limit None deps:[JobId(1030), JobId(1029), JobId(1028)] res:true f:11 g:1
32813 Sep 22 23:14:54.378 INFO [lossy] sleeping 1 second
32814 Sep 22 23:14:54.379 INFO [lossy] skipping 1023
32815 Sep 22 23:14:54.379 INFO [lossy] skipping 1030
32816 Sep 22 23:14:54.379 WARN returning error on flush!
32817 Sep 22 23:14:54.379 DEBG Flush :1023 extent_limit None deps:[JobId(1022), JobId(1021), JobId(1020)] res:false f:8 g:1
32818 Sep 22 23:14:54.379 WARN returning error on flush!
32819 Sep 22 23:14:54.379 DEBG Flush :1023 extent_limit None deps:[JobId(1022), JobId(1021), JobId(1020)] res:false f:8 g:1
32820 Sep 22 23:14:54.385 DEBG Flush :1023 extent_limit None deps:[JobId(1022), JobId(1021), JobId(1020)] res:true f:8 g:1
32821 Sep 22 23:14:54.385 INFO [lossy] skipping 1024
32822 Sep 22 23:14:54.416 DEBG Write :1025 deps:[JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32823 Sep 22 23:14:54.417 WARN 1026 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32824 Sep 22 23:14:54.417 WARN 1027 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32825 Sep 22 23:14:54.417 WARN 1029 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
32826 Sep 22 23:14:54.417 WARN 1030 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
32827 Sep 22 23:14:54.417 INFO [lossy] skipping 1031
32828 Sep 22 23:14:54.448 DEBG Write :1024 deps:[JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32829 Sep 22 23:14:54.449 INFO [lossy] skipping 1031
32830 Sep 22 23:14:54.449 INFO [lossy] skipping 1031
32831 Sep 22 23:14:54.449 INFO [lossy] skipping 1026
32832 Sep 22 23:14:54.449 INFO [lossy] skipping 1030
32833 Sep 22 23:14:54.449 WARN returning error on flush!
32834 Sep 22 23:14:54.449 DEBG Flush :1026 extent_limit None deps:[JobId(1025), JobId(1024), JobId(1023)] res:false f:9 g:1
32835 Sep 22 23:14:54.449 INFO [lossy] skipping 1030
32836 Sep 22 23:14:54.449 WARN returning error on flush!
32837 Sep 22 23:14:54.449 DEBG Flush :1026 extent_limit None deps:[JobId(1025), JobId(1024), JobId(1023)] res:false f:9 g:1
32838 Sep 22 23:14:54.455 DEBG Flush :1026 extent_limit None deps:[JobId(1025), JobId(1024), JobId(1023)] res:true f:9 g:1
32839 Sep 22 23:14:54.486 DEBG Write :1027 deps:[JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32840 Sep 22 23:14:54.490 DEBG Flush :1028 extent_limit None deps:[JobId(1027), JobId(1026)] res:true f:10 g:1
32841 Sep 22 23:14:54.521 DEBG Write :1029 deps:[JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32842 Sep 22 23:14:54.523 WARN returning error on write!
32843 Sep 22 23:14:54.523 DEBG Write :1030 deps:[JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32844 Sep 22 23:14:54.523 WARN 1031 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32845 Sep 22 23:14:54.524 WARN returning error on write!
32846 Sep 22 23:14:54.524 DEBG Write :1030 deps:[JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32847 Sep 22 23:14:54.554 DEBG Write :1030 deps:[JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32848 Sep 22 23:14:54.562 DEBG Flush :1031 extent_limit None deps:[JobId(1030), JobId(1029), JobId(1028)] res:true f:11 g:1
32849 Sep 22 23:14:54.562 INFO [lossy] sleeping 1 second
32850 Sep 22 23:14:54.784 WARN returning error on write!
32851 Sep 22 23:14:54.784 DEBG Write :1029 deps:[JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32852 Sep 22 23:14:54.816 DEBG Write :1030 deps:[JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32853 Sep 22 23:14:54.817 WARN returning error on write!
32854 Sep 22 23:14:54.817 DEBG Write :1029 deps:[JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32855 Sep 22 23:14:54.818 INFO [lossy] skipping 1029
32856 Sep 22 23:14:54.818 INFO [lossy] skipping 1029
32857 Sep 22 23:14:54.848 DEBG Write :1029 deps:[JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32858 Sep 22 23:14:54.855 DEBG Flush :1031 extent_limit None deps:[JobId(1030), JobId(1029), JobId(1028)] res:true f:11 g:1
32859 Sep 22 23:14:54.855 INFO [lossy] skipping 1032
32860 Sep 22 23:14:54.855 INFO [lossy] skipping 1032
32861 Sep 22 23:14:54.885 DEBG Write :1032 deps:[JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32862 Sep 22 23:14:54.886 INFO [lossy] sleeping 1 second
32863 Sep 22 23:14:55.555 WARN returning error on write!
32864 Sep 22 23:14:55.555 DEBG Write :1032 deps:[JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32865 Sep 22 23:14:55.556 INFO [lossy] skipping 1033
32866 Sep 22 23:14:55.556 INFO [lossy] skipping 1034
32867 Sep 22 23:14:55.556 INFO [lossy] skipping 1035
32868 Sep 22 23:14:55.586 DEBG Write :1032 deps:[JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32869 Sep 22 23:14:55.617 DEBG Write :1033 deps:[JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32870 Sep 22 23:14:55.624 DEBG Flush :1034 extent_limit None deps:[JobId(1033), JobId(1032), JobId(1031)] res:true f:12 g:1
32871 Sep 22 23:14:55.624 INFO [lossy] skipping 1035
32872 Sep 22 23:14:55.624 INFO [lossy] skipping 1035
32873 Sep 22 23:14:55.624 INFO [lossy] skipping 1035
32874 Sep 22 23:14:55.624 INFO [lossy] skipping 1035
32875 Sep 22 23:14:55.654 DEBG Write :1035 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32876 Sep 22 23:14:55.655 INFO [lossy] sleeping 1 second
32877 Sep 22 23:14:55.656 WARN returning error on write!
32878 Sep 22 23:14:55.656 DEBG Write :1032 deps:[JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32879 Sep 22 23:14:55.658 WARN returning error on write!
32880 Sep 22 23:14:55.658 DEBG Write :1033 deps:[JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32881 Sep 22 23:14:55.658 INFO [lossy] skipping 1034
32882 Sep 22 23:14:55.658 INFO [lossy] skipping 1036
32883 Sep 22 23:14:55.688 DEBG Write :1032 deps:[JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32884 Sep 22 23:14:55.719 DEBG Write :1033 deps:[JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32885 Sep 22 23:14:55.727 DEBG Flush :1034 extent_limit None deps:[JobId(1033), JobId(1032), JobId(1031)] res:true f:12 g:1
32886 Sep 22 23:14:55.757 DEBG Write :1036 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32887 Sep 22 23:14:55.758 INFO [lossy] sleeping 1 second
32888 Sep 22 23:14:56.004 DEBG Write :1033 deps:[JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32889 Sep 22 23:14:56.005 WARN returning error on flush!
32890 Sep 22 23:14:56.005 DEBG Flush :1034 extent_limit None deps:[JobId(1033), JobId(1032), JobId(1031)] res:false f:12 g:1
32891 Sep 22 23:14:56.005 INFO [lossy] skipping 1036
32892 Sep 22 23:14:56.005 WARN returning error on flush!
32893 Sep 22 23:14:56.005 DEBG Flush :1034 extent_limit None deps:[JobId(1033), JobId(1032), JobId(1031)] res:false f:12 g:1
32894 Sep 22 23:14:56.012 DEBG Flush :1034 extent_limit None deps:[JobId(1033), JobId(1032), JobId(1031)] res:true f:12 g:1
32895 Sep 22 23:14:56.012 INFO [lossy] skipping 1035
32896 Sep 22 23:14:56.042 DEBG Write :1036 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32897 Sep 22 23:14:56.043 WARN 1037 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32898 Sep 22 23:14:56.043 INFO [lossy] skipping 1038
32899 Sep 22 23:14:56.073 DEBG Write :1035 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32900 Sep 22 23:14:56.074 WARN 1038 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32901 Sep 22 23:14:56.074 INFO [lossy] skipping 1037
32902 Sep 22 23:14:56.080 DEBG Flush :1037 extent_limit None deps:[JobId(1036), JobId(1035), JobId(1034)] res:true f:13 g:1
32903 Sep 22 23:14:56.080 INFO [lossy] sleeping 1 second
32904 Sep 22 23:14:56.767 DEBG Write :1036 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32905 Sep 22 23:14:56.768 INFO [lossy] skipping 1037
32906 Sep 22 23:14:56.768 INFO [lossy] skipping 1037
32907 Sep 22 23:14:56.768 INFO [lossy] skipping 1037
32908 Sep 22 23:14:56.775 DEBG Flush :1037 extent_limit None deps:[JobId(1036), JobId(1035), JobId(1034)] res:true f:13 g:1
32909 Sep 22 23:14:56.805 DEBG Write :1038 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32910 Sep 22 23:14:56.806 WARN returning error on write!
32911 Sep 22 23:14:56.806 DEBG Write :1039 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32912 Sep 22 23:14:56.807 WARN 1040 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32913 Sep 22 23:14:56.807 WARN 1041 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32914 Sep 22 23:14:56.807 INFO [lossy] skipping 1043
32915 Sep 22 23:14:56.807 INFO [lossy] skipping 1039
32916 Sep 22 23:14:56.807 INFO [lossy] skipping 1043
32917 Sep 22 23:14:56.837 DEBG Write :1039 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32918 Sep 22 23:14:56.837 WARN 1043 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
32919 Sep 22 23:14:56.844 DEBG Flush :1040 extent_limit None deps:[JobId(1039), JobId(1038), JobId(1037)] res:true f:14 g:1
32920 Sep 22 23:14:56.844 INFO [lossy] skipping 1041
32921 Sep 22 23:14:56.844 INFO [lossy] skipping 1042
32922 Sep 22 23:14:56.844 INFO [lossy] skipping 1043
32923 Sep 22 23:14:56.874 DEBG Write :1041 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32924 Sep 22 23:14:56.875 INFO [lossy] skipping 1042
32925 Sep 22 23:14:56.875 WARN 1043 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32926 Sep 22 23:14:56.878 DEBG Flush :1042 extent_limit None deps:[JobId(1041), JobId(1040)] res:true f:15 g:1
32927 Sep 22 23:14:56.908 DEBG Write :1043 deps:[JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32928 Sep 22 23:14:56.910 INFO [lossy] sleeping 1 second
32929 Sep 22 23:14:56.940 DEBG Write :1035 deps:[JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32930 Sep 22 23:14:56.941 WARN returning error on flush!
32931 Sep 22 23:14:56.941 DEBG Flush :1037 extent_limit None deps:[JobId(1036), JobId(1035), JobId(1034)] res:false f:13 g:1
32932 Sep 22 23:14:56.948 DEBG Flush :1037 extent_limit None deps:[JobId(1036), JobId(1035), JobId(1034)] res:true f:13 g:1
32933 Sep 22 23:14:56.948 INFO [lossy] sleeping 1 second
32934 Sep 22 23:14:57.193 DEBG Write :1038 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32935 Sep 22 23:14:57.225 DEBG Write :1039 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32936 Sep 22 23:14:57.232 DEBG Flush :1040 extent_limit None deps:[JobId(1039), JobId(1038), JobId(1037)] res:true f:14 g:1
32937 Sep 22 23:14:57.263 DEBG Write :1041 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32938 Sep 22 23:14:57.267 DEBG Flush :1042 extent_limit None deps:[JobId(1041), JobId(1040)] res:true f:15 g:1
32939 Sep 22 23:14:57.268 WARN returning error on write!
32940 Sep 22 23:14:57.268 DEBG Write :1043 deps:[JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32941 Sep 22 23:14:57.268 INFO [lossy] skipping 1044
32942 Sep 22 23:14:57.268 INFO [lossy] skipping 1045
32943 Sep 22 23:14:57.268 INFO [lossy] skipping 1043
32944 Sep 22 23:14:57.299 DEBG Write :1044 deps:[JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32945 Sep 22 23:14:57.300 WARN returning error on write!
32946 Sep 22 23:14:57.300 DEBG Write :1043 deps:[JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32947 Sep 22 23:14:57.331 DEBG Write :1043 deps:[JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32948 Sep 22 23:14:57.332 INFO [lossy] sleeping 1 second
32949 Sep 22 23:14:57.984 WARN returning error on write!
32950 Sep 22 23:14:57.984 DEBG Write :1044 deps:[JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32951 Sep 22 23:14:57.984 INFO [lossy] skipping 1049
32952 Sep 22 23:14:57.985 WARN returning error on write!
32953 Sep 22 23:14:57.985 DEBG Write :1044 deps:[JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32954 Sep 22 23:14:58.016 DEBG Write :1044 deps:[JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32955 Sep 22 23:14:58.017 INFO [lossy] sleeping 1 second
32956 Sep 22 23:14:58.018 WARN returning error on write!
32957 Sep 22 23:14:58.018 DEBG Write :1038 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32958 Sep 22 23:14:58.019 WARN returning error on write!
32959 Sep 22 23:14:58.019 DEBG Write :1039 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32960 Sep 22 23:14:58.019 WARN 1040 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
32961 Sep 22 23:14:58.019 WARN 1041 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32962 Sep 22 23:14:58.019 INFO [lossy] skipping 1043
32963 Sep 22 23:14:58.020 WARN returning error on write!
32964 Sep 22 23:14:58.020 DEBG Write :1038 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32965 Sep 22 23:14:58.050 DEBG Write :1039 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32966 Sep 22 23:14:58.051 WARN 1043 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
32967 Sep 22 23:14:58.081 DEBG Write :1038 deps:[JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32968 Sep 22 23:14:58.082 INFO [lossy] skipping 1040
32969 Sep 22 23:14:58.082 INFO [lossy] skipping 1046
32970 Sep 22 23:14:58.082 INFO [lossy] skipping 1048
32971 Sep 22 23:14:58.089 DEBG Flush :1040 extent_limit None deps:[JobId(1039), JobId(1038), JobId(1037)] res:true f:14 g:1
32972 Sep 22 23:14:58.089 WARN 1046 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
32973 Sep 22 23:14:58.119 DEBG Write :1041 deps:[JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32974 Sep 22 23:14:58.123 DEBG Flush :1042 extent_limit None deps:[JobId(1041), JobId(1040)] res:true f:15 g:1
32975 Sep 22 23:14:58.124 INFO [lossy] skipping 1043
32976 Sep 22 23:14:58.124 WARN returning error on write!
32977 Sep 22 23:14:58.124 DEBG Write :1044 deps:[JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
32978 Sep 22 23:14:58.125 INFO [lossy] skipping 1045
32979 Sep 22 23:14:58.125 INFO [lossy] skipping 1046
32980 Sep 22 23:14:58.125 WARN 1047 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32981 Sep 22 23:14:58.125 WARN 1049 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
32982 Sep 22 23:14:58.125 INFO [lossy] skipping 1043
32983 Sep 22 23:14:58.125 INFO [lossy] skipping 1044
32984 Sep 22 23:14:58.125 WARN 1045 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
32985 Sep 22 23:14:58.125 INFO [lossy] skipping 1046
32986 Sep 22 23:14:58.155 DEBG Write :1043 deps:[JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32987 Sep 22 23:14:58.186 DEBG Write :1044 deps:[JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32988 Sep 22 23:14:58.187 WARN 1046 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32989 Sep 22 23:14:58.194 DEBG Flush :1045 extent_limit None deps:[JobId(1044), JobId(1043), JobId(1042)] res:true f:16 g:1
32990 Sep 22 23:14:58.194 INFO [lossy] skipping 1046
32991 Sep 22 23:14:58.224 DEBG Write :1047 deps:[JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32992 Sep 22 23:14:58.225 WARN 1048 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32993 Sep 22 23:14:58.225 WARN 1049 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
32994 Sep 22 23:14:58.225 INFO [lossy] skipping 1046
32995 Sep 22 23:14:58.256 DEBG Write :1046 deps:[JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
32996 Sep 22 23:14:58.257 WARN returning error on flush!
32997 Sep 22 23:14:58.257 DEBG Flush :1048 extent_limit None deps:[JobId(1047), JobId(1046), JobId(1045)] res:false f:17 g:1
32998 Sep 22 23:14:58.257 INFO [lossy] skipping 1048
32999 Sep 22 23:14:58.263 DEBG Flush :1048 extent_limit None deps:[JobId(1047), JobId(1046), JobId(1045)] res:true f:17 g:1
33000 Sep 22 23:14:58.293 DEBG Write :1049 deps:[JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33001 Sep 22 23:14:58.295 INFO [lossy] sleeping 1 second
33002 Sep 22 23:14:58.515 DEBG Flush :1045 extent_limit None deps:[JobId(1044), JobId(1043), JobId(1042)] res:true f:16 g:1
33003 Sep 22 23:14:58.515 INFO [lossy] skipping 1046
33004 Sep 22 23:14:58.545 DEBG Write :1047 deps:[JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33005 Sep 22 23:14:58.546 INFO [lossy] skipping 1046
33006 Sep 22 23:14:58.576 DEBG Write :1046 deps:[JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33007 Sep 22 23:14:58.577 INFO [lossy] skipping 1048
33008 Sep 22 23:14:58.584 DEBG Flush :1048 extent_limit None deps:[JobId(1047), JobId(1046), JobId(1045)] res:true f:17 g:1
33009 Sep 22 23:14:58.584 INFO [lossy] skipping 1049
33010 Sep 22 23:14:58.614 DEBG Write :1049 deps:[JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33011 Sep 22 23:14:58.615 INFO [lossy] sleeping 1 second
33012 Sep 22 23:14:59.049 DEBG Flush :1045 extent_limit None deps:[JobId(1044), JobId(1043), JobId(1042)] res:true f:16 g:1
33013 Sep 22 23:14:59.080 DEBG Write :1046 deps:[JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33014 Sep 22 23:14:59.081 INFO [lossy] skipping 1047
33015 Sep 22 23:14:59.081 WARN 1048 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
33016 Sep 22 23:14:59.081 WARN 1049 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
33017 Sep 22 23:14:59.081 INFO [lossy] skipping 1050
33018 Sep 22 23:14:59.081 INFO [lossy] skipping 1053
33019 Sep 22 23:14:59.081 INFO [lossy] skipping 1047
33020 Sep 22 23:14:59.081 INFO [lossy] skipping 1053
33021 Sep 22 23:14:59.082 WARN returning error on write!
33022 Sep 22 23:14:59.082 DEBG Write :1047 deps:[JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33023 Sep 22 23:14:59.083 INFO [lossy] skipping 1053
33024 Sep 22 23:14:59.083 WARN returning error on write!
33025 Sep 22 23:14:59.083 DEBG Write :1047 deps:[JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33026 Sep 22 23:14:59.113 DEBG Write :1047 deps:[JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33027 Sep 22 23:14:59.121 DEBG Flush :1048 extent_limit None deps:[JobId(1047), JobId(1046), JobId(1045)] res:true f:17 g:1
33028 Sep 22 23:14:59.151 DEBG Write :1049 deps:[JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33029 Sep 22 23:14:59.182 DEBG Write :1050 deps:[JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33030 Sep 22 23:14:59.189 DEBG Flush :1051 extent_limit None deps:[JobId(1050), JobId(1049), JobId(1048)] res:true f:18 g:1
33031 Sep 22 23:14:59.220 DEBG Write :1052 deps:[JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33032 Sep 22 23:14:59.252 DEBG Write :1053 deps:[JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33033 Sep 22 23:14:59.260 DEBG Flush :1054 extent_limit None deps:[JobId(1053), JobId(1052), JobId(1051)] res:true f:19 g:1
33034 Sep 22 23:14:59.260 INFO [lossy] sleeping 1 second
33035 Sep 22 23:14:59.470 WARN returning error on write!
33036 Sep 22 23:14:59.470 DEBG Write :1050 deps:[JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33037 Sep 22 23:14:59.470 INFO [lossy] skipping 1051
33038 Sep 22 23:14:59.471 WARN returning error on write!
33039 Sep 22 23:14:59.471 DEBG Write :1050 deps:[JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33040 Sep 22 23:14:59.501 DEBG Write :1050 deps:[JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33041 Sep 22 23:14:59.502 INFO [lossy] sleeping 1 second
33042 Sep 22 23:14:59.747 DEBG Write :1050 deps:[JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33043 Sep 22 23:14:59.755 DEBG Flush :1051 extent_limit None deps:[JobId(1050), JobId(1049), JobId(1048)] res:true f:18 g:1
33044 Sep 22 23:14:59.755 WARN returning error on write!
33045 Sep 22 23:14:59.755 DEBG Write :1052 deps:[JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33046 Sep 22 23:14:59.786 DEBG Write :1053 deps:[JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33047 Sep 22 23:14:59.788 WARN returning error on write!
33048 Sep 22 23:14:59.788 DEBG Write :1052 deps:[JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33049 Sep 22 23:14:59.788 INFO [lossy] skipping 1052
33050 Sep 22 23:14:59.789 WARN returning error on write!
33051 Sep 22 23:14:59.789 DEBG Write :1052 deps:[JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33052 Sep 22 23:14:59.819 DEBG Write :1052 deps:[JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33053 Sep 22 23:14:59.820 WARN returning error on flush!
33054 Sep 22 23:14:59.820 DEBG Flush :1054 extent_limit None deps:[JobId(1053), JobId(1052), JobId(1051)] res:false f:19 g:1
33055 Sep 22 23:14:59.820 WARN returning error on flush!
33056 Sep 22 23:14:59.820 DEBG Flush :1054 extent_limit None deps:[JobId(1053), JobId(1052), JobId(1051)] res:false f:19 g:1
33057 Sep 22 23:14:59.820 INFO [lossy] skipping 1054
33058 Sep 22 23:14:59.820 INFO [lossy] skipping 1054
33059 Sep 22 23:14:59.820 INFO [lossy] skipping 1054
33060 Sep 22 23:14:59.820 WARN returning error on flush!
33061 Sep 22 23:14:59.820 DEBG Flush :1054 extent_limit None deps:[JobId(1053), JobId(1052), JobId(1051)] res:false f:19 g:1
33062 Sep 22 23:14:59.826 DEBG Flush :1054 extent_limit None deps:[JobId(1053), JobId(1052), JobId(1051)] res:true f:19 g:1
33063 Sep 22 23:14:59.856 DEBG Write :1055 deps:[JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33064 Sep 22 23:14:59.858 INFO [lossy] skipping 1056
33065 Sep 22 23:14:59.858 WARN 1057 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
33066 Sep 22 23:14:59.858 INFO [lossy] skipping 1056
33067 Sep 22 23:14:59.888 DEBG Write :1056 deps:[JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33068 Sep 22 23:14:59.889 INFO [lossy] sleeping 1 second
33069 Sep 22 23:15:00.339 DEBG Write :1055 deps:[JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33070 Sep 22 23:15:00.341 WARN returning error on write!
33071 Sep 22 23:15:00.341 DEBG Write :1056 deps:[JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33072 Sep 22 23:15:00.341 INFO [lossy] skipping 1057
33073 Sep 22 23:15:00.371 DEBG Write :1056 deps:[JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33074 Sep 22 23:15:00.372 INFO [lossy] skipping 1057
33075 Sep 22 23:15:00.378 DEBG Flush :1057 extent_limit None deps:[JobId(1056), JobId(1055), JobId(1054)] res:true f:20 g:1
33076 Sep 22 23:15:00.379 WARN returning error on write!
33077 Sep 22 23:15:00.379 DEBG Write :1058 deps:[JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33078 Sep 22 23:15:00.410 DEBG Write :1059 deps:[JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33079 Sep 22 23:15:00.411 INFO [lossy] skipping 1060
33080 Sep 22 23:15:00.441 DEBG Write :1058 deps:[JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33081 Sep 22 23:15:00.442 WARN returning error on flush!
33082 Sep 22 23:15:00.442 DEBG Flush :1060 extent_limit None deps:[JobId(1059), JobId(1058), JobId(1057)] res:false f:21 g:1
33083 Sep 22 23:15:00.448 DEBG Flush :1060 extent_limit None deps:[JobId(1059), JobId(1058), JobId(1057)] res:true f:21 g:1
33084 Sep 22 23:15:00.448 INFO [lossy] sleeping 1 second
33085 Sep 22 23:15:00.656 INFO [lossy] skipping 1051
33086 Sep 22 23:15:00.656 INFO [lossy] skipping 1058
33087 Sep 22 23:15:00.656 INFO [lossy] skipping 1059
33088 Sep 22 23:15:00.663 DEBG Flush :1051 extent_limit None deps:[JobId(1050), JobId(1049), JobId(1048)] res:true f:18 g:1
33089 Sep 22 23:15:00.663 INFO [lossy] skipping 1059
33090 Sep 22 23:15:00.663 INFO [lossy] skipping 1059
33091 Sep 22 23:15:00.664 WARN returning error on write!
33092 Sep 22 23:15:00.664 DEBG Write :1052 deps:[JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33093 Sep 22 23:15:00.695 DEBG Write :1053 deps:[JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33094 Sep 22 23:15:00.696 INFO [lossy] skipping 1054
33095 Sep 22 23:15:00.696 WARN 1055 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
33096 Sep 22 23:15:00.696 INFO [lossy] skipping 1056
33097 Sep 22 23:15:00.696 INFO [lossy] skipping 1060
33098 Sep 22 23:15:00.696 INFO [lossy] skipping 1052
33099 Sep 22 23:15:00.696 WARN 1054 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
33100 Sep 22 23:15:00.696 INFO [lossy] skipping 1056
33101 Sep 22 23:15:00.696 INFO [lossy] skipping 1060
33102 Sep 22 23:15:00.725 DEBG Write :1052 deps:[JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33103 Sep 22 23:15:00.726 INFO [lossy] skipping 1056
33104 Sep 22 23:15:00.726 WARN 1056 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
33105 Sep 22 23:15:00.733 DEBG Flush :1054 extent_limit None deps:[JobId(1053), JobId(1052), JobId(1051)] res:true f:19 g:1
33106 Sep 22 23:15:00.763 DEBG Write :1055 deps:[JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33107 Sep 22 23:15:00.765 WARN returning error on write!
33108 Sep 22 23:15:00.765 DEBG Write :1056 deps:[JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33109 Sep 22 23:15:00.765 WARN 1057 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
33110 Sep 22 23:15:00.765 WARN 1058 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
33111 Sep 22 23:15:00.765 WARN 1059 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
33112 Sep 22 23:15:00.766 WARN returning error on write!
33113 Sep 22 23:15:00.766 DEBG Write :1056 deps:[JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33114 Sep 22 23:15:00.767 WARN returning error on write!
33115 Sep 22 23:15:00.767 DEBG Write :1056 deps:[JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33116 Sep 22 23:15:00.767 INFO [lossy] skipping 1056
33117 Sep 22 23:15:00.797 DEBG Write :1056 deps:[JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33118 Sep 22 23:15:00.805 DEBG Flush :1057 extent_limit None deps:[JobId(1056), JobId(1055), JobId(1054)] res:true f:20 g:1
33119 Sep 22 23:15:00.806 WARN returning error on write!
33120 Sep 22 23:15:00.806 DEBG Write :1058 deps:[JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33121 Sep 22 23:15:00.806 INFO [lossy] skipping 1059
33122 Sep 22 23:15:00.806 INFO [lossy] skipping 1060
33123 Sep 22 23:15:00.807 WARN returning error on write!
33124 Sep 22 23:15:00.807 DEBG Write :1058 deps:[JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33125 Sep 22 23:15:00.808 WARN returning error on write!
33126 Sep 22 23:15:00.808 DEBG Write :1059 deps:[JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33127 Sep 22 23:15:00.808 WARN 1060 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
33128 Sep 22 23:15:00.838 DEBG Write :1058 deps:[JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33129 Sep 22 23:15:00.840 WARN returning error on write!
33130 Sep 22 23:15:00.840 DEBG Write :1059 deps:[JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33131 Sep 22 23:15:00.870 DEBG Write :1059 deps:[JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33132 Sep 22 23:15:00.878 DEBG Flush :1060 extent_limit None deps:[JobId(1059), JobId(1058), JobId(1057)] res:true f:21 g:1
33133 Sep 22 23:15:00.878 INFO [lossy] sleeping 1 second
33134 Sep 22 23:15:01.093 DEBG Flush :1057 extent_limit None deps:[JobId(1056), JobId(1055), JobId(1054)] res:true f:20 g:1
33135 Sep 22 23:15:01.123 DEBG Write :1058 deps:[JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33136 Sep 22 23:15:01.154 DEBG Write :1059 deps:[JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33137 Sep 22 23:15:01.162 DEBG Flush :1060 extent_limit None deps:[JobId(1059), JobId(1058), JobId(1057)] res:true f:21 g:1
33138 Sep 22 23:15:01.192 DEBG Write :1061 deps:[JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33139 Sep 22 23:15:01.194 INFO [lossy] sleeping 1 second
33140 Sep 22 23:15:01.614 INFO [lossy] skipping 1061
33141 Sep 22 23:15:01.646 DEBG Write :1062 deps:[JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33142 Sep 22 23:15:01.647 INFO [lossy] skipping 1063
33143 Sep 22 23:15:01.647 INFO [lossy] skipping 1061
33144 Sep 22 23:15:01.647 INFO [lossy] skipping 1061
33145 Sep 22 23:15:01.679 DEBG Write :1061 deps:[JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33146 Sep 22 23:15:01.680 INFO [lossy] skipping 1063
33147 Sep 22 23:15:01.680 INFO [lossy] skipping 1063
33148 Sep 22 23:15:01.686 DEBG Flush :1063 extent_limit None deps:[JobId(1062), JobId(1061), JobId(1060)] res:true f:22 g:1
33149 Sep 22 23:15:01.687 INFO [lossy] sleeping 1 second
33150 Sep 22 23:15:01.928 DEBG Write :1061 deps:[JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33151 Sep 22 23:15:01.959 DEBG Write :1062 deps:[JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33152 Sep 22 23:15:01.961 INFO [lossy] skipping 1063
33153 Sep 22 23:15:01.961 INFO [lossy] skipping 1065
33154 Sep 22 23:15:01.961 INFO [lossy] skipping 1067
33155 Sep 22 23:15:01.967 DEBG Flush :1063 extent_limit None deps:[JobId(1062), JobId(1061), JobId(1060)] res:true f:22 g:1
33156 Sep 22 23:15:01.998 DEBG Write :1065 deps:[JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33157 Sep 22 23:15:02.030 DEBG Write :1064 deps:[JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33158 Sep 22 23:15:02.038 DEBG Flush :1066 extent_limit None deps:[JobId(1065), JobId(1064), JobId(1063)] res:true f:23 g:1
33159 Sep 22 23:15:02.069 DEBG Write :1067 deps:[JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33160 Sep 22 23:15:02.070 INFO [lossy] sleeping 1 second
33161 Sep 22 23:15:02.271 DEBG Write :1000 deps:[] res:true
33162 Sep 22 23:15:02.312 DEBG Write :1062 deps:[JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33163 Sep 22 23:15:02.320 DEBG Flush :1063 extent_limit None deps:[JobId(1062), JobId(1061), JobId(1060)] res:true f:22 g:1
33164 Sep 22 23:15:02.351 DEBG Write :1064 deps:[JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33165 Sep 22 23:15:02.383 DEBG Write :1065 deps:[JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33166 Sep 22 23:15:02.391 DEBG Flush :1066 extent_limit None deps:[JobId(1065), JobId(1064), JobId(1063)] res:true f:23 g:1
33167 Sep 22 23:15:02.423 DEBG Write :1067 deps:[JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33168 Sep 22 23:15:02.425 WARN returning error on write!
33169 Sep 22 23:15:02.425 DEBG Write :1068 deps:[JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33170 Sep 22 23:15:02.456 DEBG Write :1068 deps:[JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33171 Sep 22 23:15:02.458 WARN returning error on flush!
33172 Sep 22 23:15:02.458 DEBG Flush :1069 extent_limit None deps:[JobId(1068), JobId(1067), JobId(1066)] res:false f:24 g:1
33173 Sep 22 23:15:02.458 INFO [lossy] skipping 1069
33174 Sep 22 23:15:02.458 WARN returning error on flush!
33175 Sep 22 23:15:02.458 DEBG Flush :1069 extent_limit None deps:[JobId(1068), JobId(1067), JobId(1066)] res:false f:24 g:1
33176 Sep 22 23:15:02.464 DEBG Flush :1069 extent_limit None deps:[JobId(1068), JobId(1067), JobId(1066)] res:true f:24 g:1
33177 Sep 22 23:15:02.465 INFO [lossy] sleeping 1 second
33178 Sep 22 23:15:02.618 ERRO [0] job id 1007 saw error GenericError("test error")
33179 Sep 22 23:15:02.618 ERRO [0] job id 1007 saw error GenericError("test error")
33180 Sep 22 23:15:02.618 ERRO [0] job id 1009 saw error GenericError("test error")
33181 Sep 22 23:15:02.619 ERRO [0] job id 1016 saw error GenericError("test error")
33182 Sep 22 23:15:02.619 ERRO [0] job id 1017 saw error GenericError("test error")
33183 Sep 22 23:15:02.619 ERRO [0] job id 1019 saw error GenericError("test error")
33184 Sep 22 23:15:02.619 ERRO [0] job id 1020 saw error GenericError("test error")
33185 Sep 22 23:15:02.619 ERRO [0] job id 1023 saw error GenericError("test error")
33186 Sep 22 23:15:02.619 ERRO [0] job id 1023 saw error GenericError("test error")
33187 Sep 22 23:15:02.620 DEBG up_ds_listen was notified
33188 Sep 22 23:15:02.620 DEBG up_ds_listen process 1008
33189 Sep 22 23:15:02.620 DEBG [A] ack job 1008:9, : downstairs
33190 Sep 22 23:15:02.620 DEBG up_ds_listen checked 1 jobs, back to waiting
33191 Sep 22 23:15:02.620 ERRO [1] job id 1008 saw error GenericError("test error")
33192 Sep 22 23:15:02.620 DEBG up_ds_listen was notified
33193 Sep 22 23:15:02.620 DEBG up_ds_listen process 1011
33194 Sep 22 23:15:02.620 DEBG [A] ack job 1011:12, : downstairs
33195 Sep 22 23:15:02.620 DEBG up_ds_listen checked 1 jobs, back to waiting
33196 Sep 22 23:15:02.620 ERRO [1] job id 1008 saw error GenericError("test error")
33197 Sep 22 23:15:02.620 ERRO [2] job id 1012 saw error GenericError("test error")
33198 Sep 22 23:15:02.623 DEBG [rc] retire 1008 clears [JobId(1006), JobId(1007), JobId(1008)], : downstairs
33199 Sep 22 23:15:02.623 ERRO [2] job id 1012 saw error GenericError("test error")
33200 Sep 22 23:15:02.623 ERRO [1] job id 1010 saw error GenericError("test error")
33201 Sep 22 23:15:02.623 ERRO [2] job id 1014 saw error GenericError("test error")
33202 Sep 22 23:15:02.626 DEBG [rc] retire 1011 clears [JobId(1009), JobId(1010), JobId(1011)], : downstairs
33203 Sep 22 23:15:02.626 DEBG up_ds_listen was notified
33204 Sep 22 23:15:02.626 DEBG up_ds_listen process 1014
33205 Sep 22 23:15:02.626 DEBG [A] ack job 1014:15, : downstairs
33206 Sep 22 23:15:02.626 DEBG up_ds_listen checked 1 jobs, back to waiting
33207 Sep 22 23:15:02.626 ERRO [0] job id 1030 saw error GenericError("test error")
33208 Sep 22 23:15:02.626 ERRO [1] job id 1012 saw error GenericError("test error")
33209 Sep 22 23:15:02.633 ERRO [2] job id 1016 saw error GenericError("test error")
33210 Sep 22 23:15:02.633 ERRO [2] job id 1016 saw error GenericError("test error")
33211 Sep 22 23:15:02.634 ERRO [0] job id 1032 saw error GenericError("test error")
33212 Sep 22 23:15:02.634 ERRO [1] job id 1014 saw error GenericError("test error")
33213 Sep 22 23:15:02.637 DEBG [rc] retire 1014 clears [JobId(1012), JobId(1013), JobId(1014)], : downstairs
33214 Sep 22 23:15:02.637 DEBG up_ds_listen was notified
33215 Sep 22 23:15:02.637 DEBG up_ds_listen process 1017
33216 Sep 22 23:15:02.637 DEBG [A] ack job 1017:18, : downstairs
33217 Sep 22 23:15:02.637 DEBG up_ds_listen checked 1 jobs, back to waiting
33218 Sep 22 23:15:02.640 DEBG [rc] retire 1017 clears [JobId(1015), JobId(1016), JobId(1017)], : downstairs
33219 Sep 22 23:15:02.640 DEBG up_ds_listen was notified
33220 Sep 22 23:15:02.640 DEBG up_ds_listen process 1020
33221 Sep 22 23:15:02.640 DEBG [A] ack job 1020:21, : downstairs
33222 Sep 22 23:15:02.640 DEBG up_ds_listen checked 1 jobs, back to waiting
33223 Sep 22 23:15:02.640 ERRO [1] job id 1020 saw error GenericError("test error")
33224 Sep 22 23:15:02.640 DEBG up_ds_listen was notified
33225 Sep 22 23:15:02.640 DEBG up_ds_listen process 1023
33226 Sep 22 23:15:02.640 DEBG [A] ack job 1023:24, : downstairs
33227 Sep 22 23:15:02.640 DEBG up_ds_listen checked 1 jobs, back to waiting
33228 Sep 22 23:15:02.640 ERRO [0] job id 1039 saw error GenericError("test error")
33229 Sep 22 23:15:02.643 DEBG [rc] retire 1020 clears [JobId(1018), JobId(1019), JobId(1020)], : downstairs
33230 Sep 22 23:15:02.644 ERRO [1] job id 1022 saw error GenericError("test error")
33231 Sep 22 23:15:02.644 DEBG up_ds_listen was notified
33232 Sep 22 23:15:02.644 DEBG up_ds_listen process 1026
33233 Sep 22 23:15:02.644 DEBG [A] ack job 1026:27, : downstairs
33234 Sep 22 23:15:02.644 DEBG up_ds_listen checked 1 jobs, back to waiting
33235 Sep 22 23:15:02.644 ERRO [2] job id 1027 saw error GenericError("test error")
33236 Sep 22 23:15:02.644 ERRO [1] job id 1023 saw error GenericError("test error")
33237 Sep 22 23:15:02.644 ERRO [2] job id 1028 saw error GenericError("test error")
33238 Sep 22 23:15:02.644 ERRO [1] job id 1023 saw error GenericError("test error")
33239 Sep 22 23:15:02.644 ERRO [0] job id 1044 saw error GenericError("test error")
33240 Sep 22 23:15:02.644 ERRO [2] job id 1028 saw error GenericError("test error")
33241 Sep 22 23:15:02.647 DEBG [rc] retire 1023 clears [JobId(1021), JobId(1022), JobId(1023)], : downstairs
33242 Sep 22 23:15:02.647 ERRO [0] job id 1044 saw error GenericError("test error")
33243 Sep 22 23:15:02.647 DEBG up_ds_listen was notified
33244 Sep 22 23:15:02.647 DEBG up_ds_listen process 1028
33245 Sep 22 23:15:02.647 DEBG [A] ack job 1028:29, : downstairs
33246 Sep 22 23:15:02.647 DEBG up_ds_listen checked 1 jobs, back to waiting
33247 Sep 22 23:15:02.647 ERRO [2] job id 1029 saw error GenericError("test error")
33248 Sep 22 23:15:02.647 ERRO [1] job id 1026 saw error GenericError("test error")
33249 Sep 22 23:15:02.647 ERRO [2] job id 1029 saw error GenericError("test error")
33250 Sep 22 23:15:02.647 ERRO [1] job id 1026 saw error GenericError("test error")
33251 Sep 22 23:15:02.647 ERRO [0] job id 1047 saw error GenericError("test error")
33252 Sep 22 23:15:02.650 DEBG [rc] retire 1026 clears [JobId(1024), JobId(1025), JobId(1026)], : downstairs
33253 Sep 22 23:15:02.650 ERRO [0] job id 1047 saw error GenericError("test error")
33254 Sep 22 23:15:02.651 DEBG up_ds_listen was notified
33255 Sep 22 23:15:02.651 DEBG up_ds_listen process 1031
33256 Sep 22 23:15:02.651 DEBG [A] ack job 1031:32, : downstairs
33257 Sep 22 23:15:02.651 DEBG up_ds_listen checked 1 jobs, back to waiting
33258 Sep 22 23:15:02.652 DEBG [rc] retire 1028 clears [JobId(1027), JobId(1028)], : downstairs
33259 Sep 22 23:15:02.652 ERRO [2] job id 1034 saw error GenericError("test error")
33260 Sep 22 23:15:02.652 ERRO [1] job id 1030 saw error GenericError("test error")
33261 Sep 22 23:15:02.652 ERRO [2] job id 1034 saw error GenericError("test error")
33262 Sep 22 23:15:02.652 ERRO [1] job id 1030 saw error GenericError("test error")
33263 Sep 22 23:15:02.652 DEBG up_ds_listen was notified
33264 Sep 22 23:15:02.652 DEBG up_ds_listen process 1034
33265 Sep 22 23:15:02.652 DEBG [A] ack job 1034:35, : downstairs
33266 Sep 22 23:15:02.652 DEBG up_ds_listen checked 1 jobs, back to waiting
33267 Sep 22 23:15:02.656 DEBG [rc] retire 1031 clears [JobId(1029), JobId(1030), JobId(1031)], : downstairs
33268 Sep 22 23:15:02.656 ERRO [1] job id 1032 saw error GenericError("test error")
33269 Sep 22 23:15:02.656 ERRO [1] job id 1033 saw error GenericError("test error")
33270 Sep 22 23:15:02.656 DEBG up_ds_listen was notified
33271 Sep 22 23:15:02.656 DEBG up_ds_listen process 1037
33272 Sep 22 23:15:02.656 DEBG [A] ack job 1037:38, : downstairs
33273 Sep 22 23:15:02.656 DEBG up_ds_listen checked 1 jobs, back to waiting
33274 Sep 22 23:15:02.656 ERRO [0] job id 1056 saw error GenericError("test error")
33275 Sep 22 23:15:02.659 DEBG [rc] retire 1034 clears [JobId(1032), JobId(1033), JobId(1034)], : downstairs
33276 Sep 22 23:15:02.659 DEBG up_ds_listen was notified
33277 Sep 22 23:15:02.659 DEBG up_ds_listen process 1040
33278 Sep 22 23:15:02.659 DEBG [A] ack job 1040:41, : downstairs
33279 Sep 22 23:15:02.659 DEBG up_ds_listen checked 1 jobs, back to waiting
33280 Sep 22 23:15:02.659 ERRO [0] job id 1058 saw error GenericError("test error")
33281 Sep 22 23:15:02.659 DEBG up_ds_listen was notified
33282 Sep 22 23:15:02.659 DEBG up_ds_listen process 1042
33283 Sep 22 23:15:02.659 DEBG [A] ack job 1042:43, : downstairs
33284 Sep 22 23:15:02.659 DEBG up_ds_listen checked 1 jobs, back to waiting
33285 Sep 22 23:15:02.659 ERRO [2] job id 1043 saw error GenericError("test error")
33286 Sep 22 23:15:02.659 ERRO [1] job id 1037 saw error GenericError("test error")
33287 Sep 22 23:15:02.662 DEBG [rc] retire 1037 clears [JobId(1035), JobId(1036), JobId(1037)], : downstairs
33288 Sep 22 23:15:02.663 ERRO [0] job id 1060 saw error GenericError("test error")
33289 Sep 22 23:15:02.663 ERRO [2] job id 1043 saw error GenericError("test error")
33290 Sep 22 23:15:02.663 ERRO [1] job id 1038 saw error GenericError("test error")
33291 Sep 22 23:15:02.663 ERRO [1] job id 1039 saw error GenericError("test error")
33292 Sep 22 23:15:02.663 ERRO [1] job id 1038 saw error GenericError("test error")
33293 Sep 22 23:15:02.663 DEBG up_ds_listen was notified
33294 Sep 22 23:15:02.663 DEBG up_ds_listen process 1045
33295 Sep 22 23:15:02.663 DEBG [A] ack job 1045:46, : downstairs
33296 Sep 22 23:15:02.663 DEBG up_ds_listen checked 1 jobs, back to waiting
33297 Sep 22 23:15:02.666 DEBG [rc] retire 1040 clears [JobId(1038), JobId(1039), JobId(1040)], : downstairs
33298 Sep 22 23:15:02.666 DEBG up_ds_listen was notified
33299 Sep 22 23:15:02.666 DEBG up_ds_listen process 1048
33300 Sep 22 23:15:02.666 DEBG [A] ack job 1048:49, : downstairs
33301 Sep 22 23:15:02.666 DEBG up_ds_listen checked 1 jobs, back to waiting
33302 Sep 22 23:15:02.667 DEBG [rc] retire 1042 clears [JobId(1041), JobId(1042)], : downstairs
33303 Sep 22 23:15:02.667 ERRO [1] job id 1044 saw error GenericError("test error")
33304 Sep 22 23:15:02.667 DEBG up_ds_listen was notified
33305 Sep 22 23:15:02.667 DEBG up_ds_listen process 1051
33306 Sep 22 23:15:02.667 DEBG [A] ack job 1051:52, : downstairs
33307 Sep 22 23:15:02.667 DEBG up_ds_listen checked 1 jobs, back to waiting
33308 Sep 22 23:15:02.667 ERRO [2] job id 1052 saw error GenericError("test error")
33309 Sep 22 23:15:02.668 ERRO [2] job id 1052 saw error GenericError("test error")
33310 Sep 22 23:15:02.670 DEBG [rc] retire 1045 clears [JobId(1043), JobId(1044), JobId(1045)], : downstairs
33311 Sep 22 23:15:02.670 ERRO [2] job id 1052 saw error GenericError("test error")
33312 Sep 22 23:15:02.670 ERRO [2] job id 1054 saw error GenericError("test error")
33313 Sep 22 23:15:02.670 ERRO [1] job id 1048 saw error GenericError("test error")
33314 Sep 22 23:15:02.671 ERRO [2] job id 1054 saw error GenericError("test error")
33315 Sep 22 23:15:02.673 DEBG [rc] retire 1048 clears [JobId(1046), JobId(1047), JobId(1048)], : downstairs
33316 Sep 22 23:15:02.673 ERRO [2] job id 1054 saw error GenericError("test error")
33317 Sep 22 23:15:02.673 ERRO [1] job id 1050 saw error GenericError("test error")
33318 Sep 22 23:15:02.674 DEBG up_ds_listen was notified
33319 Sep 22 23:15:02.674 DEBG up_ds_listen process 1054
33320 Sep 22 23:15:02.674 DEBG [A] ack job 1054:55, : downstairs
33321 Sep 22 23:15:02.674 DEBG up_ds_listen checked 1 jobs, back to waiting
33322 Sep 22 23:15:02.674 ERRO [1] job id 1050 saw error GenericError("test error")
33323 Sep 22 23:15:02.677 DEBG [rc] retire 1051 clears [JobId(1049), JobId(1050), JobId(1051)], : downstairs
33324 Sep 22 23:15:02.677 DEBG up_ds_listen was notified
33325 Sep 22 23:15:02.677 DEBG up_ds_listen process 1057
33326 Sep 22 23:15:02.677 DEBG [A] ack job 1057:58, : downstairs
33327 Sep 22 23:15:02.677 DEBG up_ds_listen checked 1 jobs, back to waiting
33328 Sep 22 23:15:02.677 ERRO [1] job id 1052 saw error GenericError("test error")
33329 Sep 22 23:15:02.677 DEBG up_ds_listen was notified
33330 Sep 22 23:15:02.677 DEBG up_ds_listen process 1060
33331 Sep 22 23:15:02.677 DEBG [A] ack job 1060:61, : downstairs
33332 Sep 22 23:15:02.677 DEBG up_ds_listen checked 1 jobs, back to waiting
33333 Sep 22 23:15:02.680 DEBG [rc] retire 1054 clears [JobId(1052), JobId(1053), JobId(1054)], : downstairs
33334 Sep 22 23:15:02.680 ERRO [1] job id 1056 saw error GenericError("test error")
33335 Sep 22 23:15:02.680 DEBG up_ds_listen was notified
33336 Sep 22 23:15:02.680 DEBG up_ds_listen process 1063
33337 Sep 22 23:15:02.680 DEBG [A] ack job 1063:64, : downstairs
33338 Sep 22 23:15:02.680 DEBG up_ds_listen checked 1 jobs, back to waiting
33339 Sep 22 23:15:02.680 ERRO [1] job id 1056 saw error GenericError("test error")
33340 Sep 22 23:15:02.680 ERRO [1] job id 1056 saw error GenericError("test error")
33341 Sep 22 23:15:02.683 DEBG [rc] retire 1057 clears [JobId(1055), JobId(1056), JobId(1057)], : downstairs
33342 Sep 22 23:15:02.683 ERRO [2] job id 1068 saw error GenericError("test error")
33343 Sep 22 23:15:02.683 ERRO [1] job id 1058 saw error GenericError("test error")
33344 Sep 22 23:15:02.683 ERRO [1] job id 1058 saw error GenericError("test error")
33345 Sep 22 23:15:02.683 ERRO [2] job id 1069 saw error GenericError("test error")
33346 Sep 22 23:15:02.683 ERRO [1] job id 1059 saw error GenericError("test error")
33347 Sep 22 23:15:02.683 ERRO [2] job id 1069 saw error GenericError("test error")
33348 Sep 22 23:15:02.683 ERRO [1] job id 1059 saw error GenericError("test error")
33349 Sep 22 23:15:02.686 DEBG [rc] retire 1060 clears [JobId(1058), JobId(1059), JobId(1060)], : downstairs
33350 Sep 22 23:15:02.689 DEBG [rc] retire 1063 clears [JobId(1061), JobId(1062), JobId(1063)], : downstairs
33351 Sep 22 23:15:02.689 DEBG up_ds_listen was notified
33352 Sep 22 23:15:02.689 DEBG up_ds_listen process 1066
33353 Sep 22 23:15:02.689 DEBG [A] ack job 1066:67, : downstairs
33354 Sep 22 23:15:02.689 DEBG up_ds_listen checked 1 jobs, back to waiting
33355 Sep 22 23:15:02.717 DEBG Write :1000 deps:[] res:true
33356 Sep 22 23:15:02.721 DEBG Write :1064 deps:[JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33357 Sep 22 23:15:02.723 WARN returning error on write!
33358 Sep 22 23:15:02.723 DEBG Write :1065 deps:[JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33359 Sep 22 23:15:02.724 INFO [lossy] skipping 1070
33360 Sep 22 23:15:02.724 INFO [lossy] skipping 1072
33361 Sep 22 23:15:02.754 DEBG Write :1065 deps:[JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33362 Sep 22 23:15:02.756 WARN returning error on flush!
33363 Sep 22 23:15:02.756 DEBG Flush :1066 extent_limit None deps:[JobId(1065), JobId(1064), JobId(1063)] res:false f:23 g:1
33364 Sep 22 23:15:02.756 INFO [lossy] skipping 1067
33365 Sep 22 23:15:02.756 INFO [lossy] skipping 1070
33366 Sep 22 23:15:02.762 DEBG Flush :1066 extent_limit None deps:[JobId(1065), JobId(1064), JobId(1063)] res:true f:23 g:1
33367 Sep 22 23:15:02.793 DEBG Write :1067 deps:[JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33368 Sep 22 23:15:02.794 WARN 1070 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
33369 Sep 22 23:15:02.825 DEBG Write :1068 deps:[JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33370 Sep 22 23:15:02.833 DEBG Flush :1069 extent_limit None deps:[JobId(1068), JobId(1067), JobId(1066)] res:true f:24 g:1
33371 Sep 22 23:15:02.834 WARN returning error on write!
33372 Sep 22 23:15:02.834 DEBG Write :1070 deps:[JobId(1069), JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33373 Sep 22 23:15:02.834 WARN 1071 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
33374 Sep 22 23:15:02.834 INFO [lossy] skipping 1072
33375 Sep 22 23:15:02.834 INFO [lossy] skipping 1073
33376 Sep 22 23:15:02.835 WARN returning error on write!
33377 Sep 22 23:15:02.835 DEBG Write :1070 deps:[JobId(1069), JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33378 Sep 22 23:15:02.865 DEBG Write :1070 deps:[JobId(1069), JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33379 Sep 22 23:15:02.867 INFO [lossy] sleeping 1 second
33380 Sep 22 23:15:02.867 ERRO [0] job id 1065 saw error GenericError("test error")
33381 Sep 22 23:15:02.867 ERRO [0] job id 1066 saw error GenericError("test error")
33382 Sep 22 23:15:02.870 DEBG [rc] retire 1066 clears [JobId(1064), JobId(1065), JobId(1066)], : downstairs
33383 Sep 22 23:15:02.870 ERRO [0] job id 1070 saw error GenericError("test error")
33384 Sep 22 23:15:02.870 ERRO [0] job id 1070 saw error GenericError("test error")
33385 Sep 22 23:15:02.870 DEBG up_ds_listen was notified
33386 Sep 22 23:15:02.870 DEBG up_ds_listen process 1069
33387 Sep 22 23:15:02.870 DEBG [A] ack job 1069:70, : downstairs
33388 Sep 22 23:15:02.870 DEBG up_ds_listen checked 1 jobs, back to waiting
33389 Sep 22 23:15:03.073 WARN returning error on write!
33390 Sep 22 23:15:03.073 DEBG Write :1068 deps:[JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
33391 Sep 22 23:15:03.073 INFO [lossy] skipping 1069
33392 Sep 22 23:15:03.073 INFO [lossy] skipping 1073
33393 Sep 22 23:15:03.104 DEBG Write :1068 deps:[JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33394 Sep 22 23:15:03.105 WARN returning error on flush!
33395 Sep 22 23:15:03.105 DEBG Flush :1069 extent_limit None deps:[JobId(1068), JobId(1067), JobId(1066)] res:false f:24 g:1
33396 Sep 22 23:15:03.105 WARN returning error on flush!
33397 Sep 22 23:15:03.105 DEBG Flush :1069 extent_limit None deps:[JobId(1068), JobId(1067), JobId(1066)] res:false f:24 g:1
33398 Sep 22 23:15:03.105 INFO [lossy] skipping 1069
33399 Sep 22 23:15:03.112 DEBG Flush :1069 extent_limit None deps:[JobId(1068), JobId(1067), JobId(1066)] res:true f:24 g:1
33400 Sep 22 23:15:03.112 INFO [lossy] sleeping 1 second
33401 Sep 22 23:15:03.112 ERRO [1] job id 1068 saw error GenericError("test error")
33402 Sep 22 23:15:03.112 ERRO [1] job id 1069 saw error GenericError("test error")
33403 Sep 22 23:15:03.112 ERRO [1] job id 1069 saw error GenericError("test error")
33404 Sep 22 23:15:03.115 DEBG [rc] retire 1069 clears [JobId(1067), JobId(1068), JobId(1069)], : downstairs
33405 Sep 22 23:15:03.159 DEBG Write :1000 deps:[] res:true
33406 Sep 22 23:15:03.453 WARN 6e5c0f70-fd56-4280-9d20-71288e488216 request to replace downstairs 127.0.0.1:42762 with 127.0.0.1:50216
33407 Sep 22 23:15:03.453 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 found new target: 127.0.0.1:50216 at 0
33408 Waited for some repair work, proceeding with test
33409 Sep 22 23:15:03.495 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
33410 Sep 22 23:15:03.496 DEBG Write :1070 deps:[JobId(1069), JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
33411 Sep 22 23:15:03.500 DEBG Flush :1071 extent_limit None deps:[JobId(1070), JobId(1069)] res:true f:25 g:1
33412 Sep 22 23:15:03.500 INFO [lossy] skipping 1072
33413 Sep 22 23:15:03.507 DEBG Read :1072 deps:[JobId(1071)] res:true
33414 Sep 22 23:15:03.530 INFO [lossy] sleeping 1 second
33415 Sep 22 23:15:03.537 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:1 g:1
33416 Sep 22 23:15:03.537 WARN [0] will exit pm_task, this downstairs Replacing
33417 Sep 22 23:15:03.538 ERRO 127.0.0.1:42762: proc: [0] client work task ended, Ok(Err([0] This downstairs now in Replacing)), so we end too, looper: 0
33418 Sep 22 23:15:03.538 INFO [0] 6e5c0f70-fd56-4280-9d20-71288e488216 Gone missing, transition from Replacing to Replaced
33419 Sep 22 23:15:03.538 INFO [0] 6e5c0f70-fd56-4280-9d20-71288e488216 connection to 127.0.0.1:42762 closed, looper: 0
33420 Sep 22 23:15:03.538 INFO [0] 127.0.0.1:42762 task reports connection:false
33421 Sep 22 23:15:03.538 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 Replaced Active Active
33422 Sep 22 23:15:03.538 INFO [0] 127.0.0.1:42762 task reports offline
33423 Sep 22 23:15:03.538 INFO Upstairs starts
33424 Sep 22 23:15:03.538 INFO Crucible Version: BuildInfo {
33425 version: "0.0.1",
33426 git_sha: "ed48f294784d46ea7d4bb99336918b74358eca46",
33427 git_commit_timestamp: "2023-09-22T22:51:18.000000000Z",
33428 git_branch: "main",
33429 rustc_semver: "1.70.0",
33430 rustc_channel: "stable",
33431 rustc_host_triple: "x86_64-unknown-illumos",
33432 rustc_commit_sha: "90c541806f23a127002de5b4038be731ba1458ca",
33433 cargo_triple: "x86_64-unknown-illumos",
33434 debug: true,
33435 opt_level: 0,
33436 }
33437 Sep 22 23:15:03.538 INFO Upstairs <-> Downstairs Message Version: 4
33438 Sep 22 23:15:03.538 INFO Crucible stats registered with UUID: 6e5c0f70-fd56-4280-9d20-71288e488216
33439 Sep 22 23:15:03.538 INFO Crucible 6e5c0f70-fd56-4280-9d20-71288e488216 has session id: bc593f63-fb7c-4043-a1b6-d037ee9b50f2
33440 Sep 22 23:15:03.538 WARN upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } disconnected, 0 jobs left, task: main
33441 Sep 22 23:15:03.538 WARN upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } was previously active, clearing, task: main
33442 Sep 22 23:15:03.538 INFO connection (127.0.0.1:49772): all done
33443 Sep 22 23:15:03.538 INFO [0] connecting to 127.0.0.1:50216, looper: 0
33444 Sep 22 23:15:03.539 INFO [1] connecting to 127.0.0.1:64149, looper: 1
33445 Sep 22 23:15:03.539 INFO [2] connecting to 127.0.0.1:58182, looper: 2
33446 Sep 22 23:15:03.539 INFO up_listen starts, task: up_listen
33447 Sep 22 23:15:03.539 INFO Wait for all three downstairs to come online
33448 Sep 22 23:15:03.539 INFO Flush timeout: 0.5
33449 Sep 22 23:15:03.539 DEBG up_ds_listen was notified
33450 Sep 22 23:15:03.539 DEBG up_ds_listen process 1001
33451 Sep 22 23:15:03.539 DEBG [A] ack job 1001:2, : downstairs
33452 Sep 22 23:15:03.609 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
33453 Sep 22 23:15:03.610 DEBG up_ds_listen checked 1 jobs, back to waiting
33454 Sep 22 23:15:03.610 INFO [0] 6e5c0f70-fd56-4280-9d20-71288e488216 looper connected, looper: 0
33455 Sep 22 23:15:03.610 INFO [0] Proc runs for 127.0.0.1:50216 in state New
33456 Sep 22 23:15:03.610 INFO [1] 6e5c0f70-fd56-4280-9d20-71288e488216 looper connected, looper: 1
33457 Sep 22 23:15:03.610 INFO [1] Proc runs for 127.0.0.1:64149 in state New
33458 Sep 22 23:15:03.610 INFO [2] 6e5c0f70-fd56-4280-9d20-71288e488216 looper connected, looper: 2
33459 Sep 22 23:15:03.610 INFO [2] Proc runs for 127.0.0.1:58182 in state New
33460 Sep 22 23:15:03.610 INFO accepted connection from 127.0.0.1:33442, task: main
33461 Sep 22 23:15:03.610 INFO accepted connection from 127.0.0.1:39895, task: main
33462 Sep 22 23:15:03.610 INFO accepted connection from 127.0.0.1:37976, task: main
33463 Sep 22 23:15:03.611 INFO Connection request from 6e5c0f70-fd56-4280-9d20-71288e488216 with version 4, task: proc
33464 Sep 22 23:15:03.611 INFO upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 } connected, version 4, task: proc
33465 Sep 22 23:15:03.611 INFO Connection request from 6e5c0f70-fd56-4280-9d20-71288e488216 with version 4, task: proc
33466 Sep 22 23:15:03.611 INFO upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 } connected, version 4, task: proc
33467 Sep 22 23:15:03.611 INFO Connection request from 6e5c0f70-fd56-4280-9d20-71288e488216 with version 4, task: proc
33468 Sep 22 23:15:03.611 INFO upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 } connected, version 4, task: proc
33469 The guest has requested activation
33470 Sep 22 23:15:03.611 INFO [0] 6e5c0f70-fd56-4280-9d20-71288e488216 (2cf90053-4dbd-4cc4-8468-9d35f085e47a) New New New ds_transition to WaitActive
33471 Sep 22 23:15:03.611 INFO [0] Transition from New to WaitActive
33472 Sep 22 23:15:03.611 INFO [1] 6e5c0f70-fd56-4280-9d20-71288e488216 (2cf90053-4dbd-4cc4-8468-9d35f085e47a) WaitActive New New ds_transition to WaitActive
33473 Sep 22 23:15:03.611 INFO [1] Transition from New to WaitActive
33474 Sep 22 23:15:03.611 INFO [2] 6e5c0f70-fd56-4280-9d20-71288e488216 (2cf90053-4dbd-4cc4-8468-9d35f085e47a) WaitActive WaitActive New ds_transition to WaitActive
33475 Sep 22 23:15:03.611 INFO [2] Transition from New to WaitActive
33476 Sep 22 23:15:03.611 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 active request set
33477 Sep 22 23:15:03.611 INFO [0] received activate with gen 2
33478 Sep 22 23:15:03.611 INFO [0] client got ds_active_rx, promote! session 2cf90053-4dbd-4cc4-8468-9d35f085e47a
33479 Sep 22 23:15:03.611 INFO [1] received activate with gen 2
33480 Sep 22 23:15:03.611 INFO [1] client got ds_active_rx, promote! session 2cf90053-4dbd-4cc4-8468-9d35f085e47a
33481 Sep 22 23:15:03.611 INFO [2] received activate with gen 2
33482 Sep 22 23:15:03.611 INFO [2] client got ds_active_rx, promote! session 2cf90053-4dbd-4cc4-8468-9d35f085e47a
33483 Sep 22 23:15:03.612 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } to UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 }
33484 Sep 22 23:15:03.612 WARN Signaling to UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } thread that UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 } is being promoted (read-write)
33485 Sep 22 23:15:03.612 WARN Attempting RW takeover from UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } to UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 }
33486 Sep 22 23:15:03.612 WARN Signaling to UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } thread that UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 } is being promoted (read-write)
33487 Sep 22 23:15:03.612 INFO UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 } is now active (read-write)
33488 Sep 22 23:15:03.612 WARN Another upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 }, task: main
33489 Sep 22 23:15:03.612 INFO UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 } is now active (read-write)
33490 Sep 22 23:15:03.612 WARN Another upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 } promoted to active, shutting down connection for UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 }, task: main
33491 Sep 22 23:15:03.612 INFO UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 } is now active (read-write)
33492 Sep 22 23:15:03.612 INFO connection (127.0.0.1:48323): all done
33493 Sep 22 23:15:03.612 INFO connection (127.0.0.1:59274): all done
33494 Sep 22 23:15:03.613 ERRO [1] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) cmd_loop saw YouAreNoLongerActive 6e5c0f70-fd56-4280-9d20-71288e488216 2cf90053-4dbd-4cc4-8468-9d35f085e47a 2
33495 Sep 22 23:15:03.613 INFO [1] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) Replaced Active Active ds_transition to Disabled
33496 Sep 22 23:15:03.613 INFO [1] Transition from Active to Disabled
33497 Sep 22 23:15:03.613 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 set inactive, session f334c95e-b851-4a8a-a731-3fb69e42e934
33498 Sep 22 23:15:03.613 ERRO 127.0.0.1:64149: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1, looper: 1
33499 Sep 22 23:15:03.613 INFO [1] 6e5c0f70-fd56-4280-9d20-71288e488216 Gone missing, transition from Disabled to Disconnected
33500 Sep 22 23:15:03.613 INFO [1] 6e5c0f70-fd56-4280-9d20-71288e488216 connection to 127.0.0.1:64149 closed, looper: 1
33501 Sep 22 23:15:03.613 ERRO [2] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) cmd_loop saw YouAreNoLongerActive 6e5c0f70-fd56-4280-9d20-71288e488216 2cf90053-4dbd-4cc4-8468-9d35f085e47a 2
33502 Sep 22 23:15:03.613 INFO [2] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) Replaced Disconnected Active ds_transition to Disabled
33503 Sep 22 23:15:03.613 INFO [2] Transition from Active to Disabled
33504 Sep 22 23:15:03.613 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 set inactive, session f334c95e-b851-4a8a-a731-3fb69e42e934
33505 Sep 22 23:15:03.613 ERRO 127.0.0.1:58182: proc: Generation number is too low: saw YouAreNoLongerActive with larger gen 2 than ours 1, looper: 2
33506 Sep 22 23:15:03.613 INFO [2] 6e5c0f70-fd56-4280-9d20-71288e488216 Gone missing, transition from Disabled to Disconnected
33507 Sep 22 23:15:03.613 INFO [2] 6e5c0f70-fd56-4280-9d20-71288e488216 connection to 127.0.0.1:58182 closed, looper: 2
33508 Sep 22 23:15:03.613 WARN [1] pm_task rx.recv() is None
33509 Sep 22 23:15:03.613 INFO [1] 127.0.0.1:64149 task reports connection:false
33510 Sep 22 23:15:03.613 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 Replaced Disconnected Disconnected
33511 Sep 22 23:15:03.613 INFO [1] 127.0.0.1:64149 task reports offline
33512 Sep 22 23:15:03.613 INFO [2] 127.0.0.1:58182 task reports connection:false
33513 Sep 22 23:15:03.613 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 Replaced Disconnected Disconnected
33514 Sep 22 23:15:03.613 INFO [2] 127.0.0.1:58182 task reports offline
33515 Sep 22 23:15:03.613 WARN [2] pm_task rx.recv() is None
33516 Sep 22 23:15:03.614 INFO [0] downstairs client at 127.0.0.1:50216 has UUID 28f50308-941f-4d7f-bd0a-39a00206f2e7
33517 Sep 22 23:15:03.614 INFO [0] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 28f50308-941f-4d7f-bd0a-39a00206f2e7, encrypted: true, database_read_version: 1, database_write_version: 1 }
33518 Sep 22 23:15:03.614 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 WaitActive WaitActive WaitActive
33519 Sep 22 23:15:03.614 INFO [1] downstairs client at 127.0.0.1:64149 has UUID df366b3b-ce8f-4a81-b993-9efbc4225b81
33520 Sep 22 23:15:03.614 INFO [1] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: df366b3b-ce8f-4a81-b993-9efbc4225b81, encrypted: true, database_read_version: 1, database_write_version: 1 }
33521 Sep 22 23:15:03.614 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 WaitActive WaitActive WaitActive
33522 Sep 22 23:15:03.614 INFO [2] downstairs client at 127.0.0.1:58182 has UUID 1361b8e1-8898-44ce-892b-e72b28f2a9df
33523 Sep 22 23:15:03.614 INFO [2] Got region def RegionDefinition { block_size: 512, extent_size: Block { value: 512, shift: 9 }, extent_count: 188, uuid: 1361b8e1-8898-44ce-892b-e72b28f2a9df, encrypted: true, database_read_version: 1, database_write_version: 1 }
33524 Sep 22 23:15:03.614 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 WaitActive WaitActive WaitActive
33525 Sep 22 23:15:03.623 INFO Current flush_numbers [0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
33526 Sep 22 23:15:03.625 INFO Current flush_numbers [0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
33527 Sep 22 23:15:03.626 INFO Current flush_numbers [0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
33528 Sep 22 23:15:03.639 INFO Downstairs has completed Negotiation, task: proc
33529 Sep 22 23:15:03.640 INFO Downstairs has completed Negotiation, task: proc
33530 Sep 22 23:15:03.641 INFO Downstairs has completed Negotiation, task: proc
33531 Sep 22 23:15:03.641 INFO [0] 6e5c0f70-fd56-4280-9d20-71288e488216 (2cf90053-4dbd-4cc4-8468-9d35f085e47a) WaitActive WaitActive WaitActive ds_transition to WaitQuorum
33532 Sep 22 23:15:03.641 INFO [0] Transition from WaitActive to WaitQuorum
33533 Sep 22 23:15:03.641 WARN [0] new RM replaced this: None
33534 Sep 22 23:15:03.641 INFO [0] Starts reconcile loop
33535 Sep 22 23:15:03.641 INFO [1] 6e5c0f70-fd56-4280-9d20-71288e488216 (2cf90053-4dbd-4cc4-8468-9d35f085e47a) WaitQuorum WaitActive WaitActive ds_transition to WaitQuorum
33536 Sep 22 23:15:03.641 INFO [1] Transition from WaitActive to WaitQuorum
33537 Sep 22 23:15:03.641 WARN [1] new RM replaced this: None
33538 Sep 22 23:15:03.641 INFO [1] Starts reconcile loop
33539 Sep 22 23:15:03.642 INFO [2] 6e5c0f70-fd56-4280-9d20-71288e488216 (2cf90053-4dbd-4cc4-8468-9d35f085e47a) WaitQuorum WaitQuorum WaitActive ds_transition to WaitQuorum
33540 Sep 22 23:15:03.642 INFO [2] Transition from WaitActive to WaitQuorum
33541 Sep 22 23:15:03.642 WARN [2] new RM replaced this: None
33542 Sep 22 23:15:03.642 INFO [2] Starts reconcile loop
33543 Sep 22 23:15:03.642 INFO [0] 127.0.0.1:50216 task reports connection:true
33544 Sep 22 23:15:03.642 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 WaitQuorum WaitQuorum WaitQuorum
33545 Sep 22 23:15:03.642 INFO [0]R flush_numbers[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
33546 Sep 22 23:15:03.642 INFO [0]R generation[0..12]: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
33547 Sep 22 23:15:03.642 INFO [0]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
33548 Sep 22 23:15:03.642 INFO [1]R flush_numbers[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
33549 Sep 22 23:15:03.642 INFO [1]R generation[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
33550 Sep 22 23:15:03.642 INFO [1]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
33551 Sep 22 23:15:03.642 INFO [2]R flush_numbers[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
33552 Sep 22 23:15:03.642 INFO [2]R generation[0..12]: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
33553 Sep 22 23:15:03.642 INFO [2]R dirty[0..12]: [false, false, false, false, false, false, false, false, false, false, false, false]
33554 Sep 22 23:15:03.642 INFO Max found gen is 2
33555 Sep 22 23:15:03.642 INFO Generation requested: 2 >= found:2
33556 Sep 22 23:15:03.642 INFO Next flush: 2
33557 Sep 22 23:15:03.642 INFO Extent 0 has flush number mismatch, : mend
33558 Sep 22 23:15:03.642 INFO First source client ID for extent 0, mrl: flush_mismatch, : mend
33559 Sep 22 23:15:03.642 INFO extent:0 gens: 0 1 1, mrl: flush_mismatch, : mend
33560 Sep 22 23:15:03.642 INFO extent:0 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33561 Sep 22 23:15:03.642 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33562 Sep 22 23:15:03.642 INFO extent:0 dirty: false false false, mrl: flush_mismatch, : mend
33563 Sep 22 23:15:03.642 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33564 Sep 22 23:15:03.642 INFO find dest for source 1 for extent at index 0, mrl: flush_mismatch, : mend
33565 Sep 22 23:15:03.642 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33566 Sep 22 23:15:03.642 INFO Extent 1 has flush number mismatch, : mend
33567 Sep 22 23:15:03.642 INFO First source client ID for extent 1, mrl: flush_mismatch, : mend
33568 Sep 22 23:15:03.642 INFO extent:1 gens: 0 1 1, mrl: flush_mismatch, : mend
33569 Sep 22 23:15:03.642 INFO extent:1 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33570 Sep 22 23:15:03.642 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33571 Sep 22 23:15:03.642 INFO extent:1 dirty: false false false, mrl: flush_mismatch, : mend
33572 Sep 22 23:15:03.642 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33573 Sep 22 23:15:03.642 INFO find dest for source 1 for extent at index 1, mrl: flush_mismatch, : mend
33574 Sep 22 23:15:03.642 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33575 Sep 22 23:15:03.642 INFO Extent 2 has flush number mismatch, : mend
33576 Sep 22 23:15:03.642 INFO First source client ID for extent 2, mrl: flush_mismatch, : mend
33577 Sep 22 23:15:03.642 INFO extent:2 gens: 0 1 1, mrl: flush_mismatch, : mend
33578 Sep 22 23:15:03.642 INFO extent:2 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33579 Sep 22 23:15:03.642 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33580 Sep 22 23:15:03.642 INFO extent:2 dirty: false false false, mrl: flush_mismatch, : mend
33581 Sep 22 23:15:03.642 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33582 Sep 22 23:15:03.642 INFO find dest for source 1 for extent at index 2, mrl: flush_mismatch, : mend
33583 Sep 22 23:15:03.642 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33584 Sep 22 23:15:03.642 INFO Extent 3 has flush number mismatch, : mend
33585 Sep 22 23:15:03.642 INFO First source client ID for extent 3, mrl: flush_mismatch, : mend
33586 Sep 22 23:15:03.642 INFO extent:3 gens: 0 1 1, mrl: flush_mismatch, : mend
33587 Sep 22 23:15:03.642 INFO extent:3 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33588 Sep 22 23:15:03.642 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33589 Sep 22 23:15:03.642 INFO extent:3 dirty: false false false, mrl: flush_mismatch, : mend
33590 Sep 22 23:15:03.642 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33591 Sep 22 23:15:03.642 INFO find dest for source 1 for extent at index 3, mrl: flush_mismatch, : mend
33592 Sep 22 23:15:03.642 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33593 Sep 22 23:15:03.642 INFO Extent 4 has flush number mismatch, : mend
33594 Sep 22 23:15:03.643 INFO First source client ID for extent 4, mrl: flush_mismatch, : mend
33595 Sep 22 23:15:03.643 INFO extent:4 gens: 0 1 1, mrl: flush_mismatch, : mend
33596 Sep 22 23:15:03.643 INFO extent:4 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33597 Sep 22 23:15:03.643 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33598 Sep 22 23:15:03.643 INFO extent:4 dirty: false false false, mrl: flush_mismatch, : mend
33599 Sep 22 23:15:03.643 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33600 Sep 22 23:15:03.643 INFO find dest for source 1 for extent at index 4, mrl: flush_mismatch, : mend
33601 Sep 22 23:15:03.643 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33602 Sep 22 23:15:03.643 INFO Extent 5 has flush number mismatch, : mend
33603 Sep 22 23:15:03.643 INFO First source client ID for extent 5, mrl: flush_mismatch, : mend
33604 Sep 22 23:15:03.643 INFO extent:5 gens: 0 1 1, mrl: flush_mismatch, : mend
33605 Sep 22 23:15:03.643 INFO extent:5 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33606 Sep 22 23:15:03.643 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33607 Sep 22 23:15:03.643 INFO extent:5 dirty: false false false, mrl: flush_mismatch, : mend
33608 Sep 22 23:15:03.643 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33609 Sep 22 23:15:03.643 INFO find dest for source 1 for extent at index 5, mrl: flush_mismatch, : mend
33610 Sep 22 23:15:03.643 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33611 Sep 22 23:15:03.643 INFO Extent 6 has flush number mismatch, : mend
33612 Sep 22 23:15:03.643 INFO First source client ID for extent 6, mrl: flush_mismatch, : mend
33613 Sep 22 23:15:03.643 INFO extent:6 gens: 0 1 1, mrl: flush_mismatch, : mend
33614 Sep 22 23:15:03.643 INFO extent:6 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33615 Sep 22 23:15:03.643 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33616 Sep 22 23:15:03.643 INFO extent:6 dirty: false false false, mrl: flush_mismatch, : mend
33617 Sep 22 23:15:03.643 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33618 Sep 22 23:15:03.643 INFO find dest for source 1 for extent at index 6, mrl: flush_mismatch, : mend
33619 Sep 22 23:15:03.643 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33620 Sep 22 23:15:03.643 INFO Extent 7 has flush number mismatch, : mend
33621 Sep 22 23:15:03.643 INFO First source client ID for extent 7, mrl: flush_mismatch, : mend
33622 Sep 22 23:15:03.643 INFO extent:7 gens: 0 1 1, mrl: flush_mismatch, : mend
33623 Sep 22 23:15:03.643 INFO extent:7 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33624 Sep 22 23:15:03.643 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33625 Sep 22 23:15:03.643 INFO extent:7 dirty: false false false, mrl: flush_mismatch, : mend
33626 Sep 22 23:15:03.643 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33627 Sep 22 23:15:03.643 INFO find dest for source 1 for extent at index 7, mrl: flush_mismatch, : mend
33628 Sep 22 23:15:03.643 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33629 Sep 22 23:15:03.643 INFO Extent 8 has flush number mismatch, : mend
33630 Sep 22 23:15:03.643 INFO First source client ID for extent 8, mrl: flush_mismatch, : mend
33631 Sep 22 23:15:03.643 INFO extent:8 gens: 0 1 1, mrl: flush_mismatch, : mend
33632 Sep 22 23:15:03.643 INFO extent:8 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33633 Sep 22 23:15:03.643 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33634 Sep 22 23:15:03.643 INFO extent:8 dirty: false false false, mrl: flush_mismatch, : mend
33635 Sep 22 23:15:03.643 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33636 Sep 22 23:15:03.643 INFO find dest for source 1 for extent at index 8, mrl: flush_mismatch, : mend
33637 Sep 22 23:15:03.643 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33638 Sep 22 23:15:03.643 INFO Extent 9 has flush number mismatch, : mend
33639 Sep 22 23:15:03.643 INFO First source client ID for extent 9, mrl: flush_mismatch, : mend
33640 Sep 22 23:15:03.643 INFO extent:9 gens: 0 1 1, mrl: flush_mismatch, : mend
33641 Sep 22 23:15:03.643 INFO extent:9 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33642 Sep 22 23:15:03.643 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33643 Sep 22 23:15:03.643 INFO extent:9 dirty: false false false, mrl: flush_mismatch, : mend
33644 Sep 22 23:15:03.643 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33645 Sep 22 23:15:03.643 INFO find dest for source 1 for extent at index 9, mrl: flush_mismatch, : mend
33646 Sep 22 23:15:03.643 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33647 Sep 22 23:15:03.643 INFO Extent 10 has flush number mismatch, : mend
33648 Sep 22 23:15:03.643 INFO First source client ID for extent 10, mrl: flush_mismatch, : mend
33649 Sep 22 23:15:03.643 INFO extent:10 gens: 0 1 1, mrl: flush_mismatch, : mend
33650 Sep 22 23:15:03.643 INFO extent:10 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33651 Sep 22 23:15:03.643 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33652 Sep 22 23:15:03.643 INFO extent:10 dirty: false false false, mrl: flush_mismatch, : mend
33653 Sep 22 23:15:03.643 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33654 Sep 22 23:15:03.643 INFO find dest for source 1 for extent at index 10, mrl: flush_mismatch, : mend
33655 Sep 22 23:15:03.643 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33656 Sep 22 23:15:03.643 INFO Extent 11 has flush number mismatch, : mend
33657 Sep 22 23:15:03.643 INFO First source client ID for extent 11, mrl: flush_mismatch, : mend
33658 Sep 22 23:15:03.643 INFO extent:11 gens: 0 1 1, mrl: flush_mismatch, : mend
33659 Sep 22 23:15:03.643 INFO extent:11 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33660 Sep 22 23:15:03.643 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33661 Sep 22 23:15:03.643 INFO extent:11 dirty: false false false, mrl: flush_mismatch, : mend
33662 Sep 22 23:15:03.643 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33663 Sep 22 23:15:03.643 INFO find dest for source 1 for extent at index 11, mrl: flush_mismatch, : mend
33664 Sep 22 23:15:03.643 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33665 Sep 22 23:15:03.643 INFO Extent 12 has flush number mismatch, : mend
33666 Sep 22 23:15:03.643 INFO First source client ID for extent 12, mrl: flush_mismatch, : mend
33667 Sep 22 23:15:03.643 INFO extent:12 gens: 0 1 1, mrl: flush_mismatch, : mend
33668 Sep 22 23:15:03.643 INFO extent:12 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33669 Sep 22 23:15:03.643 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33670 Sep 22 23:15:03.643 INFO extent:12 dirty: false false false, mrl: flush_mismatch, : mend
33671 Sep 22 23:15:03.643 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33672 Sep 22 23:15:03.643 INFO find dest for source 1 for extent at index 12, mrl: flush_mismatch, : mend
33673 Sep 22 23:15:03.643 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33674 Sep 22 23:15:03.643 INFO Extent 13 has flush number mismatch, : mend
33675 Sep 22 23:15:03.643 INFO First source client ID for extent 13, mrl: flush_mismatch, : mend
33676 Sep 22 23:15:03.643 INFO extent:13 gens: 0 1 1, mrl: flush_mismatch, : mend
33677 Sep 22 23:15:03.643 INFO extent:13 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33678 Sep 22 23:15:03.643 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33679 Sep 22 23:15:03.643 INFO extent:13 dirty: false false false, mrl: flush_mismatch, : mend
33680 Sep 22 23:15:03.643 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33681 Sep 22 23:15:03.643 INFO find dest for source 1 for extent at index 13, mrl: flush_mismatch, : mend
33682 Sep 22 23:15:03.644 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33683 Sep 22 23:15:03.644 INFO Extent 14 has flush number mismatch, : mend
33684 Sep 22 23:15:03.644 INFO First source client ID for extent 14, mrl: flush_mismatch, : mend
33685 Sep 22 23:15:03.644 INFO extent:14 gens: 0 1 1, mrl: flush_mismatch, : mend
33686 Sep 22 23:15:03.644 INFO extent:14 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33687 Sep 22 23:15:03.644 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33688 Sep 22 23:15:03.644 INFO extent:14 dirty: false false false, mrl: flush_mismatch, : mend
33689 Sep 22 23:15:03.644 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33690 Sep 22 23:15:03.644 INFO find dest for source 1 for extent at index 14, mrl: flush_mismatch, : mend
33691 Sep 22 23:15:03.644 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33692 Sep 22 23:15:03.644 INFO Extent 15 has flush number mismatch, : mend
33693 Sep 22 23:15:03.644 INFO First source client ID for extent 15, mrl: flush_mismatch, : mend
33694 Sep 22 23:15:03.644 INFO extent:15 gens: 0 1 1, mrl: flush_mismatch, : mend
33695 Sep 22 23:15:03.644 INFO extent:15 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33696 Sep 22 23:15:03.644 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33697 Sep 22 23:15:03.644 INFO extent:15 dirty: false false false, mrl: flush_mismatch, : mend
33698 Sep 22 23:15:03.644 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33699 Sep 22 23:15:03.644 INFO find dest for source 1 for extent at index 15, mrl: flush_mismatch, : mend
33700 Sep 22 23:15:03.644 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33701 Sep 22 23:15:03.644 INFO Extent 16 has flush number mismatch, : mend
33702 Sep 22 23:15:03.644 INFO First source client ID for extent 16, mrl: flush_mismatch, : mend
33703 Sep 22 23:15:03.644 INFO extent:16 gens: 0 1 1, mrl: flush_mismatch, : mend
33704 Sep 22 23:15:03.644 INFO extent:16 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33705 Sep 22 23:15:03.644 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33706 Sep 22 23:15:03.644 INFO extent:16 dirty: false false false, mrl: flush_mismatch, : mend
33707 Sep 22 23:15:03.644 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33708 Sep 22 23:15:03.644 INFO find dest for source 1 for extent at index 16, mrl: flush_mismatch, : mend
33709 Sep 22 23:15:03.644 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33710 Sep 22 23:15:03.644 INFO Extent 17 has flush number mismatch, : mend
33711 Sep 22 23:15:03.644 INFO First source client ID for extent 17, mrl: flush_mismatch, : mend
33712 Sep 22 23:15:03.644 INFO extent:17 gens: 0 1 1, mrl: flush_mismatch, : mend
33713 Sep 22 23:15:03.644 INFO extent:17 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33714 Sep 22 23:15:03.644 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33715 Sep 22 23:15:03.644 INFO extent:17 dirty: false false false, mrl: flush_mismatch, : mend
33716 Sep 22 23:15:03.644 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33717 Sep 22 23:15:03.644 INFO find dest for source 1 for extent at index 17, mrl: flush_mismatch, : mend
33718 Sep 22 23:15:03.644 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33719 Sep 22 23:15:03.644 INFO Extent 18 has flush number mismatch, : mend
33720 Sep 22 23:15:03.644 INFO First source client ID for extent 18, mrl: flush_mismatch, : mend
33721 Sep 22 23:15:03.644 INFO extent:18 gens: 0 1 1, mrl: flush_mismatch, : mend
33722 Sep 22 23:15:03.644 INFO extent:18 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33723 Sep 22 23:15:03.644 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33724 Sep 22 23:15:03.644 INFO extent:18 dirty: false false false, mrl: flush_mismatch, : mend
33725 Sep 22 23:15:03.644 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33726 Sep 22 23:15:03.644 INFO find dest for source 1 for extent at index 18, mrl: flush_mismatch, : mend
33727 Sep 22 23:15:03.644 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33728 Sep 22 23:15:03.644 INFO Extent 19 has flush number mismatch, : mend
33729 Sep 22 23:15:03.644 INFO First source client ID for extent 19, mrl: flush_mismatch, : mend
33730 Sep 22 23:15:03.644 INFO extent:19 gens: 0 1 1, mrl: flush_mismatch, : mend
33731 Sep 22 23:15:03.644 INFO extent:19 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33732 Sep 22 23:15:03.644 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33733 Sep 22 23:15:03.644 INFO extent:19 dirty: false false false, mrl: flush_mismatch, : mend
33734 Sep 22 23:15:03.644 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33735 Sep 22 23:15:03.644 INFO find dest for source 1 for extent at index 19, mrl: flush_mismatch, : mend
33736 Sep 22 23:15:03.644 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33737 Sep 22 23:15:03.644 INFO Extent 20 has flush number mismatch, : mend
33738 Sep 22 23:15:03.644 INFO First source client ID for extent 20, mrl: flush_mismatch, : mend
33739 Sep 22 23:15:03.644 INFO extent:20 gens: 0 1 1, mrl: flush_mismatch, : mend
33740 Sep 22 23:15:03.644 INFO extent:20 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33741 Sep 22 23:15:03.644 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33742 Sep 22 23:15:03.644 INFO extent:20 dirty: false false false, mrl: flush_mismatch, : mend
33743 Sep 22 23:15:03.644 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33744 Sep 22 23:15:03.644 INFO find dest for source 1 for extent at index 20, mrl: flush_mismatch, : mend
33745 Sep 22 23:15:03.644 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33746 Sep 22 23:15:03.644 INFO Extent 21 has flush number mismatch, : mend
33747 Sep 22 23:15:03.644 INFO First source client ID for extent 21, mrl: flush_mismatch, : mend
33748 Sep 22 23:15:03.644 INFO extent:21 gens: 0 1 1, mrl: flush_mismatch, : mend
33749 Sep 22 23:15:03.644 INFO extent:21 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33750 Sep 22 23:15:03.644 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33751 Sep 22 23:15:03.644 INFO extent:21 dirty: false false false, mrl: flush_mismatch, : mend
33752 Sep 22 23:15:03.644 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33753 Sep 22 23:15:03.644 INFO find dest for source 1 for extent at index 21, mrl: flush_mismatch, : mend
33754 Sep 22 23:15:03.644 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33755 Sep 22 23:15:03.644 INFO Extent 22 has flush number mismatch, : mend
33756 Sep 22 23:15:03.644 INFO First source client ID for extent 22, mrl: flush_mismatch, : mend
33757 Sep 22 23:15:03.644 INFO extent:22 gens: 0 1 1, mrl: flush_mismatch, : mend
33758 Sep 22 23:15:03.644 INFO extent:22 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33759 Sep 22 23:15:03.644 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33760 Sep 22 23:15:03.644 INFO extent:22 dirty: false false false, mrl: flush_mismatch, : mend
33761 Sep 22 23:15:03.644 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33762 Sep 22 23:15:03.644 INFO find dest for source 1 for extent at index 22, mrl: flush_mismatch, : mend
33763 Sep 22 23:15:03.644 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33764 Sep 22 23:15:03.644 INFO Extent 23 has flush number mismatch, : mend
33765 Sep 22 23:15:03.644 INFO First source client ID for extent 23, mrl: flush_mismatch, : mend
33766 Sep 22 23:15:03.644 INFO extent:23 gens: 0 1 1, mrl: flush_mismatch, : mend
33767 Sep 22 23:15:03.644 INFO extent:23 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33768 Sep 22 23:15:03.644 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33769 Sep 22 23:15:03.644 INFO extent:23 dirty: false false false, mrl: flush_mismatch, : mend
33770 Sep 22 23:15:03.644 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33771 Sep 22 23:15:03.645 INFO find dest for source 1 for extent at index 23, mrl: flush_mismatch, : mend
33772 Sep 22 23:15:03.645 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33773 Sep 22 23:15:03.645 INFO Extent 24 has flush number mismatch, : mend
33774 Sep 22 23:15:03.645 INFO First source client ID for extent 24, mrl: flush_mismatch, : mend
33775 Sep 22 23:15:03.645 INFO extent:24 gens: 0 1 1, mrl: flush_mismatch, : mend
33776 Sep 22 23:15:03.645 INFO extent:24 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33777 Sep 22 23:15:03.645 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33778 Sep 22 23:15:03.645 INFO extent:24 dirty: false false false, mrl: flush_mismatch, : mend
33779 Sep 22 23:15:03.645 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33780 Sep 22 23:15:03.645 INFO find dest for source 1 for extent at index 24, mrl: flush_mismatch, : mend
33781 Sep 22 23:15:03.645 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33782 Sep 22 23:15:03.645 INFO Extent 25 has flush number mismatch, : mend
33783 Sep 22 23:15:03.645 INFO First source client ID for extent 25, mrl: flush_mismatch, : mend
33784 Sep 22 23:15:03.645 INFO extent:25 gens: 0 1 1, mrl: flush_mismatch, : mend
33785 Sep 22 23:15:03.645 INFO extent:25 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33786 Sep 22 23:15:03.645 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33787 Sep 22 23:15:03.645 INFO extent:25 dirty: false false false, mrl: flush_mismatch, : mend
33788 Sep 22 23:15:03.645 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33789 Sep 22 23:15:03.645 INFO find dest for source 1 for extent at index 25, mrl: flush_mismatch, : mend
33790 Sep 22 23:15:03.645 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33791 Sep 22 23:15:03.645 INFO Extent 26 has flush number mismatch, : mend
33792 Sep 22 23:15:03.645 INFO First source client ID for extent 26, mrl: flush_mismatch, : mend
33793 Sep 22 23:15:03.645 INFO extent:26 gens: 0 1 1, mrl: flush_mismatch, : mend
33794 Sep 22 23:15:03.645 INFO extent:26 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33795 Sep 22 23:15:03.645 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33796 Sep 22 23:15:03.645 INFO extent:26 dirty: false false false, mrl: flush_mismatch, : mend
33797 Sep 22 23:15:03.645 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33798 Sep 22 23:15:03.645 INFO find dest for source 1 for extent at index 26, mrl: flush_mismatch, : mend
33799 Sep 22 23:15:03.645 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33800 Sep 22 23:15:03.645 INFO Extent 27 has flush number mismatch, : mend
33801 Sep 22 23:15:03.645 INFO First source client ID for extent 27, mrl: flush_mismatch, : mend
33802 Sep 22 23:15:03.645 INFO extent:27 gens: 0 1 1, mrl: flush_mismatch, : mend
33803 Sep 22 23:15:03.645 INFO extent:27 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33804 Sep 22 23:15:03.645 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33805 Sep 22 23:15:03.645 INFO extent:27 dirty: false false false, mrl: flush_mismatch, : mend
33806 Sep 22 23:15:03.645 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33807 Sep 22 23:15:03.645 INFO find dest for source 1 for extent at index 27, mrl: flush_mismatch, : mend
33808 Sep 22 23:15:03.645 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33809 Sep 22 23:15:03.645 INFO Extent 28 has flush number mismatch, : mend
33810 Sep 22 23:15:03.645 INFO First source client ID for extent 28, mrl: flush_mismatch, : mend
33811 Sep 22 23:15:03.645 INFO extent:28 gens: 0 1 1, mrl: flush_mismatch, : mend
33812 Sep 22 23:15:03.645 INFO extent:28 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33813 Sep 22 23:15:03.645 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33814 Sep 22 23:15:03.645 INFO extent:28 dirty: false false false, mrl: flush_mismatch, : mend
33815 Sep 22 23:15:03.645 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33816 Sep 22 23:15:03.645 INFO find dest for source 1 for extent at index 28, mrl: flush_mismatch, : mend
33817 Sep 22 23:15:03.645 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33818 Sep 22 23:15:03.645 INFO Extent 29 has flush number mismatch, : mend
33819 Sep 22 23:15:03.645 INFO First source client ID for extent 29, mrl: flush_mismatch, : mend
33820 Sep 22 23:15:03.645 INFO extent:29 gens: 0 1 1, mrl: flush_mismatch, : mend
33821 Sep 22 23:15:03.645 INFO extent:29 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33822 Sep 22 23:15:03.645 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33823 Sep 22 23:15:03.645 INFO extent:29 dirty: false false false, mrl: flush_mismatch, : mend
33824 Sep 22 23:15:03.645 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33825 Sep 22 23:15:03.645 INFO find dest for source 1 for extent at index 29, mrl: flush_mismatch, : mend
33826 Sep 22 23:15:03.645 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33827 Sep 22 23:15:03.645 INFO Extent 30 has flush number mismatch, : mend
33828 Sep 22 23:15:03.645 INFO First source client ID for extent 30, mrl: flush_mismatch, : mend
33829 Sep 22 23:15:03.645 INFO extent:30 gens: 0 1 1, mrl: flush_mismatch, : mend
33830 Sep 22 23:15:03.645 INFO extent:30 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33831 Sep 22 23:15:03.645 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33832 Sep 22 23:15:03.645 INFO extent:30 dirty: false false false, mrl: flush_mismatch, : mend
33833 Sep 22 23:15:03.645 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33834 Sep 22 23:15:03.645 INFO find dest for source 1 for extent at index 30, mrl: flush_mismatch, : mend
33835 Sep 22 23:15:03.645 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33836 Sep 22 23:15:03.645 INFO Extent 31 has flush number mismatch, : mend
33837 Sep 22 23:15:03.645 INFO First source client ID for extent 31, mrl: flush_mismatch, : mend
33838 Sep 22 23:15:03.645 INFO extent:31 gens: 0 1 1, mrl: flush_mismatch, : mend
33839 Sep 22 23:15:03.645 INFO extent:31 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33840 Sep 22 23:15:03.645 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33841 Sep 22 23:15:03.645 INFO extent:31 dirty: false false false, mrl: flush_mismatch, : mend
33842 Sep 22 23:15:03.645 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33843 Sep 22 23:15:03.645 INFO find dest for source 1 for extent at index 31, mrl: flush_mismatch, : mend
33844 Sep 22 23:15:03.645 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33845 Sep 22 23:15:03.645 INFO Extent 32 has flush number mismatch, : mend
33846 Sep 22 23:15:03.645 INFO First source client ID for extent 32, mrl: flush_mismatch, : mend
33847 Sep 22 23:15:03.645 INFO extent:32 gens: 0 1 1, mrl: flush_mismatch, : mend
33848 Sep 22 23:15:03.645 INFO extent:32 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33849 Sep 22 23:15:03.645 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33850 Sep 22 23:15:03.645 INFO extent:32 dirty: false false false, mrl: flush_mismatch, : mend
33851 Sep 22 23:15:03.645 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33852 Sep 22 23:15:03.645 INFO find dest for source 1 for extent at index 32, mrl: flush_mismatch, : mend
33853 Sep 22 23:15:03.645 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33854 Sep 22 23:15:03.645 INFO Extent 33 has flush number mismatch, : mend
33855 Sep 22 23:15:03.645 INFO First source client ID for extent 33, mrl: flush_mismatch, : mend
33856 Sep 22 23:15:03.645 INFO extent:33 gens: 0 1 1, mrl: flush_mismatch, : mend
33857 Sep 22 23:15:03.645 INFO extent:33 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33858 Sep 22 23:15:03.645 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33859 Sep 22 23:15:03.645 INFO extent:33 dirty: false false false, mrl: flush_mismatch, : mend
33860 Sep 22 23:15:03.646 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33861 Sep 22 23:15:03.646 INFO find dest for source 1 for extent at index 33, mrl: flush_mismatch, : mend
33862 Sep 22 23:15:03.646 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33863 Sep 22 23:15:03.646 INFO Extent 34 has flush number mismatch, : mend
33864 Sep 22 23:15:03.646 INFO First source client ID for extent 34, mrl: flush_mismatch, : mend
33865 Sep 22 23:15:03.646 INFO extent:34 gens: 0 1 1, mrl: flush_mismatch, : mend
33866 Sep 22 23:15:03.646 INFO extent:34 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33867 Sep 22 23:15:03.646 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33868 Sep 22 23:15:03.646 INFO extent:34 dirty: false false false, mrl: flush_mismatch, : mend
33869 Sep 22 23:15:03.646 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33870 Sep 22 23:15:03.646 INFO find dest for source 1 for extent at index 34, mrl: flush_mismatch, : mend
33871 Sep 22 23:15:03.646 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33872 Sep 22 23:15:03.646 INFO Extent 35 has flush number mismatch, : mend
33873 Sep 22 23:15:03.646 INFO First source client ID for extent 35, mrl: flush_mismatch, : mend
33874 Sep 22 23:15:03.646 INFO extent:35 gens: 0 1 1, mrl: flush_mismatch, : mend
33875 Sep 22 23:15:03.646 INFO extent:35 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33876 Sep 22 23:15:03.646 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33877 Sep 22 23:15:03.646 INFO extent:35 dirty: false false false, mrl: flush_mismatch, : mend
33878 Sep 22 23:15:03.646 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33879 Sep 22 23:15:03.646 INFO find dest for source 1 for extent at index 35, mrl: flush_mismatch, : mend
33880 Sep 22 23:15:03.646 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33881 Sep 22 23:15:03.646 INFO Extent 36 has flush number mismatch, : mend
33882 Sep 22 23:15:03.646 INFO First source client ID for extent 36, mrl: flush_mismatch, : mend
33883 Sep 22 23:15:03.646 INFO extent:36 gens: 0 1 1, mrl: flush_mismatch, : mend
33884 Sep 22 23:15:03.646 INFO extent:36 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33885 Sep 22 23:15:03.646 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33886 Sep 22 23:15:03.646 INFO extent:36 dirty: false false false, mrl: flush_mismatch, : mend
33887 Sep 22 23:15:03.646 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33888 Sep 22 23:15:03.646 INFO find dest for source 1 for extent at index 36, mrl: flush_mismatch, : mend
33889 Sep 22 23:15:03.646 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33890 Sep 22 23:15:03.646 INFO Extent 37 has flush number mismatch, : mend
33891 Sep 22 23:15:03.646 INFO First source client ID for extent 37, mrl: flush_mismatch, : mend
33892 Sep 22 23:15:03.646 INFO extent:37 gens: 0 1 1, mrl: flush_mismatch, : mend
33893 Sep 22 23:15:03.646 INFO extent:37 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33894 Sep 22 23:15:03.646 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33895 Sep 22 23:15:03.646 INFO extent:37 dirty: false false false, mrl: flush_mismatch, : mend
33896 Sep 22 23:15:03.646 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33897 Sep 22 23:15:03.646 INFO find dest for source 1 for extent at index 37, mrl: flush_mismatch, : mend
33898 Sep 22 23:15:03.646 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33899 Sep 22 23:15:03.646 INFO Extent 38 has flush number mismatch, : mend
33900 Sep 22 23:15:03.646 INFO First source client ID for extent 38, mrl: flush_mismatch, : mend
33901 Sep 22 23:15:03.646 INFO extent:38 gens: 0 1 1, mrl: flush_mismatch, : mend
33902 Sep 22 23:15:03.646 INFO extent:38 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33903 Sep 22 23:15:03.646 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33904 Sep 22 23:15:03.646 INFO extent:38 dirty: false false false, mrl: flush_mismatch, : mend
33905 Sep 22 23:15:03.646 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33906 Sep 22 23:15:03.646 INFO find dest for source 1 for extent at index 38, mrl: flush_mismatch, : mend
33907 Sep 22 23:15:03.646 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33908 Sep 22 23:15:03.646 INFO Extent 39 has flush number mismatch, : mend
33909 Sep 22 23:15:03.646 INFO First source client ID for extent 39, mrl: flush_mismatch, : mend
33910 Sep 22 23:15:03.646 INFO extent:39 gens: 0 1 1, mrl: flush_mismatch, : mend
33911 Sep 22 23:15:03.646 INFO extent:39 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33912 Sep 22 23:15:03.646 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33913 Sep 22 23:15:03.646 INFO extent:39 dirty: false false false, mrl: flush_mismatch, : mend
33914 Sep 22 23:15:03.646 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33915 Sep 22 23:15:03.646 INFO find dest for source 1 for extent at index 39, mrl: flush_mismatch, : mend
33916 Sep 22 23:15:03.646 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33917 Sep 22 23:15:03.646 INFO Extent 40 has flush number mismatch, : mend
33918 Sep 22 23:15:03.646 INFO First source client ID for extent 40, mrl: flush_mismatch, : mend
33919 Sep 22 23:15:03.646 INFO extent:40 gens: 0 1 1, mrl: flush_mismatch, : mend
33920 Sep 22 23:15:03.646 INFO extent:40 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33921 Sep 22 23:15:03.646 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33922 Sep 22 23:15:03.646 INFO extent:40 dirty: false false false, mrl: flush_mismatch, : mend
33923 Sep 22 23:15:03.646 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33924 Sep 22 23:15:03.646 INFO find dest for source 1 for extent at index 40, mrl: flush_mismatch, : mend
33925 Sep 22 23:15:03.646 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33926 Sep 22 23:15:03.646 INFO Extent 41 has flush number mismatch, : mend
33927 Sep 22 23:15:03.646 INFO First source client ID for extent 41, mrl: flush_mismatch, : mend
33928 Sep 22 23:15:03.646 INFO extent:41 gens: 0 1 1, mrl: flush_mismatch, : mend
33929 Sep 22 23:15:03.646 INFO extent:41 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33930 Sep 22 23:15:03.646 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33931 Sep 22 23:15:03.646 INFO extent:41 dirty: false false false, mrl: flush_mismatch, : mend
33932 Sep 22 23:15:03.646 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33933 Sep 22 23:15:03.646 INFO find dest for source 1 for extent at index 41, mrl: flush_mismatch, : mend
33934 Sep 22 23:15:03.646 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33935 Sep 22 23:15:03.646 INFO Extent 42 has flush number mismatch, : mend
33936 Sep 22 23:15:03.646 INFO First source client ID for extent 42, mrl: flush_mismatch, : mend
33937 Sep 22 23:15:03.646 INFO extent:42 gens: 0 1 1, mrl: flush_mismatch, : mend
33938 Sep 22 23:15:03.646 INFO extent:42 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33939 Sep 22 23:15:03.646 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33940 Sep 22 23:15:03.646 INFO extent:42 dirty: false false false, mrl: flush_mismatch, : mend
33941 Sep 22 23:15:03.646 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33942 Sep 22 23:15:03.646 INFO find dest for source 1 for extent at index 42, mrl: flush_mismatch, : mend
33943 Sep 22 23:15:03.646 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33944 Sep 22 23:15:03.646 INFO Extent 43 has flush number mismatch, : mend
33945 Sep 22 23:15:03.646 INFO First source client ID for extent 43, mrl: flush_mismatch, : mend
33946 Sep 22 23:15:03.646 INFO extent:43 gens: 0 1 1, mrl: flush_mismatch, : mend
33947 Sep 22 23:15:03.646 INFO extent:43 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33948 Sep 22 23:15:03.646 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33949 Sep 22 23:15:03.646 INFO extent:43 dirty: false false false, mrl: flush_mismatch, : mend
33950 Sep 22 23:15:03.646 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33951 Sep 22 23:15:03.647 INFO find dest for source 1 for extent at index 43, mrl: flush_mismatch, : mend
33952 Sep 22 23:15:03.647 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33953 Sep 22 23:15:03.647 INFO Extent 44 has flush number mismatch, : mend
33954 Sep 22 23:15:03.647 INFO First source client ID for extent 44, mrl: flush_mismatch, : mend
33955 Sep 22 23:15:03.647 INFO extent:44 gens: 0 1 1, mrl: flush_mismatch, : mend
33956 Sep 22 23:15:03.647 INFO extent:44 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33957 Sep 22 23:15:03.647 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33958 Sep 22 23:15:03.647 INFO extent:44 dirty: false false false, mrl: flush_mismatch, : mend
33959 Sep 22 23:15:03.647 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33960 Sep 22 23:15:03.647 INFO find dest for source 1 for extent at index 44, mrl: flush_mismatch, : mend
33961 Sep 22 23:15:03.647 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33962 Sep 22 23:15:03.647 INFO Extent 45 has flush number mismatch, : mend
33963 Sep 22 23:15:03.647 INFO First source client ID for extent 45, mrl: flush_mismatch, : mend
33964 Sep 22 23:15:03.647 INFO extent:45 gens: 0 1 1, mrl: flush_mismatch, : mend
33965 Sep 22 23:15:03.647 INFO extent:45 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33966 Sep 22 23:15:03.647 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33967 Sep 22 23:15:03.647 INFO extent:45 dirty: false false false, mrl: flush_mismatch, : mend
33968 Sep 22 23:15:03.647 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33969 Sep 22 23:15:03.647 INFO find dest for source 1 for extent at index 45, mrl: flush_mismatch, : mend
33970 Sep 22 23:15:03.647 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33971 Sep 22 23:15:03.647 INFO Extent 46 has flush number mismatch, : mend
33972 Sep 22 23:15:03.647 INFO First source client ID for extent 46, mrl: flush_mismatch, : mend
33973 Sep 22 23:15:03.647 INFO extent:46 gens: 0 1 1, mrl: flush_mismatch, : mend
33974 Sep 22 23:15:03.647 INFO extent:46 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33975 Sep 22 23:15:03.647 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33976 Sep 22 23:15:03.647 INFO extent:46 dirty: false false false, mrl: flush_mismatch, : mend
33977 Sep 22 23:15:03.647 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33978 Sep 22 23:15:03.647 INFO find dest for source 1 for extent at index 46, mrl: flush_mismatch, : mend
33979 Sep 22 23:15:03.647 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33980 Sep 22 23:15:03.647 INFO Extent 47 has flush number mismatch, : mend
33981 Sep 22 23:15:03.647 INFO First source client ID for extent 47, mrl: flush_mismatch, : mend
33982 Sep 22 23:15:03.647 INFO extent:47 gens: 0 1 1, mrl: flush_mismatch, : mend
33983 Sep 22 23:15:03.647 INFO extent:47 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33984 Sep 22 23:15:03.647 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33985 Sep 22 23:15:03.647 INFO extent:47 dirty: false false false, mrl: flush_mismatch, : mend
33986 Sep 22 23:15:03.647 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33987 Sep 22 23:15:03.647 INFO find dest for source 1 for extent at index 47, mrl: flush_mismatch, : mend
33988 Sep 22 23:15:03.647 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33989 Sep 22 23:15:03.647 INFO Extent 48 has flush number mismatch, : mend
33990 Sep 22 23:15:03.647 INFO First source client ID for extent 48, mrl: flush_mismatch, : mend
33991 Sep 22 23:15:03.647 INFO extent:48 gens: 0 1 1, mrl: flush_mismatch, : mend
33992 Sep 22 23:15:03.647 INFO extent:48 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33993 Sep 22 23:15:03.647 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33994 Sep 22 23:15:03.647 INFO extent:48 dirty: false false false, mrl: flush_mismatch, : mend
33995 Sep 22 23:15:03.647 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
33996 Sep 22 23:15:03.647 INFO find dest for source 1 for extent at index 48, mrl: flush_mismatch, : mend
33997 Sep 22 23:15:03.647 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
33998 Sep 22 23:15:03.647 INFO Extent 49 has flush number mismatch, : mend
33999 Sep 22 23:15:03.647 INFO First source client ID for extent 49, mrl: flush_mismatch, : mend
34000 Sep 22 23:15:03.647 INFO extent:49 gens: 0 1 1, mrl: flush_mismatch, : mend
34001 Sep 22 23:15:03.647 INFO extent:49 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34002 Sep 22 23:15:03.647 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34003 Sep 22 23:15:03.647 INFO extent:49 dirty: false false false, mrl: flush_mismatch, : mend
34004 Sep 22 23:15:03.647 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34005 Sep 22 23:15:03.647 INFO find dest for source 1 for extent at index 49, mrl: flush_mismatch, : mend
34006 Sep 22 23:15:03.647 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34007 Sep 22 23:15:03.647 INFO Extent 50 has flush number mismatch, : mend
34008 Sep 22 23:15:03.647 INFO First source client ID for extent 50, mrl: flush_mismatch, : mend
34009 Sep 22 23:15:03.647 INFO extent:50 gens: 0 1 1, mrl: flush_mismatch, : mend
34010 Sep 22 23:15:03.647 INFO extent:50 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34011 Sep 22 23:15:03.647 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34012 Sep 22 23:15:03.647 INFO extent:50 dirty: false false false, mrl: flush_mismatch, : mend
34013 Sep 22 23:15:03.647 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34014 Sep 22 23:15:03.647 INFO find dest for source 1 for extent at index 50, mrl: flush_mismatch, : mend
34015 Sep 22 23:15:03.647 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34016 Sep 22 23:15:03.647 INFO Extent 51 has flush number mismatch, : mend
34017 Sep 22 23:15:03.647 INFO First source client ID for extent 51, mrl: flush_mismatch, : mend
34018 Sep 22 23:15:03.647 INFO extent:51 gens: 0 1 1, mrl: flush_mismatch, : mend
34019 Sep 22 23:15:03.647 INFO extent:51 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34020 Sep 22 23:15:03.647 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34021 Sep 22 23:15:03.647 INFO extent:51 dirty: false false false, mrl: flush_mismatch, : mend
34022 Sep 22 23:15:03.647 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34023 Sep 22 23:15:03.647 INFO find dest for source 1 for extent at index 51, mrl: flush_mismatch, : mend
34024 Sep 22 23:15:03.647 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34025 Sep 22 23:15:03.647 INFO Extent 52 has flush number mismatch, : mend
34026 Sep 22 23:15:03.647 INFO First source client ID for extent 52, mrl: flush_mismatch, : mend
34027 Sep 22 23:15:03.647 INFO extent:52 gens: 0 1 1, mrl: flush_mismatch, : mend
34028 Sep 22 23:15:03.647 INFO extent:52 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34029 Sep 22 23:15:03.647 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34030 Sep 22 23:15:03.647 INFO extent:52 dirty: false false false, mrl: flush_mismatch, : mend
34031 Sep 22 23:15:03.647 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34032 Sep 22 23:15:03.647 INFO find dest for source 1 for extent at index 52, mrl: flush_mismatch, : mend
34033 Sep 22 23:15:03.647 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34034 Sep 22 23:15:03.647 INFO Extent 53 has flush number mismatch, : mend
34035 Sep 22 23:15:03.647 INFO First source client ID for extent 53, mrl: flush_mismatch, : mend
34036 Sep 22 23:15:03.647 INFO extent:53 gens: 0 1 1, mrl: flush_mismatch, : mend
34037 Sep 22 23:15:03.647 INFO extent:53 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34038 Sep 22 23:15:03.647 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34039 Sep 22 23:15:03.647 INFO extent:53 dirty: false false false, mrl: flush_mismatch, : mend
34040 Sep 22 23:15:03.647 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34041 Sep 22 23:15:03.648 INFO find dest for source 1 for extent at index 53, mrl: flush_mismatch, : mend
34042 Sep 22 23:15:03.648 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34043 Sep 22 23:15:03.648 INFO Extent 54 has flush number mismatch, : mend
34044 Sep 22 23:15:03.648 INFO First source client ID for extent 54, mrl: flush_mismatch, : mend
34045 Sep 22 23:15:03.648 INFO extent:54 gens: 0 1 1, mrl: flush_mismatch, : mend
34046 Sep 22 23:15:03.648 INFO extent:54 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34047 Sep 22 23:15:03.648 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34048 Sep 22 23:15:03.648 INFO extent:54 dirty: false false false, mrl: flush_mismatch, : mend
34049 Sep 22 23:15:03.648 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34050 Sep 22 23:15:03.648 INFO find dest for source 1 for extent at index 54, mrl: flush_mismatch, : mend
34051 Sep 22 23:15:03.648 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34052 Sep 22 23:15:03.648 INFO Extent 55 has flush number mismatch, : mend
34053 Sep 22 23:15:03.648 INFO First source client ID for extent 55, mrl: flush_mismatch, : mend
34054 Sep 22 23:15:03.648 INFO extent:55 gens: 0 1 1, mrl: flush_mismatch, : mend
34055 Sep 22 23:15:03.648 INFO extent:55 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34056 Sep 22 23:15:03.648 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34057 Sep 22 23:15:03.648 INFO extent:55 dirty: false false false, mrl: flush_mismatch, : mend
34058 Sep 22 23:15:03.648 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34059 Sep 22 23:15:03.648 INFO find dest for source 1 for extent at index 55, mrl: flush_mismatch, : mend
34060 Sep 22 23:15:03.648 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34061 Sep 22 23:15:03.648 INFO Extent 56 has flush number mismatch, : mend
34062 Sep 22 23:15:03.648 INFO First source client ID for extent 56, mrl: flush_mismatch, : mend
34063 Sep 22 23:15:03.648 INFO extent:56 gens: 0 1 1, mrl: flush_mismatch, : mend
34064 Sep 22 23:15:03.648 INFO extent:56 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34065 Sep 22 23:15:03.648 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34066 Sep 22 23:15:03.648 INFO extent:56 dirty: false false false, mrl: flush_mismatch, : mend
34067 Sep 22 23:15:03.648 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34068 Sep 22 23:15:03.648 INFO find dest for source 1 for extent at index 56, mrl: flush_mismatch, : mend
34069 Sep 22 23:15:03.648 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34070 Sep 22 23:15:03.648 INFO Extent 57 has flush number mismatch, : mend
34071 Sep 22 23:15:03.648 INFO First source client ID for extent 57, mrl: flush_mismatch, : mend
34072 Sep 22 23:15:03.648 INFO extent:57 gens: 0 1 1, mrl: flush_mismatch, : mend
34073 Sep 22 23:15:03.648 INFO extent:57 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34074 Sep 22 23:15:03.648 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34075 Sep 22 23:15:03.648 INFO extent:57 dirty: false false false, mrl: flush_mismatch, : mend
34076 Sep 22 23:15:03.648 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34077 Sep 22 23:15:03.648 INFO find dest for source 1 for extent at index 57, mrl: flush_mismatch, : mend
34078 Sep 22 23:15:03.648 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34079 Sep 22 23:15:03.648 INFO Extent 58 has flush number mismatch, : mend
34080 Sep 22 23:15:03.648 INFO First source client ID for extent 58, mrl: flush_mismatch, : mend
34081 Sep 22 23:15:03.648 INFO extent:58 gens: 0 1 1, mrl: flush_mismatch, : mend
34082 Sep 22 23:15:03.648 INFO extent:58 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34083 Sep 22 23:15:03.648 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34084 Sep 22 23:15:03.648 INFO extent:58 dirty: false false false, mrl: flush_mismatch, : mend
34085 Sep 22 23:15:03.648 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34086 Sep 22 23:15:03.648 INFO find dest for source 1 for extent at index 58, mrl: flush_mismatch, : mend
34087 Sep 22 23:15:03.648 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34088 Sep 22 23:15:03.648 INFO Extent 59 has flush number mismatch, : mend
34089 Sep 22 23:15:03.648 INFO First source client ID for extent 59, mrl: flush_mismatch, : mend
34090 Sep 22 23:15:03.648 INFO extent:59 gens: 0 1 1, mrl: flush_mismatch, : mend
34091 Sep 22 23:15:03.648 INFO extent:59 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34092 Sep 22 23:15:03.648 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34093 Sep 22 23:15:03.648 INFO extent:59 dirty: false false false, mrl: flush_mismatch, : mend
34094 Sep 22 23:15:03.648 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34095 Sep 22 23:15:03.648 INFO find dest for source 1 for extent at index 59, mrl: flush_mismatch, : mend
34096 Sep 22 23:15:03.648 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34097 Sep 22 23:15:03.648 INFO Extent 60 has flush number mismatch, : mend
34098 Sep 22 23:15:03.648 INFO First source client ID for extent 60, mrl: flush_mismatch, : mend
34099 Sep 22 23:15:03.648 INFO extent:60 gens: 0 1 1, mrl: flush_mismatch, : mend
34100 Sep 22 23:15:03.648 INFO extent:60 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34101 Sep 22 23:15:03.648 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34102 Sep 22 23:15:03.648 INFO extent:60 dirty: false false false, mrl: flush_mismatch, : mend
34103 Sep 22 23:15:03.648 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34104 Sep 22 23:15:03.648 INFO find dest for source 1 for extent at index 60, mrl: flush_mismatch, : mend
34105 Sep 22 23:15:03.648 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34106 Sep 22 23:15:03.648 INFO Extent 61 has flush number mismatch, : mend
34107 Sep 22 23:15:03.648 INFO First source client ID for extent 61, mrl: flush_mismatch, : mend
34108 Sep 22 23:15:03.648 INFO extent:61 gens: 0 1 1, mrl: flush_mismatch, : mend
34109 Sep 22 23:15:03.648 INFO extent:61 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34110 Sep 22 23:15:03.648 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34111 Sep 22 23:15:03.648 INFO extent:61 dirty: false false false, mrl: flush_mismatch, : mend
34112 Sep 22 23:15:03.648 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34113 Sep 22 23:15:03.648 INFO find dest for source 1 for extent at index 61, mrl: flush_mismatch, : mend
34114 Sep 22 23:15:03.648 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34115 Sep 22 23:15:03.648 INFO Extent 62 has flush number mismatch, : mend
34116 Sep 22 23:15:03.648 INFO First source client ID for extent 62, mrl: flush_mismatch, : mend
34117 Sep 22 23:15:03.648 INFO extent:62 gens: 0 1 1, mrl: flush_mismatch, : mend
34118 Sep 22 23:15:03.648 INFO extent:62 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34119 Sep 22 23:15:03.648 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34120 Sep 22 23:15:03.648 INFO extent:62 dirty: false false false, mrl: flush_mismatch, : mend
34121 Sep 22 23:15:03.648 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34122 Sep 22 23:15:03.648 INFO find dest for source 1 for extent at index 62, mrl: flush_mismatch, : mend
34123 Sep 22 23:15:03.648 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34124 Sep 22 23:15:03.648 INFO Extent 63 has flush number mismatch, : mend
34125 Sep 22 23:15:03.648 INFO First source client ID for extent 63, mrl: flush_mismatch, : mend
34126 Sep 22 23:15:03.648 INFO extent:63 gens: 0 1 1, mrl: flush_mismatch, : mend
34127 Sep 22 23:15:03.648 INFO extent:63 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34128 Sep 22 23:15:03.649 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34129 Sep 22 23:15:03.649 INFO extent:63 dirty: false false false, mrl: flush_mismatch, : mend
34130 Sep 22 23:15:03.649 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34131 Sep 22 23:15:03.649 INFO find dest for source 1 for extent at index 63, mrl: flush_mismatch, : mend
34132 Sep 22 23:15:03.649 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34133 Sep 22 23:15:03.649 INFO Extent 64 has flush number mismatch, : mend
34134 Sep 22 23:15:03.649 INFO First source client ID for extent 64, mrl: flush_mismatch, : mend
34135 Sep 22 23:15:03.649 INFO extent:64 gens: 0 1 1, mrl: flush_mismatch, : mend
34136 Sep 22 23:15:03.649 INFO extent:64 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34137 Sep 22 23:15:03.649 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34138 Sep 22 23:15:03.649 INFO extent:64 dirty: false false false, mrl: flush_mismatch, : mend
34139 Sep 22 23:15:03.649 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34140 Sep 22 23:15:03.649 INFO find dest for source 1 for extent at index 64, mrl: flush_mismatch, : mend
34141 Sep 22 23:15:03.649 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34142 Sep 22 23:15:03.649 INFO Extent 65 has flush number mismatch, : mend
34143 Sep 22 23:15:03.649 INFO First source client ID for extent 65, mrl: flush_mismatch, : mend
34144 Sep 22 23:15:03.649 INFO extent:65 gens: 0 1 1, mrl: flush_mismatch, : mend
34145 Sep 22 23:15:03.649 INFO extent:65 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34146 Sep 22 23:15:03.649 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34147 Sep 22 23:15:03.649 INFO extent:65 dirty: false false false, mrl: flush_mismatch, : mend
34148 Sep 22 23:15:03.649 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34149 Sep 22 23:15:03.649 INFO find dest for source 1 for extent at index 65, mrl: flush_mismatch, : mend
34150 Sep 22 23:15:03.649 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34151 Sep 22 23:15:03.649 INFO Extent 66 has flush number mismatch, : mend
34152 Sep 22 23:15:03.649 INFO First source client ID for extent 66, mrl: flush_mismatch, : mend
34153 Sep 22 23:15:03.649 INFO extent:66 gens: 0 1 1, mrl: flush_mismatch, : mend
34154 Sep 22 23:15:03.649 INFO extent:66 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34155 Sep 22 23:15:03.649 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34156 Sep 22 23:15:03.649 INFO extent:66 dirty: false false false, mrl: flush_mismatch, : mend
34157 Sep 22 23:15:03.649 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34158 Sep 22 23:15:03.649 INFO find dest for source 1 for extent at index 66, mrl: flush_mismatch, : mend
34159 Sep 22 23:15:03.649 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34160 Sep 22 23:15:03.649 INFO Extent 67 has flush number mismatch, : mend
34161 Sep 22 23:15:03.649 INFO First source client ID for extent 67, mrl: flush_mismatch, : mend
34162 Sep 22 23:15:03.649 INFO extent:67 gens: 0 1 1, mrl: flush_mismatch, : mend
34163 Sep 22 23:15:03.649 INFO extent:67 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34164 Sep 22 23:15:03.649 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34165 Sep 22 23:15:03.649 INFO extent:67 dirty: false false false, mrl: flush_mismatch, : mend
34166 Sep 22 23:15:03.649 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34167 Sep 22 23:15:03.649 INFO find dest for source 1 for extent at index 67, mrl: flush_mismatch, : mend
34168 Sep 22 23:15:03.649 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34169 Sep 22 23:15:03.649 INFO Extent 68 has flush number mismatch, : mend
34170 Sep 22 23:15:03.649 INFO First source client ID for extent 68, mrl: flush_mismatch, : mend
34171 Sep 22 23:15:03.649 INFO extent:68 gens: 0 1 1, mrl: flush_mismatch, : mend
34172 Sep 22 23:15:03.649 INFO extent:68 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34173 Sep 22 23:15:03.649 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34174 Sep 22 23:15:03.649 INFO extent:68 dirty: false false false, mrl: flush_mismatch, : mend
34175 Sep 22 23:15:03.649 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34176 Sep 22 23:15:03.649 INFO find dest for source 1 for extent at index 68, mrl: flush_mismatch, : mend
34177 Sep 22 23:15:03.649 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34178 Sep 22 23:15:03.649 INFO Extent 69 has flush number mismatch, : mend
34179 Sep 22 23:15:03.649 INFO First source client ID for extent 69, mrl: flush_mismatch, : mend
34180 Sep 22 23:15:03.649 INFO extent:69 gens: 0 1 1, mrl: flush_mismatch, : mend
34181 Sep 22 23:15:03.649 INFO extent:69 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34182 Sep 22 23:15:03.649 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34183 Sep 22 23:15:03.649 INFO extent:69 dirty: false false false, mrl: flush_mismatch, : mend
34184 Sep 22 23:15:03.649 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34185 Sep 22 23:15:03.649 INFO find dest for source 1 for extent at index 69, mrl: flush_mismatch, : mend
34186 Sep 22 23:15:03.649 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34187 Sep 22 23:15:03.649 INFO Extent 70 has flush number mismatch, : mend
34188 Sep 22 23:15:03.649 INFO First source client ID for extent 70, mrl: flush_mismatch, : mend
34189 Sep 22 23:15:03.649 INFO extent:70 gens: 0 1 1, mrl: flush_mismatch, : mend
34190 Sep 22 23:15:03.649 INFO extent:70 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34191 Sep 22 23:15:03.649 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34192 Sep 22 23:15:03.649 INFO extent:70 dirty: false false false, mrl: flush_mismatch, : mend
34193 Sep 22 23:15:03.649 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34194 Sep 22 23:15:03.649 INFO find dest for source 1 for extent at index 70, mrl: flush_mismatch, : mend
34195 Sep 22 23:15:03.649 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34196 Sep 22 23:15:03.649 INFO Extent 71 has flush number mismatch, : mend
34197 Sep 22 23:15:03.649 INFO First source client ID for extent 71, mrl: flush_mismatch, : mend
34198 Sep 22 23:15:03.649 INFO extent:71 gens: 0 1 1, mrl: flush_mismatch, : mend
34199 Sep 22 23:15:03.649 INFO extent:71 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34200 Sep 22 23:15:03.649 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34201 Sep 22 23:15:03.649 INFO extent:71 dirty: false false false, mrl: flush_mismatch, : mend
34202 Sep 22 23:15:03.649 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34203 Sep 22 23:15:03.649 INFO find dest for source 1 for extent at index 71, mrl: flush_mismatch, : mend
34204 Sep 22 23:15:03.649 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34205 Sep 22 23:15:03.649 INFO Extent 72 has flush number mismatch, : mend
34206 Sep 22 23:15:03.649 INFO First source client ID for extent 72, mrl: flush_mismatch, : mend
34207 Sep 22 23:15:03.649 INFO extent:72 gens: 0 1 1, mrl: flush_mismatch, : mend
34208 Sep 22 23:15:03.649 INFO extent:72 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34209 Sep 22 23:15:03.649 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34210 Sep 22 23:15:03.649 INFO extent:72 dirty: false false false, mrl: flush_mismatch, : mend
34211 Sep 22 23:15:03.649 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34212 Sep 22 23:15:03.649 INFO find dest for source 1 for extent at index 72, mrl: flush_mismatch, : mend
34213 Sep 22 23:15:03.649 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34214 Sep 22 23:15:03.649 INFO Extent 73 has flush number mismatch, : mend
34215 Sep 22 23:15:03.649 INFO First source client ID for extent 73, mrl: flush_mismatch, : mend
34216 Sep 22 23:15:03.649 INFO extent:73 gens: 0 1 1, mrl: flush_mismatch, : mend
34217 Sep 22 23:15:03.649 INFO extent:73 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34218 Sep 22 23:15:03.650 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34219 Sep 22 23:15:03.650 INFO extent:73 dirty: false false false, mrl: flush_mismatch, : mend
34220 Sep 22 23:15:03.650 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34221 Sep 22 23:15:03.650 INFO find dest for source 1 for extent at index 73, mrl: flush_mismatch, : mend
34222 Sep 22 23:15:03.650 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34223 Sep 22 23:15:03.650 INFO Extent 74 has flush number mismatch, : mend
34224 Sep 22 23:15:03.650 INFO First source client ID for extent 74, mrl: flush_mismatch, : mend
34225 Sep 22 23:15:03.650 INFO extent:74 gens: 0 1 1, mrl: flush_mismatch, : mend
34226 Sep 22 23:15:03.650 INFO extent:74 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34227 Sep 22 23:15:03.650 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34228 Sep 22 23:15:03.650 INFO extent:74 dirty: false false false, mrl: flush_mismatch, : mend
34229 Sep 22 23:15:03.650 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34230 Sep 22 23:15:03.650 INFO find dest for source 1 for extent at index 74, mrl: flush_mismatch, : mend
34231 Sep 22 23:15:03.650 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34232 Sep 22 23:15:03.650 INFO Extent 75 has flush number mismatch, : mend
34233 Sep 22 23:15:03.650 INFO First source client ID for extent 75, mrl: flush_mismatch, : mend
34234 Sep 22 23:15:03.650 INFO extent:75 gens: 0 1 1, mrl: flush_mismatch, : mend
34235 Sep 22 23:15:03.650 INFO extent:75 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34236 Sep 22 23:15:03.650 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34237 Sep 22 23:15:03.650 INFO extent:75 dirty: false false false, mrl: flush_mismatch, : mend
34238 Sep 22 23:15:03.650 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34239 Sep 22 23:15:03.650 INFO find dest for source 1 for extent at index 75, mrl: flush_mismatch, : mend
34240 Sep 22 23:15:03.650 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34241 Sep 22 23:15:03.650 INFO Extent 76 has flush number mismatch, : mend
34242 Sep 22 23:15:03.650 INFO First source client ID for extent 76, mrl: flush_mismatch, : mend
34243 Sep 22 23:15:03.650 INFO extent:76 gens: 0 1 1, mrl: flush_mismatch, : mend
34244 Sep 22 23:15:03.650 INFO extent:76 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34245 Sep 22 23:15:03.650 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34246 Sep 22 23:15:03.650 INFO extent:76 dirty: false false false, mrl: flush_mismatch, : mend
34247 Sep 22 23:15:03.650 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34248 Sep 22 23:15:03.650 INFO find dest for source 1 for extent at index 76, mrl: flush_mismatch, : mend
34249 Sep 22 23:15:03.650 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34250 Sep 22 23:15:03.650 INFO Extent 77 has flush number mismatch, : mend
34251 Sep 22 23:15:03.650 INFO First source client ID for extent 77, mrl: flush_mismatch, : mend
34252 Sep 22 23:15:03.650 INFO extent:77 gens: 0 1 1, mrl: flush_mismatch, : mend
34253 Sep 22 23:15:03.650 INFO extent:77 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34254 Sep 22 23:15:03.650 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34255 Sep 22 23:15:03.650 INFO extent:77 dirty: false false false, mrl: flush_mismatch, : mend
34256 Sep 22 23:15:03.650 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34257 Sep 22 23:15:03.650 INFO find dest for source 1 for extent at index 77, mrl: flush_mismatch, : mend
34258 Sep 22 23:15:03.650 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34259 Sep 22 23:15:03.650 INFO Extent 78 has flush number mismatch, : mend
34260 Sep 22 23:15:03.650 INFO First source client ID for extent 78, mrl: flush_mismatch, : mend
34261 Sep 22 23:15:03.650 INFO extent:78 gens: 0 1 1, mrl: flush_mismatch, : mend
34262 Sep 22 23:15:03.650 INFO extent:78 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34263 Sep 22 23:15:03.650 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34264 Sep 22 23:15:03.650 INFO extent:78 dirty: false false false, mrl: flush_mismatch, : mend
34265 Sep 22 23:15:03.650 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34266 Sep 22 23:15:03.650 INFO find dest for source 1 for extent at index 78, mrl: flush_mismatch, : mend
34267 Sep 22 23:15:03.650 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34268 Sep 22 23:15:03.650 INFO Extent 79 has flush number mismatch, : mend
34269 Sep 22 23:15:03.650 INFO First source client ID for extent 79, mrl: flush_mismatch, : mend
34270 Sep 22 23:15:03.650 INFO extent:79 gens: 0 1 1, mrl: flush_mismatch, : mend
34271 Sep 22 23:15:03.650 INFO extent:79 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34272 Sep 22 23:15:03.650 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34273 Sep 22 23:15:03.650 INFO extent:79 dirty: false false false, mrl: flush_mismatch, : mend
34274 Sep 22 23:15:03.650 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34275 Sep 22 23:15:03.650 INFO find dest for source 1 for extent at index 79, mrl: flush_mismatch, : mend
34276 Sep 22 23:15:03.650 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34277 Sep 22 23:15:03.650 INFO Extent 80 has flush number mismatch, : mend
34278 Sep 22 23:15:03.650 INFO First source client ID for extent 80, mrl: flush_mismatch, : mend
34279 Sep 22 23:15:03.650 INFO extent:80 gens: 0 1 1, mrl: flush_mismatch, : mend
34280 Sep 22 23:15:03.650 INFO extent:80 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34281 Sep 22 23:15:03.650 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34282 Sep 22 23:15:03.650 INFO extent:80 dirty: false false false, mrl: flush_mismatch, : mend
34283 Sep 22 23:15:03.650 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34284 Sep 22 23:15:03.650 INFO find dest for source 1 for extent at index 80, mrl: flush_mismatch, : mend
34285 Sep 22 23:15:03.650 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34286 Sep 22 23:15:03.650 INFO Extent 81 has flush number mismatch, : mend
34287 Sep 22 23:15:03.650 INFO First source client ID for extent 81, mrl: flush_mismatch, : mend
34288 Sep 22 23:15:03.650 INFO extent:81 gens: 0 1 1, mrl: flush_mismatch, : mend
34289 Sep 22 23:15:03.650 INFO extent:81 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34290 Sep 22 23:15:03.650 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34291 Sep 22 23:15:03.650 INFO extent:81 dirty: false false false, mrl: flush_mismatch, : mend
34292 Sep 22 23:15:03.650 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34293 Sep 22 23:15:03.650 INFO find dest for source 1 for extent at index 81, mrl: flush_mismatch, : mend
34294 Sep 22 23:15:03.650 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34295 Sep 22 23:15:03.650 INFO Extent 82 has flush number mismatch, : mend
34296 Sep 22 23:15:03.650 INFO First source client ID for extent 82, mrl: flush_mismatch, : mend
34297 Sep 22 23:15:03.650 INFO extent:82 gens: 0 1 1, mrl: flush_mismatch, : mend
34298 Sep 22 23:15:03.650 INFO extent:82 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34299 Sep 22 23:15:03.650 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34300 Sep 22 23:15:03.650 INFO extent:82 dirty: false false false, mrl: flush_mismatch, : mend
34301 Sep 22 23:15:03.650 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34302 Sep 22 23:15:03.650 INFO find dest for source 1 for extent at index 82, mrl: flush_mismatch, : mend
34303 Sep 22 23:15:03.650 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34304 Sep 22 23:15:03.650 INFO Extent 83 has flush number mismatch, : mend
34305 Sep 22 23:15:03.650 INFO First source client ID for extent 83, mrl: flush_mismatch, : mend
34306 Sep 22 23:15:03.650 INFO extent:83 gens: 0 1 1, mrl: flush_mismatch, : mend
34307 Sep 22 23:15:03.650 INFO extent:83 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34308 Sep 22 23:15:03.651 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34309 Sep 22 23:15:03.651 INFO extent:83 dirty: false false false, mrl: flush_mismatch, : mend
34310 Sep 22 23:15:03.651 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34311 Sep 22 23:15:03.651 INFO find dest for source 1 for extent at index 83, mrl: flush_mismatch, : mend
34312 Sep 22 23:15:03.651 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34313 Sep 22 23:15:03.651 INFO Extent 84 has flush number mismatch, : mend
34314 Sep 22 23:15:03.651 INFO First source client ID for extent 84, mrl: flush_mismatch, : mend
34315 Sep 22 23:15:03.651 INFO extent:84 gens: 0 1 1, mrl: flush_mismatch, : mend
34316 Sep 22 23:15:03.651 INFO extent:84 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34317 Sep 22 23:15:03.651 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34318 Sep 22 23:15:03.651 INFO extent:84 dirty: false false false, mrl: flush_mismatch, : mend
34319 Sep 22 23:15:03.651 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34320 Sep 22 23:15:03.651 INFO find dest for source 1 for extent at index 84, mrl: flush_mismatch, : mend
34321 Sep 22 23:15:03.651 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34322 Sep 22 23:15:03.651 INFO Extent 85 has flush number mismatch, : mend
34323 Sep 22 23:15:03.651 INFO First source client ID for extent 85, mrl: flush_mismatch, : mend
34324 Sep 22 23:15:03.651 INFO extent:85 gens: 0 1 1, mrl: flush_mismatch, : mend
34325 Sep 22 23:15:03.651 INFO extent:85 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34326 Sep 22 23:15:03.651 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34327 Sep 22 23:15:03.651 INFO extent:85 dirty: false false false, mrl: flush_mismatch, : mend
34328 Sep 22 23:15:03.651 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34329 Sep 22 23:15:03.651 INFO find dest for source 1 for extent at index 85, mrl: flush_mismatch, : mend
34330 Sep 22 23:15:03.651 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34331 Sep 22 23:15:03.651 INFO Extent 86 has flush number mismatch, : mend
34332 Sep 22 23:15:03.651 INFO First source client ID for extent 86, mrl: flush_mismatch, : mend
34333 Sep 22 23:15:03.651 INFO extent:86 gens: 0 1 1, mrl: flush_mismatch, : mend
34334 Sep 22 23:15:03.651 INFO extent:86 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34335 Sep 22 23:15:03.651 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34336 Sep 22 23:15:03.651 INFO extent:86 dirty: false false false, mrl: flush_mismatch, : mend
34337 Sep 22 23:15:03.651 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34338 Sep 22 23:15:03.651 INFO find dest for source 1 for extent at index 86, mrl: flush_mismatch, : mend
34339 Sep 22 23:15:03.651 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34340 Sep 22 23:15:03.651 INFO Extent 87 has flush number mismatch, : mend
34341 Sep 22 23:15:03.651 INFO First source client ID for extent 87, mrl: flush_mismatch, : mend
34342 Sep 22 23:15:03.651 INFO extent:87 gens: 0 1 1, mrl: flush_mismatch, : mend
34343 Sep 22 23:15:03.651 INFO extent:87 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34344 Sep 22 23:15:03.651 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34345 Sep 22 23:15:03.651 INFO extent:87 dirty: false false false, mrl: flush_mismatch, : mend
34346 Sep 22 23:15:03.651 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34347 Sep 22 23:15:03.651 INFO find dest for source 1 for extent at index 87, mrl: flush_mismatch, : mend
34348 Sep 22 23:15:03.651 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34349 Sep 22 23:15:03.651 INFO Extent 88 has flush number mismatch, : mend
34350 Sep 22 23:15:03.651 INFO First source client ID for extent 88, mrl: flush_mismatch, : mend
34351 Sep 22 23:15:03.651 INFO extent:88 gens: 0 1 1, mrl: flush_mismatch, : mend
34352 Sep 22 23:15:03.651 INFO extent:88 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34353 Sep 22 23:15:03.651 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34354 Sep 22 23:15:03.651 INFO extent:88 dirty: false false false, mrl: flush_mismatch, : mend
34355 Sep 22 23:15:03.651 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34356 Sep 22 23:15:03.651 INFO find dest for source 1 for extent at index 88, mrl: flush_mismatch, : mend
34357 Sep 22 23:15:03.651 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34358 Sep 22 23:15:03.651 INFO Extent 89 has flush number mismatch, : mend
34359 Sep 22 23:15:03.651 INFO First source client ID for extent 89, mrl: flush_mismatch, : mend
34360 Sep 22 23:15:03.651 INFO extent:89 gens: 0 1 1, mrl: flush_mismatch, : mend
34361 Sep 22 23:15:03.651 INFO extent:89 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34362 Sep 22 23:15:03.651 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34363 Sep 22 23:15:03.651 INFO extent:89 dirty: false false false, mrl: flush_mismatch, : mend
34364 Sep 22 23:15:03.651 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34365 Sep 22 23:15:03.651 INFO find dest for source 1 for extent at index 89, mrl: flush_mismatch, : mend
34366 Sep 22 23:15:03.651 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34367 Sep 22 23:15:03.651 INFO Extent 90 has flush number mismatch, : mend
34368 Sep 22 23:15:03.651 INFO First source client ID for extent 90, mrl: flush_mismatch, : mend
34369 Sep 22 23:15:03.651 INFO extent:90 gens: 0 1 1, mrl: flush_mismatch, : mend
34370 Sep 22 23:15:03.651 INFO extent:90 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34371 Sep 22 23:15:03.651 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34372 Sep 22 23:15:03.651 INFO extent:90 dirty: false false false, mrl: flush_mismatch, : mend
34373 Sep 22 23:15:03.651 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34374 Sep 22 23:15:03.651 INFO find dest for source 1 for extent at index 90, mrl: flush_mismatch, : mend
34375 Sep 22 23:15:03.651 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34376 Sep 22 23:15:03.651 INFO Extent 91 has flush number mismatch, : mend
34377 Sep 22 23:15:03.651 INFO First source client ID for extent 91, mrl: flush_mismatch, : mend
34378 Sep 22 23:15:03.651 INFO extent:91 gens: 0 1 1, mrl: flush_mismatch, : mend
34379 Sep 22 23:15:03.651 INFO extent:91 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34380 Sep 22 23:15:03.651 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34381 Sep 22 23:15:03.651 INFO extent:91 dirty: false false false, mrl: flush_mismatch, : mend
34382 Sep 22 23:15:03.651 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34383 Sep 22 23:15:03.651 INFO find dest for source 1 for extent at index 91, mrl: flush_mismatch, : mend
34384 Sep 22 23:15:03.651 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34385 Sep 22 23:15:03.651 INFO Extent 92 has flush number mismatch, : mend
34386 Sep 22 23:15:03.651 INFO First source client ID for extent 92, mrl: flush_mismatch, : mend
34387 Sep 22 23:15:03.651 INFO extent:92 gens: 0 1 1, mrl: flush_mismatch, : mend
34388 Sep 22 23:15:03.651 INFO extent:92 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34389 Sep 22 23:15:03.651 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34390 Sep 22 23:15:03.651 INFO extent:92 dirty: false false false, mrl: flush_mismatch, : mend
34391 Sep 22 23:15:03.651 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34392 Sep 22 23:15:03.651 INFO find dest for source 1 for extent at index 92, mrl: flush_mismatch, : mend
34393 Sep 22 23:15:03.651 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34394 Sep 22 23:15:03.651 INFO Extent 93 has flush number mismatch, : mend
34395 Sep 22 23:15:03.651 INFO First source client ID for extent 93, mrl: flush_mismatch, : mend
34396 Sep 22 23:15:03.651 INFO extent:93 gens: 0 1 1, mrl: flush_mismatch, : mend
34397 Sep 22 23:15:03.651 INFO extent:93 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34398 Sep 22 23:15:03.652 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34399 Sep 22 23:15:03.652 INFO extent:93 dirty: false false false, mrl: flush_mismatch, : mend
34400 Sep 22 23:15:03.652 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34401 Sep 22 23:15:03.652 INFO find dest for source 1 for extent at index 93, mrl: flush_mismatch, : mend
34402 Sep 22 23:15:03.652 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34403 Sep 22 23:15:03.652 INFO Extent 94 has flush number mismatch, : mend
34404 Sep 22 23:15:03.652 INFO First source client ID for extent 94, mrl: flush_mismatch, : mend
34405 Sep 22 23:15:03.652 INFO extent:94 gens: 0 1 1, mrl: flush_mismatch, : mend
34406 Sep 22 23:15:03.652 INFO extent:94 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34407 Sep 22 23:15:03.652 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34408 Sep 22 23:15:03.652 INFO extent:94 dirty: false false false, mrl: flush_mismatch, : mend
34409 Sep 22 23:15:03.652 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34410 Sep 22 23:15:03.652 INFO find dest for source 1 for extent at index 94, mrl: flush_mismatch, : mend
34411 Sep 22 23:15:03.652 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34412 Sep 22 23:15:03.652 INFO Extent 95 has flush number mismatch, : mend
34413 Sep 22 23:15:03.652 INFO First source client ID for extent 95, mrl: flush_mismatch, : mend
34414 Sep 22 23:15:03.652 INFO extent:95 gens: 0 1 1, mrl: flush_mismatch, : mend
34415 Sep 22 23:15:03.652 INFO extent:95 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34416 Sep 22 23:15:03.652 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34417 Sep 22 23:15:03.652 INFO extent:95 dirty: false false false, mrl: flush_mismatch, : mend
34418 Sep 22 23:15:03.652 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34419 Sep 22 23:15:03.652 INFO find dest for source 1 for extent at index 95, mrl: flush_mismatch, : mend
34420 Sep 22 23:15:03.652 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34421 Sep 22 23:15:03.652 INFO Extent 96 has flush number mismatch, : mend
34422 Sep 22 23:15:03.652 INFO First source client ID for extent 96, mrl: flush_mismatch, : mend
34423 Sep 22 23:15:03.652 INFO extent:96 gens: 0 1 1, mrl: flush_mismatch, : mend
34424 Sep 22 23:15:03.652 INFO extent:96 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34425 Sep 22 23:15:03.652 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34426 Sep 22 23:15:03.652 INFO extent:96 dirty: false false false, mrl: flush_mismatch, : mend
34427 Sep 22 23:15:03.652 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34428 Sep 22 23:15:03.652 INFO find dest for source 1 for extent at index 96, mrl: flush_mismatch, : mend
34429 Sep 22 23:15:03.652 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34430 Sep 22 23:15:03.652 INFO Extent 97 has flush number mismatch, : mend
34431 Sep 22 23:15:03.652 INFO First source client ID for extent 97, mrl: flush_mismatch, : mend
34432 Sep 22 23:15:03.652 INFO extent:97 gens: 0 1 1, mrl: flush_mismatch, : mend
34433 Sep 22 23:15:03.652 INFO extent:97 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34434 Sep 22 23:15:03.652 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34435 Sep 22 23:15:03.652 INFO extent:97 dirty: false false false, mrl: flush_mismatch, : mend
34436 Sep 22 23:15:03.652 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34437 Sep 22 23:15:03.652 INFO find dest for source 1 for extent at index 97, mrl: flush_mismatch, : mend
34438 Sep 22 23:15:03.652 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34439 Sep 22 23:15:03.652 INFO Extent 98 has flush number mismatch, : mend
34440 Sep 22 23:15:03.652 INFO First source client ID for extent 98, mrl: flush_mismatch, : mend
34441 Sep 22 23:15:03.652 INFO extent:98 gens: 0 1 1, mrl: flush_mismatch, : mend
34442 Sep 22 23:15:03.652 INFO extent:98 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34443 Sep 22 23:15:03.652 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34444 Sep 22 23:15:03.652 INFO extent:98 dirty: false false false, mrl: flush_mismatch, : mend
34445 Sep 22 23:15:03.652 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34446 Sep 22 23:15:03.652 INFO find dest for source 1 for extent at index 98, mrl: flush_mismatch, : mend
34447 Sep 22 23:15:03.652 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34448 Sep 22 23:15:03.652 INFO Extent 99 has flush number mismatch, : mend
34449 Sep 22 23:15:03.652 INFO First source client ID for extent 99, mrl: flush_mismatch, : mend
34450 Sep 22 23:15:03.652 INFO extent:99 gens: 0 1 1, mrl: flush_mismatch, : mend
34451 Sep 22 23:15:03.652 INFO extent:99 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34452 Sep 22 23:15:03.652 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34453 Sep 22 23:15:03.652 INFO extent:99 dirty: false false false, mrl: flush_mismatch, : mend
34454 Sep 22 23:15:03.652 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34455 Sep 22 23:15:03.652 INFO find dest for source 1 for extent at index 99, mrl: flush_mismatch, : mend
34456 Sep 22 23:15:03.652 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34457 Sep 22 23:15:03.652 INFO Extent 100 has flush number mismatch, : mend
34458 Sep 22 23:15:03.652 INFO First source client ID for extent 100, mrl: flush_mismatch, : mend
34459 Sep 22 23:15:03.652 INFO extent:100 gens: 0 1 1, mrl: flush_mismatch, : mend
34460 Sep 22 23:15:03.652 INFO extent:100 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34461 Sep 22 23:15:03.652 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34462 Sep 22 23:15:03.652 INFO extent:100 dirty: false false false, mrl: flush_mismatch, : mend
34463 Sep 22 23:15:03.652 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34464 Sep 22 23:15:03.652 INFO find dest for source 1 for extent at index 100, mrl: flush_mismatch, : mend
34465 Sep 22 23:15:03.652 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34466 Sep 22 23:15:03.652 INFO Extent 101 has flush number mismatch, : mend
34467 Sep 22 23:15:03.652 INFO First source client ID for extent 101, mrl: flush_mismatch, : mend
34468 Sep 22 23:15:03.652 INFO extent:101 gens: 0 1 1, mrl: flush_mismatch, : mend
34469 Sep 22 23:15:03.652 INFO extent:101 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34470 Sep 22 23:15:03.652 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34471 Sep 22 23:15:03.652 INFO extent:101 dirty: false false false, mrl: flush_mismatch, : mend
34472 Sep 22 23:15:03.652 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34473 Sep 22 23:15:03.652 INFO find dest for source 1 for extent at index 101, mrl: flush_mismatch, : mend
34474 Sep 22 23:15:03.652 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34475 Sep 22 23:15:03.652 INFO Extent 102 has flush number mismatch, : mend
34476 Sep 22 23:15:03.652 INFO First source client ID for extent 102, mrl: flush_mismatch, : mend
34477 Sep 22 23:15:03.652 INFO extent:102 gens: 0 1 1, mrl: flush_mismatch, : mend
34478 Sep 22 23:15:03.652 INFO extent:102 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34479 Sep 22 23:15:03.652 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34480 Sep 22 23:15:03.652 INFO extent:102 dirty: false false false, mrl: flush_mismatch, : mend
34481 Sep 22 23:15:03.652 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34482 Sep 22 23:15:03.652 INFO find dest for source 1 for extent at index 102, mrl: flush_mismatch, : mend
34483 Sep 22 23:15:03.652 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34484 Sep 22 23:15:03.652 INFO Extent 103 has flush number mismatch, : mend
34485 Sep 22 23:15:03.652 INFO First source client ID for extent 103, mrl: flush_mismatch, : mend
34486 Sep 22 23:15:03.652 INFO extent:103 gens: 0 1 1, mrl: flush_mismatch, : mend
34487 Sep 22 23:15:03.652 INFO extent:103 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34488 Sep 22 23:15:03.653 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34489 Sep 22 23:15:03.653 INFO extent:103 dirty: false false false, mrl: flush_mismatch, : mend
34490 Sep 22 23:15:03.653 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34491 Sep 22 23:15:03.653 INFO find dest for source 1 for extent at index 103, mrl: flush_mismatch, : mend
34492 Sep 22 23:15:03.653 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34493 Sep 22 23:15:03.653 INFO Extent 104 has flush number mismatch, : mend
34494 Sep 22 23:15:03.653 INFO First source client ID for extent 104, mrl: flush_mismatch, : mend
34495 Sep 22 23:15:03.653 INFO extent:104 gens: 0 1 1, mrl: flush_mismatch, : mend
34496 Sep 22 23:15:03.653 INFO extent:104 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34497 Sep 22 23:15:03.653 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34498 Sep 22 23:15:03.653 INFO extent:104 dirty: false false false, mrl: flush_mismatch, : mend
34499 Sep 22 23:15:03.653 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34500 Sep 22 23:15:03.653 INFO find dest for source 1 for extent at index 104, mrl: flush_mismatch, : mend
34501 Sep 22 23:15:03.653 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34502 Sep 22 23:15:03.653 INFO Extent 105 has flush number mismatch, : mend
34503 Sep 22 23:15:03.653 INFO First source client ID for extent 105, mrl: flush_mismatch, : mend
34504 Sep 22 23:15:03.653 INFO extent:105 gens: 0 1 1, mrl: flush_mismatch, : mend
34505 Sep 22 23:15:03.653 INFO extent:105 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34506 Sep 22 23:15:03.653 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34507 Sep 22 23:15:03.653 INFO extent:105 dirty: false false false, mrl: flush_mismatch, : mend
34508 Sep 22 23:15:03.653 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34509 Sep 22 23:15:03.653 INFO find dest for source 1 for extent at index 105, mrl: flush_mismatch, : mend
34510 Sep 22 23:15:03.653 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34511 Sep 22 23:15:03.653 INFO Extent 106 has flush number mismatch, : mend
34512 Sep 22 23:15:03.653 INFO First source client ID for extent 106, mrl: flush_mismatch, : mend
34513 Sep 22 23:15:03.653 INFO extent:106 gens: 0 1 1, mrl: flush_mismatch, : mend
34514 Sep 22 23:15:03.653 INFO extent:106 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34515 Sep 22 23:15:03.653 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34516 Sep 22 23:15:03.653 INFO extent:106 dirty: false false false, mrl: flush_mismatch, : mend
34517 Sep 22 23:15:03.653 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34518 Sep 22 23:15:03.653 INFO find dest for source 1 for extent at index 106, mrl: flush_mismatch, : mend
34519 Sep 22 23:15:03.653 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34520 Sep 22 23:15:03.653 INFO Extent 107 has flush number mismatch, : mend
34521 Sep 22 23:15:03.653 INFO First source client ID for extent 107, mrl: flush_mismatch, : mend
34522 Sep 22 23:15:03.653 INFO extent:107 gens: 0 1 1, mrl: flush_mismatch, : mend
34523 Sep 22 23:15:03.653 INFO extent:107 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34524 Sep 22 23:15:03.653 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34525 Sep 22 23:15:03.653 INFO extent:107 dirty: false false false, mrl: flush_mismatch, : mend
34526 Sep 22 23:15:03.653 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34527 Sep 22 23:15:03.653 INFO find dest for source 1 for extent at index 107, mrl: flush_mismatch, : mend
34528 Sep 22 23:15:03.653 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34529 Sep 22 23:15:03.653 INFO Extent 108 has flush number mismatch, : mend
34530 Sep 22 23:15:03.653 INFO First source client ID for extent 108, mrl: flush_mismatch, : mend
34531 Sep 22 23:15:03.653 INFO extent:108 gens: 0 1 1, mrl: flush_mismatch, : mend
34532 Sep 22 23:15:03.653 INFO extent:108 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34533 Sep 22 23:15:03.653 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34534 Sep 22 23:15:03.653 INFO extent:108 dirty: false false false, mrl: flush_mismatch, : mend
34535 Sep 22 23:15:03.653 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34536 Sep 22 23:15:03.653 INFO find dest for source 1 for extent at index 108, mrl: flush_mismatch, : mend
34537 Sep 22 23:15:03.653 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34538 Sep 22 23:15:03.653 INFO Extent 109 has flush number mismatch, : mend
34539 Sep 22 23:15:03.653 INFO First source client ID for extent 109, mrl: flush_mismatch, : mend
34540 Sep 22 23:15:03.653 INFO extent:109 gens: 0 1 1, mrl: flush_mismatch, : mend
34541 Sep 22 23:15:03.653 INFO extent:109 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34542 Sep 22 23:15:03.653 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34543 Sep 22 23:15:03.653 INFO extent:109 dirty: false false false, mrl: flush_mismatch, : mend
34544 Sep 22 23:15:03.653 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34545 Sep 22 23:15:03.653 INFO find dest for source 1 for extent at index 109, mrl: flush_mismatch, : mend
34546 Sep 22 23:15:03.653 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34547 Sep 22 23:15:03.653 INFO Extent 110 has flush number mismatch, : mend
34548 Sep 22 23:15:03.653 INFO First source client ID for extent 110, mrl: flush_mismatch, : mend
34549 Sep 22 23:15:03.653 INFO extent:110 gens: 0 1 1, mrl: flush_mismatch, : mend
34550 Sep 22 23:15:03.653 INFO extent:110 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34551 Sep 22 23:15:03.653 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34552 Sep 22 23:15:03.653 INFO extent:110 dirty: false false false, mrl: flush_mismatch, : mend
34553 Sep 22 23:15:03.653 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34554 Sep 22 23:15:03.653 INFO find dest for source 1 for extent at index 110, mrl: flush_mismatch, : mend
34555 Sep 22 23:15:03.653 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34556 Sep 22 23:15:03.653 INFO Extent 111 has flush number mismatch, : mend
34557 Sep 22 23:15:03.653 INFO First source client ID for extent 111, mrl: flush_mismatch, : mend
34558 Sep 22 23:15:03.653 INFO extent:111 gens: 0 1 1, mrl: flush_mismatch, : mend
34559 Sep 22 23:15:03.653 INFO extent:111 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34560 Sep 22 23:15:03.653 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34561 Sep 22 23:15:03.653 INFO extent:111 dirty: false false false, mrl: flush_mismatch, : mend
34562 Sep 22 23:15:03.653 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34563 Sep 22 23:15:03.653 INFO find dest for source 1 for extent at index 111, mrl: flush_mismatch, : mend
34564 Sep 22 23:15:03.653 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34565 Sep 22 23:15:03.653 INFO Extent 112 has flush number mismatch, : mend
34566 Sep 22 23:15:03.653 INFO First source client ID for extent 112, mrl: flush_mismatch, : mend
34567 Sep 22 23:15:03.653 INFO extent:112 gens: 0 1 1, mrl: flush_mismatch, : mend
34568 Sep 22 23:15:03.653 INFO extent:112 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34569 Sep 22 23:15:03.653 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34570 Sep 22 23:15:03.653 INFO extent:112 dirty: false false false, mrl: flush_mismatch, : mend
34571 Sep 22 23:15:03.653 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34572 Sep 22 23:15:03.653 INFO find dest for source 1 for extent at index 112, mrl: flush_mismatch, : mend
34573 Sep 22 23:15:03.653 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34574 Sep 22 23:15:03.654 INFO Extent 113 has flush number mismatch, : mend
34575 Sep 22 23:15:03.654 INFO First source client ID for extent 113, mrl: flush_mismatch, : mend
34576 Sep 22 23:15:03.654 INFO extent:113 gens: 0 1 1, mrl: flush_mismatch, : mend
34577 Sep 22 23:15:03.654 INFO extent:113 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34578 Sep 22 23:15:03.654 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34579 Sep 22 23:15:03.654 INFO extent:113 dirty: false false false, mrl: flush_mismatch, : mend
34580 Sep 22 23:15:03.654 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34581 Sep 22 23:15:03.654 INFO find dest for source 1 for extent at index 113, mrl: flush_mismatch, : mend
34582 Sep 22 23:15:03.654 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34583 Sep 22 23:15:03.654 INFO Extent 114 has flush number mismatch, : mend
34584 Sep 22 23:15:03.654 INFO First source client ID for extent 114, mrl: flush_mismatch, : mend
34585 Sep 22 23:15:03.654 INFO extent:114 gens: 0 1 1, mrl: flush_mismatch, : mend
34586 Sep 22 23:15:03.654 INFO extent:114 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34587 Sep 22 23:15:03.654 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34588 Sep 22 23:15:03.654 INFO extent:114 dirty: false false false, mrl: flush_mismatch, : mend
34589 Sep 22 23:15:03.654 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34590 Sep 22 23:15:03.654 INFO find dest for source 1 for extent at index 114, mrl: flush_mismatch, : mend
34591 Sep 22 23:15:03.654 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34592 Sep 22 23:15:03.654 INFO Extent 115 has flush number mismatch, : mend
34593 Sep 22 23:15:03.654 INFO First source client ID for extent 115, mrl: flush_mismatch, : mend
34594 Sep 22 23:15:03.654 INFO extent:115 gens: 0 1 1, mrl: flush_mismatch, : mend
34595 Sep 22 23:15:03.654 INFO extent:115 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34596 Sep 22 23:15:03.654 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34597 Sep 22 23:15:03.654 INFO extent:115 dirty: false false false, mrl: flush_mismatch, : mend
34598 Sep 22 23:15:03.654 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34599 Sep 22 23:15:03.654 INFO find dest for source 1 for extent at index 115, mrl: flush_mismatch, : mend
34600 Sep 22 23:15:03.654 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34601 Sep 22 23:15:03.654 INFO Extent 116 has flush number mismatch, : mend
34602 Sep 22 23:15:03.654 INFO First source client ID for extent 116, mrl: flush_mismatch, : mend
34603 Sep 22 23:15:03.654 INFO extent:116 gens: 0 1 1, mrl: flush_mismatch, : mend
34604 Sep 22 23:15:03.654 INFO extent:116 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34605 Sep 22 23:15:03.654 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34606 Sep 22 23:15:03.654 INFO extent:116 dirty: false false false, mrl: flush_mismatch, : mend
34607 Sep 22 23:15:03.654 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34608 Sep 22 23:15:03.654 INFO find dest for source 1 for extent at index 116, mrl: flush_mismatch, : mend
34609 Sep 22 23:15:03.654 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34610 Sep 22 23:15:03.654 INFO Extent 117 has flush number mismatch, : mend
34611 Sep 22 23:15:03.654 INFO First source client ID for extent 117, mrl: flush_mismatch, : mend
34612 Sep 22 23:15:03.654 INFO extent:117 gens: 0 1 1, mrl: flush_mismatch, : mend
34613 Sep 22 23:15:03.654 INFO extent:117 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34614 Sep 22 23:15:03.654 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34615 Sep 22 23:15:03.654 INFO extent:117 dirty: false false false, mrl: flush_mismatch, : mend
34616 Sep 22 23:15:03.654 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34617 Sep 22 23:15:03.654 INFO find dest for source 1 for extent at index 117, mrl: flush_mismatch, : mend
34618 Sep 22 23:15:03.654 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34619 Sep 22 23:15:03.654 INFO Extent 118 has flush number mismatch, : mend
34620 Sep 22 23:15:03.654 INFO First source client ID for extent 118, mrl: flush_mismatch, : mend
34621 Sep 22 23:15:03.654 INFO extent:118 gens: 0 1 1, mrl: flush_mismatch, : mend
34622 Sep 22 23:15:03.654 INFO extent:118 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34623 Sep 22 23:15:03.654 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34624 Sep 22 23:15:03.654 INFO extent:118 dirty: false false false, mrl: flush_mismatch, : mend
34625 Sep 22 23:15:03.654 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34626 Sep 22 23:15:03.654 INFO find dest for source 1 for extent at index 118, mrl: flush_mismatch, : mend
34627 Sep 22 23:15:03.654 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34628 Sep 22 23:15:03.654 INFO Extent 119 has flush number mismatch, : mend
34629 Sep 22 23:15:03.654 INFO First source client ID for extent 119, mrl: flush_mismatch, : mend
34630 Sep 22 23:15:03.654 INFO extent:119 gens: 0 1 1, mrl: flush_mismatch, : mend
34631 Sep 22 23:15:03.654 INFO extent:119 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34632 Sep 22 23:15:03.654 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34633 Sep 22 23:15:03.654 INFO extent:119 dirty: false false false, mrl: flush_mismatch, : mend
34634 Sep 22 23:15:03.654 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34635 Sep 22 23:15:03.654 INFO find dest for source 1 for extent at index 119, mrl: flush_mismatch, : mend
34636 Sep 22 23:15:03.654 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34637 Sep 22 23:15:03.654 INFO Extent 120 has flush number mismatch, : mend
34638 Sep 22 23:15:03.654 INFO First source client ID for extent 120, mrl: flush_mismatch, : mend
34639 Sep 22 23:15:03.654 INFO extent:120 gens: 0 1 1, mrl: flush_mismatch, : mend
34640 Sep 22 23:15:03.654 INFO extent:120 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34641 Sep 22 23:15:03.654 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34642 Sep 22 23:15:03.654 INFO extent:120 dirty: false false false, mrl: flush_mismatch, : mend
34643 Sep 22 23:15:03.654 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34644 Sep 22 23:15:03.654 INFO find dest for source 1 for extent at index 120, mrl: flush_mismatch, : mend
34645 Sep 22 23:15:03.654 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34646 Sep 22 23:15:03.654 INFO Extent 121 has flush number mismatch, : mend
34647 Sep 22 23:15:03.654 INFO First source client ID for extent 121, mrl: flush_mismatch, : mend
34648 Sep 22 23:15:03.654 INFO extent:121 gens: 0 1 1, mrl: flush_mismatch, : mend
34649 Sep 22 23:15:03.654 INFO extent:121 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34650 Sep 22 23:15:03.654 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34651 Sep 22 23:15:03.654 INFO extent:121 dirty: false false false, mrl: flush_mismatch, : mend
34652 Sep 22 23:15:03.654 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34653 Sep 22 23:15:03.654 INFO find dest for source 1 for extent at index 121, mrl: flush_mismatch, : mend
34654 Sep 22 23:15:03.654 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34655 Sep 22 23:15:03.654 INFO Extent 122 has flush number mismatch, : mend
34656 Sep 22 23:15:03.654 INFO First source client ID for extent 122, mrl: flush_mismatch, : mend
34657 Sep 22 23:15:03.654 INFO extent:122 gens: 0 1 1, mrl: flush_mismatch, : mend
34658 Sep 22 23:15:03.654 INFO extent:122 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34659 Sep 22 23:15:03.654 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34660 Sep 22 23:15:03.654 INFO extent:122 dirty: false false false, mrl: flush_mismatch, : mend
34661 Sep 22 23:15:03.654 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34662 Sep 22 23:15:03.654 INFO find dest for source 1 for extent at index 122, mrl: flush_mismatch, : mend
34663 Sep 22 23:15:03.654 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34664 Sep 22 23:15:03.655 INFO Extent 123 has flush number mismatch, : mend
34665 Sep 22 23:15:03.655 INFO First source client ID for extent 123, mrl: flush_mismatch, : mend
34666 Sep 22 23:15:03.655 INFO extent:123 gens: 0 1 1, mrl: flush_mismatch, : mend
34667 Sep 22 23:15:03.655 INFO extent:123 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34668 Sep 22 23:15:03.655 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34669 Sep 22 23:15:03.655 INFO extent:123 dirty: false false false, mrl: flush_mismatch, : mend
34670 Sep 22 23:15:03.655 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34671 Sep 22 23:15:03.655 INFO find dest for source 1 for extent at index 123, mrl: flush_mismatch, : mend
34672 Sep 22 23:15:03.655 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34673 Sep 22 23:15:03.655 INFO Extent 124 has flush number mismatch, : mend
34674 Sep 22 23:15:03.655 INFO First source client ID for extent 124, mrl: flush_mismatch, : mend
34675 Sep 22 23:15:03.655 INFO extent:124 gens: 0 1 1, mrl: flush_mismatch, : mend
34676 Sep 22 23:15:03.655 INFO extent:124 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34677 Sep 22 23:15:03.655 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34678 Sep 22 23:15:03.655 INFO extent:124 dirty: false false false, mrl: flush_mismatch, : mend
34679 Sep 22 23:15:03.655 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34680 Sep 22 23:15:03.655 INFO find dest for source 1 for extent at index 124, mrl: flush_mismatch, : mend
34681 Sep 22 23:15:03.655 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34682 Sep 22 23:15:03.655 INFO Extent 125 has flush number mismatch, : mend
34683 Sep 22 23:15:03.655 INFO First source client ID for extent 125, mrl: flush_mismatch, : mend
34684 Sep 22 23:15:03.655 INFO extent:125 gens: 0 1 1, mrl: flush_mismatch, : mend
34685 Sep 22 23:15:03.655 INFO extent:125 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34686 Sep 22 23:15:03.655 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34687 Sep 22 23:15:03.655 INFO extent:125 dirty: false false false, mrl: flush_mismatch, : mend
34688 Sep 22 23:15:03.655 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34689 Sep 22 23:15:03.655 INFO find dest for source 1 for extent at index 125, mrl: flush_mismatch, : mend
34690 Sep 22 23:15:03.655 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34691 Sep 22 23:15:03.655 INFO Extent 126 has flush number mismatch, : mend
34692 Sep 22 23:15:03.655 INFO First source client ID for extent 126, mrl: flush_mismatch, : mend
34693 Sep 22 23:15:03.655 INFO extent:126 gens: 0 1 1, mrl: flush_mismatch, : mend
34694 Sep 22 23:15:03.655 INFO extent:126 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34695 Sep 22 23:15:03.655 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34696 Sep 22 23:15:03.655 INFO extent:126 dirty: false false false, mrl: flush_mismatch, : mend
34697 Sep 22 23:15:03.655 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34698 Sep 22 23:15:03.655 INFO find dest for source 1 for extent at index 126, mrl: flush_mismatch, : mend
34699 Sep 22 23:15:03.655 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34700 Sep 22 23:15:03.655 INFO Extent 127 has flush number mismatch, : mend
34701 Sep 22 23:15:03.655 INFO First source client ID for extent 127, mrl: flush_mismatch, : mend
34702 Sep 22 23:15:03.655 INFO extent:127 gens: 0 1 1, mrl: flush_mismatch, : mend
34703 Sep 22 23:15:03.655 INFO extent:127 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34704 Sep 22 23:15:03.655 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34705 Sep 22 23:15:03.655 INFO extent:127 dirty: false false false, mrl: flush_mismatch, : mend
34706 Sep 22 23:15:03.655 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34707 Sep 22 23:15:03.655 INFO find dest for source 1 for extent at index 127, mrl: flush_mismatch, : mend
34708 Sep 22 23:15:03.655 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34709 Sep 22 23:15:03.655 INFO Extent 128 has flush number mismatch, : mend
34710 Sep 22 23:15:03.655 INFO First source client ID for extent 128, mrl: flush_mismatch, : mend
34711 Sep 22 23:15:03.655 INFO extent:128 gens: 0 1 1, mrl: flush_mismatch, : mend
34712 Sep 22 23:15:03.655 INFO extent:128 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34713 Sep 22 23:15:03.655 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34714 Sep 22 23:15:03.655 INFO extent:128 dirty: false false false, mrl: flush_mismatch, : mend
34715 Sep 22 23:15:03.655 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34716 Sep 22 23:15:03.655 INFO find dest for source 1 for extent at index 128, mrl: flush_mismatch, : mend
34717 Sep 22 23:15:03.655 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34718 Sep 22 23:15:03.655 INFO Extent 129 has flush number mismatch, : mend
34719 Sep 22 23:15:03.655 INFO First source client ID for extent 129, mrl: flush_mismatch, : mend
34720 Sep 22 23:15:03.655 INFO extent:129 gens: 0 1 1, mrl: flush_mismatch, : mend
34721 Sep 22 23:15:03.655 INFO extent:129 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34722 Sep 22 23:15:03.655 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34723 Sep 22 23:15:03.655 INFO extent:129 dirty: false false false, mrl: flush_mismatch, : mend
34724 Sep 22 23:15:03.655 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34725 Sep 22 23:15:03.655 INFO find dest for source 1 for extent at index 129, mrl: flush_mismatch, : mend
34726 Sep 22 23:15:03.655 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34727 Sep 22 23:15:03.655 INFO Extent 130 has flush number mismatch, : mend
34728 Sep 22 23:15:03.655 INFO First source client ID for extent 130, mrl: flush_mismatch, : mend
34729 Sep 22 23:15:03.655 INFO extent:130 gens: 0 1 1, mrl: flush_mismatch, : mend
34730 Sep 22 23:15:03.655 INFO extent:130 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34731 Sep 22 23:15:03.655 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34732 Sep 22 23:15:03.655 INFO extent:130 dirty: false false false, mrl: flush_mismatch, : mend
34733 Sep 22 23:15:03.655 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34734 Sep 22 23:15:03.655 INFO find dest for source 1 for extent at index 130, mrl: flush_mismatch, : mend
34735 Sep 22 23:15:03.655 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34736 Sep 22 23:15:03.655 INFO Extent 131 has flush number mismatch, : mend
34737 Sep 22 23:15:03.655 INFO First source client ID for extent 131, mrl: flush_mismatch, : mend
34738 Sep 22 23:15:03.655 INFO extent:131 gens: 0 1 1, mrl: flush_mismatch, : mend
34739 Sep 22 23:15:03.655 INFO extent:131 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34740 Sep 22 23:15:03.655 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34741 Sep 22 23:15:03.655 INFO extent:131 dirty: false false false, mrl: flush_mismatch, : mend
34742 Sep 22 23:15:03.655 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34743 Sep 22 23:15:03.655 INFO find dest for source 1 for extent at index 131, mrl: flush_mismatch, : mend
34744 Sep 22 23:15:03.655 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34745 Sep 22 23:15:03.655 INFO Extent 132 has flush number mismatch, : mend
34746 Sep 22 23:15:03.655 INFO First source client ID for extent 132, mrl: flush_mismatch, : mend
34747 Sep 22 23:15:03.655 INFO extent:132 gens: 0 1 1, mrl: flush_mismatch, : mend
34748 Sep 22 23:15:03.655 INFO extent:132 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34749 Sep 22 23:15:03.655 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34750 Sep 22 23:15:03.655 INFO extent:132 dirty: false false false, mrl: flush_mismatch, : mend
34751 Sep 22 23:15:03.655 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34752 Sep 22 23:15:03.655 INFO find dest for source 1 for extent at index 132, mrl: flush_mismatch, : mend
34753 Sep 22 23:15:03.655 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34754 Sep 22 23:15:03.656 INFO Extent 133 has flush number mismatch, : mend
34755 Sep 22 23:15:03.656 INFO First source client ID for extent 133, mrl: flush_mismatch, : mend
34756 Sep 22 23:15:03.656 INFO extent:133 gens: 0 1 1, mrl: flush_mismatch, : mend
34757 Sep 22 23:15:03.656 INFO extent:133 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34758 Sep 22 23:15:03.656 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34759 Sep 22 23:15:03.656 INFO extent:133 dirty: false false false, mrl: flush_mismatch, : mend
34760 Sep 22 23:15:03.656 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34761 Sep 22 23:15:03.656 INFO find dest for source 1 for extent at index 133, mrl: flush_mismatch, : mend
34762 Sep 22 23:15:03.656 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34763 Sep 22 23:15:03.656 INFO Extent 134 has flush number mismatch, : mend
34764 Sep 22 23:15:03.656 INFO First source client ID for extent 134, mrl: flush_mismatch, : mend
34765 Sep 22 23:15:03.656 INFO extent:134 gens: 0 1 1, mrl: flush_mismatch, : mend
34766 Sep 22 23:15:03.656 INFO extent:134 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34767 Sep 22 23:15:03.656 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34768 Sep 22 23:15:03.656 INFO extent:134 dirty: false false false, mrl: flush_mismatch, : mend
34769 Sep 22 23:15:03.656 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34770 Sep 22 23:15:03.656 INFO find dest for source 1 for extent at index 134, mrl: flush_mismatch, : mend
34771 Sep 22 23:15:03.656 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34772 Sep 22 23:15:03.656 INFO Extent 135 has flush number mismatch, : mend
34773 Sep 22 23:15:03.656 INFO First source client ID for extent 135, mrl: flush_mismatch, : mend
34774 Sep 22 23:15:03.656 INFO extent:135 gens: 0 1 1, mrl: flush_mismatch, : mend
34775 Sep 22 23:15:03.656 INFO extent:135 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34776 Sep 22 23:15:03.656 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34777 Sep 22 23:15:03.656 INFO extent:135 dirty: false false false, mrl: flush_mismatch, : mend
34778 Sep 22 23:15:03.656 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34779 Sep 22 23:15:03.656 INFO find dest for source 1 for extent at index 135, mrl: flush_mismatch, : mend
34780 Sep 22 23:15:03.656 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34781 Sep 22 23:15:03.656 INFO Extent 136 has flush number mismatch, : mend
34782 Sep 22 23:15:03.656 INFO First source client ID for extent 136, mrl: flush_mismatch, : mend
34783 Sep 22 23:15:03.656 INFO extent:136 gens: 0 1 1, mrl: flush_mismatch, : mend
34784 Sep 22 23:15:03.656 INFO extent:136 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34785 Sep 22 23:15:03.656 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34786 Sep 22 23:15:03.656 INFO extent:136 dirty: false false false, mrl: flush_mismatch, : mend
34787 Sep 22 23:15:03.656 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34788 Sep 22 23:15:03.656 INFO find dest for source 1 for extent at index 136, mrl: flush_mismatch, : mend
34789 Sep 22 23:15:03.656 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34790 Sep 22 23:15:03.656 INFO Extent 137 has flush number mismatch, : mend
34791 Sep 22 23:15:03.656 INFO First source client ID for extent 137, mrl: flush_mismatch, : mend
34792 Sep 22 23:15:03.656 INFO extent:137 gens: 0 1 1, mrl: flush_mismatch, : mend
34793 Sep 22 23:15:03.656 INFO extent:137 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34794 Sep 22 23:15:03.656 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34795 Sep 22 23:15:03.656 INFO extent:137 dirty: false false false, mrl: flush_mismatch, : mend
34796 Sep 22 23:15:03.656 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34797 Sep 22 23:15:03.656 INFO find dest for source 1 for extent at index 137, mrl: flush_mismatch, : mend
34798 Sep 22 23:15:03.656 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34799 Sep 22 23:15:03.656 INFO Extent 138 has flush number mismatch, : mend
34800 Sep 22 23:15:03.656 INFO First source client ID for extent 138, mrl: flush_mismatch, : mend
34801 Sep 22 23:15:03.656 INFO extent:138 gens: 0 1 1, mrl: flush_mismatch, : mend
34802 Sep 22 23:15:03.656 INFO extent:138 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34803 Sep 22 23:15:03.656 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34804 Sep 22 23:15:03.656 INFO extent:138 dirty: false false false, mrl: flush_mismatch, : mend
34805 Sep 22 23:15:03.656 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34806 Sep 22 23:15:03.656 INFO find dest for source 1 for extent at index 138, mrl: flush_mismatch, : mend
34807 Sep 22 23:15:03.656 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34808 Sep 22 23:15:03.656 INFO Extent 139 has flush number mismatch, : mend
34809 Sep 22 23:15:03.656 INFO First source client ID for extent 139, mrl: flush_mismatch, : mend
34810 Sep 22 23:15:03.656 INFO extent:139 gens: 0 1 1, mrl: flush_mismatch, : mend
34811 Sep 22 23:15:03.656 INFO extent:139 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34812 Sep 22 23:15:03.656 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34813 Sep 22 23:15:03.656 INFO extent:139 dirty: false false false, mrl: flush_mismatch, : mend
34814 Sep 22 23:15:03.656 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34815 Sep 22 23:15:03.656 INFO find dest for source 1 for extent at index 139, mrl: flush_mismatch, : mend
34816 Sep 22 23:15:03.656 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34817 Sep 22 23:15:03.656 INFO Extent 140 has flush number mismatch, : mend
34818 Sep 22 23:15:03.656 INFO First source client ID for extent 140, mrl: flush_mismatch, : mend
34819 Sep 22 23:15:03.656 INFO extent:140 gens: 0 1 1, mrl: flush_mismatch, : mend
34820 Sep 22 23:15:03.656 INFO extent:140 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34821 Sep 22 23:15:03.656 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34822 Sep 22 23:15:03.656 INFO extent:140 dirty: false false false, mrl: flush_mismatch, : mend
34823 Sep 22 23:15:03.656 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34824 Sep 22 23:15:03.656 INFO find dest for source 1 for extent at index 140, mrl: flush_mismatch, : mend
34825 Sep 22 23:15:03.656 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34826 Sep 22 23:15:03.656 INFO Extent 141 has flush number mismatch, : mend
34827 Sep 22 23:15:03.656 INFO First source client ID for extent 141, mrl: flush_mismatch, : mend
34828 Sep 22 23:15:03.656 INFO extent:141 gens: 0 1 1, mrl: flush_mismatch, : mend
34829 Sep 22 23:15:03.656 INFO extent:141 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34830 Sep 22 23:15:03.656 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34831 Sep 22 23:15:03.656 INFO extent:141 dirty: false false false, mrl: flush_mismatch, : mend
34832 Sep 22 23:15:03.656 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34833 Sep 22 23:15:03.656 INFO find dest for source 1 for extent at index 141, mrl: flush_mismatch, : mend
34834 Sep 22 23:15:03.656 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34835 Sep 22 23:15:03.656 INFO Extent 142 has flush number mismatch, : mend
34836 Sep 22 23:15:03.656 INFO First source client ID for extent 142, mrl: flush_mismatch, : mend
34837 Sep 22 23:15:03.656 INFO extent:142 gens: 0 1 1, mrl: flush_mismatch, : mend
34838 Sep 22 23:15:03.656 INFO extent:142 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34839 Sep 22 23:15:03.656 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34840 Sep 22 23:15:03.656 INFO extent:142 dirty: false false false, mrl: flush_mismatch, : mend
34841 Sep 22 23:15:03.656 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34842 Sep 22 23:15:03.656 INFO find dest for source 1 for extent at index 142, mrl: flush_mismatch, : mend
34843 Sep 22 23:15:03.656 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34844 Sep 22 23:15:03.657 INFO Extent 143 has flush number mismatch, : mend
34845 Sep 22 23:15:03.657 INFO First source client ID for extent 143, mrl: flush_mismatch, : mend
34846 Sep 22 23:15:03.657 INFO extent:143 gens: 0 1 1, mrl: flush_mismatch, : mend
34847 Sep 22 23:15:03.657 INFO extent:143 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34848 Sep 22 23:15:03.657 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34849 Sep 22 23:15:03.657 INFO extent:143 dirty: false false false, mrl: flush_mismatch, : mend
34850 Sep 22 23:15:03.657 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34851 Sep 22 23:15:03.657 INFO find dest for source 1 for extent at index 143, mrl: flush_mismatch, : mend
34852 Sep 22 23:15:03.657 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34853 Sep 22 23:15:03.657 INFO Extent 144 has flush number mismatch, : mend
34854 Sep 22 23:15:03.657 INFO First source client ID for extent 144, mrl: flush_mismatch, : mend
34855 Sep 22 23:15:03.657 INFO extent:144 gens: 0 1 1, mrl: flush_mismatch, : mend
34856 Sep 22 23:15:03.657 INFO extent:144 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34857 Sep 22 23:15:03.657 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34858 Sep 22 23:15:03.657 INFO extent:144 dirty: false false false, mrl: flush_mismatch, : mend
34859 Sep 22 23:15:03.657 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34860 Sep 22 23:15:03.657 INFO find dest for source 1 for extent at index 144, mrl: flush_mismatch, : mend
34861 Sep 22 23:15:03.657 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34862 Sep 22 23:15:03.657 INFO Extent 145 has flush number mismatch, : mend
34863 Sep 22 23:15:03.657 INFO First source client ID for extent 145, mrl: flush_mismatch, : mend
34864 Sep 22 23:15:03.657 INFO extent:145 gens: 0 1 1, mrl: flush_mismatch, : mend
34865 Sep 22 23:15:03.657 INFO extent:145 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34866 Sep 22 23:15:03.657 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34867 Sep 22 23:15:03.657 INFO extent:145 dirty: false false false, mrl: flush_mismatch, : mend
34868 Sep 22 23:15:03.657 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34869 Sep 22 23:15:03.657 INFO find dest for source 1 for extent at index 145, mrl: flush_mismatch, : mend
34870 Sep 22 23:15:03.657 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34871 Sep 22 23:15:03.657 INFO Extent 146 has flush number mismatch, : mend
34872 Sep 22 23:15:03.657 INFO First source client ID for extent 146, mrl: flush_mismatch, : mend
34873 Sep 22 23:15:03.657 INFO extent:146 gens: 0 1 1, mrl: flush_mismatch, : mend
34874 Sep 22 23:15:03.657 INFO extent:146 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34875 Sep 22 23:15:03.657 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34876 Sep 22 23:15:03.657 INFO extent:146 dirty: false false false, mrl: flush_mismatch, : mend
34877 Sep 22 23:15:03.657 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34878 Sep 22 23:15:03.657 INFO find dest for source 1 for extent at index 146, mrl: flush_mismatch, : mend
34879 Sep 22 23:15:03.657 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34880 Sep 22 23:15:03.657 INFO Extent 147 has flush number mismatch, : mend
34881 Sep 22 23:15:03.657 INFO First source client ID for extent 147, mrl: flush_mismatch, : mend
34882 Sep 22 23:15:03.657 INFO extent:147 gens: 0 1 1, mrl: flush_mismatch, : mend
34883 Sep 22 23:15:03.657 INFO extent:147 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34884 Sep 22 23:15:03.657 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34885 Sep 22 23:15:03.657 INFO extent:147 dirty: false false false, mrl: flush_mismatch, : mend
34886 Sep 22 23:15:03.657 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34887 Sep 22 23:15:03.657 INFO find dest for source 1 for extent at index 147, mrl: flush_mismatch, : mend
34888 Sep 22 23:15:03.657 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34889 Sep 22 23:15:03.657 INFO Extent 148 has flush number mismatch, : mend
34890 Sep 22 23:15:03.657 INFO First source client ID for extent 148, mrl: flush_mismatch, : mend
34891 Sep 22 23:15:03.657 INFO extent:148 gens: 0 1 1, mrl: flush_mismatch, : mend
34892 Sep 22 23:15:03.657 INFO extent:148 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34893 Sep 22 23:15:03.657 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34894 Sep 22 23:15:03.657 INFO extent:148 dirty: false false false, mrl: flush_mismatch, : mend
34895 Sep 22 23:15:03.657 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34896 Sep 22 23:15:03.657 INFO find dest for source 1 for extent at index 148, mrl: flush_mismatch, : mend
34897 Sep 22 23:15:03.657 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34898 Sep 22 23:15:03.657 INFO Extent 149 has flush number mismatch, : mend
34899 Sep 22 23:15:03.657 INFO First source client ID for extent 149, mrl: flush_mismatch, : mend
34900 Sep 22 23:15:03.657 INFO extent:149 gens: 0 1 1, mrl: flush_mismatch, : mend
34901 Sep 22 23:15:03.657 INFO extent:149 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34902 Sep 22 23:15:03.657 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34903 Sep 22 23:15:03.657 INFO extent:149 dirty: false false false, mrl: flush_mismatch, : mend
34904 Sep 22 23:15:03.657 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34905 Sep 22 23:15:03.657 INFO find dest for source 1 for extent at index 149, mrl: flush_mismatch, : mend
34906 Sep 22 23:15:03.657 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34907 Sep 22 23:15:03.657 INFO Extent 150 has flush number mismatch, : mend
34908 Sep 22 23:15:03.657 INFO First source client ID for extent 150, mrl: flush_mismatch, : mend
34909 Sep 22 23:15:03.657 INFO extent:150 gens: 0 1 1, mrl: flush_mismatch, : mend
34910 Sep 22 23:15:03.657 INFO extent:150 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34911 Sep 22 23:15:03.657 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34912 Sep 22 23:15:03.657 INFO extent:150 dirty: false false false, mrl: flush_mismatch, : mend
34913 Sep 22 23:15:03.657 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34914 Sep 22 23:15:03.657 INFO find dest for source 1 for extent at index 150, mrl: flush_mismatch, : mend
34915 Sep 22 23:15:03.657 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34916 Sep 22 23:15:03.657 INFO Extent 151 has flush number mismatch, : mend
34917 Sep 22 23:15:03.657 INFO First source client ID for extent 151, mrl: flush_mismatch, : mend
34918 Sep 22 23:15:03.657 INFO extent:151 gens: 0 1 1, mrl: flush_mismatch, : mend
34919 Sep 22 23:15:03.657 INFO extent:151 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34920 Sep 22 23:15:03.657 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34921 Sep 22 23:15:03.657 INFO extent:151 dirty: false false false, mrl: flush_mismatch, : mend
34922 Sep 22 23:15:03.657 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34923 Sep 22 23:15:03.657 INFO find dest for source 1 for extent at index 151, mrl: flush_mismatch, : mend
34924 Sep 22 23:15:03.657 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34925 Sep 22 23:15:03.657 INFO Extent 152 has flush number mismatch, : mend
34926 Sep 22 23:15:03.657 INFO First source client ID for extent 152, mrl: flush_mismatch, : mend
34927 Sep 22 23:15:03.657 INFO extent:152 gens: 0 1 1, mrl: flush_mismatch, : mend
34928 Sep 22 23:15:03.657 INFO extent:152 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34929 Sep 22 23:15:03.657 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34930 Sep 22 23:15:03.657 INFO extent:152 dirty: false false false, mrl: flush_mismatch, : mend
34931 Sep 22 23:15:03.657 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34932 Sep 22 23:15:03.657 INFO find dest for source 1 for extent at index 152, mrl: flush_mismatch, : mend
34933 Sep 22 23:15:03.658 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34934 Sep 22 23:15:03.658 INFO Extent 153 has flush number mismatch, : mend
34935 Sep 22 23:15:03.658 INFO First source client ID for extent 153, mrl: flush_mismatch, : mend
34936 Sep 22 23:15:03.658 INFO extent:153 gens: 0 1 1, mrl: flush_mismatch, : mend
34937 Sep 22 23:15:03.658 INFO extent:153 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34938 Sep 22 23:15:03.658 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34939 Sep 22 23:15:03.658 INFO extent:153 dirty: false false false, mrl: flush_mismatch, : mend
34940 Sep 22 23:15:03.658 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34941 Sep 22 23:15:03.658 INFO find dest for source 1 for extent at index 153, mrl: flush_mismatch, : mend
34942 Sep 22 23:15:03.658 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34943 Sep 22 23:15:03.658 INFO Extent 154 has flush number mismatch, : mend
34944 Sep 22 23:15:03.658 INFO First source client ID for extent 154, mrl: flush_mismatch, : mend
34945 Sep 22 23:15:03.658 INFO extent:154 gens: 0 1 1, mrl: flush_mismatch, : mend
34946 Sep 22 23:15:03.658 INFO extent:154 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34947 Sep 22 23:15:03.658 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34948 Sep 22 23:15:03.658 INFO extent:154 dirty: false false false, mrl: flush_mismatch, : mend
34949 Sep 22 23:15:03.658 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34950 Sep 22 23:15:03.658 INFO find dest for source 1 for extent at index 154, mrl: flush_mismatch, : mend
34951 Sep 22 23:15:03.658 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34952 Sep 22 23:15:03.658 INFO Extent 155 has flush number mismatch, : mend
34953 Sep 22 23:15:03.658 INFO First source client ID for extent 155, mrl: flush_mismatch, : mend
34954 Sep 22 23:15:03.658 INFO extent:155 gens: 0 1 1, mrl: flush_mismatch, : mend
34955 Sep 22 23:15:03.658 INFO extent:155 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34956 Sep 22 23:15:03.658 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34957 Sep 22 23:15:03.658 INFO extent:155 dirty: false false false, mrl: flush_mismatch, : mend
34958 Sep 22 23:15:03.658 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34959 Sep 22 23:15:03.658 INFO find dest for source 1 for extent at index 155, mrl: flush_mismatch, : mend
34960 Sep 22 23:15:03.658 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34961 Sep 22 23:15:03.658 INFO Extent 156 has flush number mismatch, : mend
34962 Sep 22 23:15:03.658 INFO First source client ID for extent 156, mrl: flush_mismatch, : mend
34963 Sep 22 23:15:03.658 INFO extent:156 gens: 0 1 1, mrl: flush_mismatch, : mend
34964 Sep 22 23:15:03.658 INFO extent:156 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34965 Sep 22 23:15:03.658 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34966 Sep 22 23:15:03.658 INFO extent:156 dirty: false false false, mrl: flush_mismatch, : mend
34967 Sep 22 23:15:03.658 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34968 Sep 22 23:15:03.658 INFO find dest for source 1 for extent at index 156, mrl: flush_mismatch, : mend
34969 Sep 22 23:15:03.658 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34970 Sep 22 23:15:03.658 INFO Extent 157 has flush number mismatch, : mend
34971 Sep 22 23:15:03.658 INFO First source client ID for extent 157, mrl: flush_mismatch, : mend
34972 Sep 22 23:15:03.658 INFO extent:157 gens: 0 1 1, mrl: flush_mismatch, : mend
34973 Sep 22 23:15:03.658 INFO extent:157 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34974 Sep 22 23:15:03.658 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34975 Sep 22 23:15:03.658 INFO extent:157 dirty: false false false, mrl: flush_mismatch, : mend
34976 Sep 22 23:15:03.658 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34977 Sep 22 23:15:03.658 INFO find dest for source 1 for extent at index 157, mrl: flush_mismatch, : mend
34978 Sep 22 23:15:03.658 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34979 Sep 22 23:15:03.658 INFO Extent 158 has flush number mismatch, : mend
34980 Sep 22 23:15:03.658 INFO First source client ID for extent 158, mrl: flush_mismatch, : mend
34981 Sep 22 23:15:03.658 INFO extent:158 gens: 0 1 1, mrl: flush_mismatch, : mend
34982 Sep 22 23:15:03.658 INFO extent:158 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34983 Sep 22 23:15:03.658 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34984 Sep 22 23:15:03.658 INFO extent:158 dirty: false false false, mrl: flush_mismatch, : mend
34985 Sep 22 23:15:03.658 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34986 Sep 22 23:15:03.658 INFO find dest for source 1 for extent at index 158, mrl: flush_mismatch, : mend
34987 Sep 22 23:15:03.658 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34988 Sep 22 23:15:03.658 INFO Extent 159 has flush number mismatch, : mend
34989 Sep 22 23:15:03.658 INFO First source client ID for extent 159, mrl: flush_mismatch, : mend
34990 Sep 22 23:15:03.658 INFO extent:159 gens: 0 1 1, mrl: flush_mismatch, : mend
34991 Sep 22 23:15:03.658 INFO extent:159 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34992 Sep 22 23:15:03.658 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34993 Sep 22 23:15:03.658 INFO extent:159 dirty: false false false, mrl: flush_mismatch, : mend
34994 Sep 22 23:15:03.658 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
34995 Sep 22 23:15:03.658 INFO find dest for source 1 for extent at index 159, mrl: flush_mismatch, : mend
34996 Sep 22 23:15:03.658 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
34997 Sep 22 23:15:03.658 INFO Extent 160 has flush number mismatch, : mend
34998 Sep 22 23:15:03.658 INFO First source client ID for extent 160, mrl: flush_mismatch, : mend
34999 Sep 22 23:15:03.658 INFO extent:160 gens: 0 1 1, mrl: flush_mismatch, : mend
35000 Sep 22 23:15:03.658 INFO extent:160 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35001 Sep 22 23:15:03.658 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35002 Sep 22 23:15:03.658 INFO extent:160 dirty: false false false, mrl: flush_mismatch, : mend
35003 Sep 22 23:15:03.658 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35004 Sep 22 23:15:03.658 INFO find dest for source 1 for extent at index 160, mrl: flush_mismatch, : mend
35005 Sep 22 23:15:03.658 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35006 Sep 22 23:15:03.658 INFO Extent 161 has flush number mismatch, : mend
35007 Sep 22 23:15:03.658 INFO First source client ID for extent 161, mrl: flush_mismatch, : mend
35008 Sep 22 23:15:03.658 INFO extent:161 gens: 0 1 1, mrl: flush_mismatch, : mend
35009 Sep 22 23:15:03.658 INFO extent:161 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35010 Sep 22 23:15:03.658 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35011 Sep 22 23:15:03.658 INFO extent:161 dirty: false false false, mrl: flush_mismatch, : mend
35012 Sep 22 23:15:03.658 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35013 Sep 22 23:15:03.658 INFO find dest for source 1 for extent at index 161, mrl: flush_mismatch, : mend
35014 Sep 22 23:15:03.658 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35015 Sep 22 23:15:03.658 INFO Extent 162 has flush number mismatch, : mend
35016 Sep 22 23:15:03.658 INFO First source client ID for extent 162, mrl: flush_mismatch, : mend
35017 Sep 22 23:15:03.658 INFO extent:162 gens: 0 1 1, mrl: flush_mismatch, : mend
35018 Sep 22 23:15:03.658 INFO extent:162 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35019 Sep 22 23:15:03.658 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35020 Sep 22 23:15:03.658 INFO extent:162 dirty: false false false, mrl: flush_mismatch, : mend
35021 Sep 22 23:15:03.658 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35022 Sep 22 23:15:03.658 INFO find dest for source 1 for extent at index 162, mrl: flush_mismatch, : mend
35023 Sep 22 23:15:03.658 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35024 Sep 22 23:15:03.659 INFO Extent 163 has flush number mismatch, : mend
35025 Sep 22 23:15:03.659 INFO First source client ID for extent 163, mrl: flush_mismatch, : mend
35026 Sep 22 23:15:03.659 INFO extent:163 gens: 0 1 1, mrl: flush_mismatch, : mend
35027 Sep 22 23:15:03.659 INFO extent:163 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35028 Sep 22 23:15:03.659 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35029 Sep 22 23:15:03.659 INFO extent:163 dirty: false false false, mrl: flush_mismatch, : mend
35030 Sep 22 23:15:03.659 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35031 Sep 22 23:15:03.659 INFO find dest for source 1 for extent at index 163, mrl: flush_mismatch, : mend
35032 Sep 22 23:15:03.659 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35033 Sep 22 23:15:03.659 INFO Extent 164 has flush number mismatch, : mend
35034 Sep 22 23:15:03.659 INFO First source client ID for extent 164, mrl: flush_mismatch, : mend
35035 Sep 22 23:15:03.659 INFO extent:164 gens: 0 1 1, mrl: flush_mismatch, : mend
35036 Sep 22 23:15:03.659 INFO extent:164 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35037 Sep 22 23:15:03.659 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35038 Sep 22 23:15:03.659 INFO extent:164 dirty: false false false, mrl: flush_mismatch, : mend
35039 Sep 22 23:15:03.659 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35040 Sep 22 23:15:03.659 INFO find dest for source 1 for extent at index 164, mrl: flush_mismatch, : mend
35041 Sep 22 23:15:03.659 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35042 Sep 22 23:15:03.659 INFO Extent 165 has flush number mismatch, : mend
35043 Sep 22 23:15:03.659 INFO First source client ID for extent 165, mrl: flush_mismatch, : mend
35044 Sep 22 23:15:03.659 INFO extent:165 gens: 0 1 1, mrl: flush_mismatch, : mend
35045 Sep 22 23:15:03.659 INFO extent:165 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35046 Sep 22 23:15:03.659 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35047 Sep 22 23:15:03.659 INFO extent:165 dirty: false false false, mrl: flush_mismatch, : mend
35048 Sep 22 23:15:03.659 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35049 Sep 22 23:15:03.659 INFO find dest for source 1 for extent at index 165, mrl: flush_mismatch, : mend
35050 Sep 22 23:15:03.659 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35051 Sep 22 23:15:03.659 INFO Extent 166 has flush number mismatch, : mend
35052 Sep 22 23:15:03.659 INFO First source client ID for extent 166, mrl: flush_mismatch, : mend
35053 Sep 22 23:15:03.659 INFO extent:166 gens: 0 1 1, mrl: flush_mismatch, : mend
35054 Sep 22 23:15:03.659 INFO extent:166 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35055 Sep 22 23:15:03.659 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35056 Sep 22 23:15:03.659 INFO extent:166 dirty: false false false, mrl: flush_mismatch, : mend
35057 Sep 22 23:15:03.659 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35058 Sep 22 23:15:03.659 INFO find dest for source 1 for extent at index 166, mrl: flush_mismatch, : mend
35059 Sep 22 23:15:03.659 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35060 Sep 22 23:15:03.659 INFO Extent 167 has flush number mismatch, : mend
35061 Sep 22 23:15:03.659 INFO First source client ID for extent 167, mrl: flush_mismatch, : mend
35062 Sep 22 23:15:03.659 INFO extent:167 gens: 0 1 1, mrl: flush_mismatch, : mend
35063 Sep 22 23:15:03.659 INFO extent:167 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35064 Sep 22 23:15:03.659 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35065 Sep 22 23:15:03.659 INFO extent:167 dirty: false false false, mrl: flush_mismatch, : mend
35066 Sep 22 23:15:03.659 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35067 Sep 22 23:15:03.659 INFO find dest for source 1 for extent at index 167, mrl: flush_mismatch, : mend
35068 Sep 22 23:15:03.659 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35069 Sep 22 23:15:03.659 INFO Extent 168 has flush number mismatch, : mend
35070 Sep 22 23:15:03.659 INFO First source client ID for extent 168, mrl: flush_mismatch, : mend
35071 Sep 22 23:15:03.659 INFO extent:168 gens: 0 1 1, mrl: flush_mismatch, : mend
35072 Sep 22 23:15:03.659 INFO extent:168 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35073 Sep 22 23:15:03.659 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35074 Sep 22 23:15:03.659 INFO extent:168 dirty: false false false, mrl: flush_mismatch, : mend
35075 Sep 22 23:15:03.659 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35076 Sep 22 23:15:03.659 INFO find dest for source 1 for extent at index 168, mrl: flush_mismatch, : mend
35077 Sep 22 23:15:03.659 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35078 Sep 22 23:15:03.659 INFO Extent 169 has flush number mismatch, : mend
35079 Sep 22 23:15:03.659 INFO First source client ID for extent 169, mrl: flush_mismatch, : mend
35080 Sep 22 23:15:03.659 INFO extent:169 gens: 0 1 1, mrl: flush_mismatch, : mend
35081 Sep 22 23:15:03.659 INFO extent:169 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35082 Sep 22 23:15:03.659 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35083 Sep 22 23:15:03.659 INFO extent:169 dirty: false false false, mrl: flush_mismatch, : mend
35084 Sep 22 23:15:03.659 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35085 Sep 22 23:15:03.659 INFO find dest for source 1 for extent at index 169, mrl: flush_mismatch, : mend
35086 Sep 22 23:15:03.659 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35087 Sep 22 23:15:03.659 INFO Extent 170 has flush number mismatch, : mend
35088 Sep 22 23:15:03.659 INFO First source client ID for extent 170, mrl: flush_mismatch, : mend
35089 Sep 22 23:15:03.659 INFO extent:170 gens: 0 1 1, mrl: flush_mismatch, : mend
35090 Sep 22 23:15:03.659 INFO extent:170 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35091 Sep 22 23:15:03.659 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35092 Sep 22 23:15:03.659 INFO extent:170 dirty: false false false, mrl: flush_mismatch, : mend
35093 Sep 22 23:15:03.659 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35094 Sep 22 23:15:03.659 INFO find dest for source 1 for extent at index 170, mrl: flush_mismatch, : mend
35095 Sep 22 23:15:03.659 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35096 Sep 22 23:15:03.659 INFO Extent 171 has flush number mismatch, : mend
35097 Sep 22 23:15:03.659 INFO First source client ID for extent 171, mrl: flush_mismatch, : mend
35098 Sep 22 23:15:03.659 INFO extent:171 gens: 0 1 1, mrl: flush_mismatch, : mend
35099 Sep 22 23:15:03.659 INFO extent:171 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35100 Sep 22 23:15:03.659 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35101 Sep 22 23:15:03.659 INFO extent:171 dirty: false false false, mrl: flush_mismatch, : mend
35102 Sep 22 23:15:03.659 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35103 Sep 22 23:15:03.659 INFO find dest for source 1 for extent at index 171, mrl: flush_mismatch, : mend
35104 Sep 22 23:15:03.659 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35105 Sep 22 23:15:03.659 INFO Extent 172 has flush number mismatch, : mend
35106 Sep 22 23:15:03.659 INFO First source client ID for extent 172, mrl: flush_mismatch, : mend
35107 Sep 22 23:15:03.659 INFO extent:172 gens: 0 1 1, mrl: flush_mismatch, : mend
35108 Sep 22 23:15:03.659 INFO extent:172 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35109 Sep 22 23:15:03.659 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35110 Sep 22 23:15:03.659 INFO extent:172 dirty: false false false, mrl: flush_mismatch, : mend
35111 Sep 22 23:15:03.659 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35112 Sep 22 23:15:03.659 INFO find dest for source 1 for extent at index 172, mrl: flush_mismatch, : mend
35113 Sep 22 23:15:03.659 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35114 Sep 22 23:15:03.660 INFO Extent 173 has flush number mismatch, : mend
35115 Sep 22 23:15:03.660 INFO First source client ID for extent 173, mrl: flush_mismatch, : mend
35116 Sep 22 23:15:03.660 INFO extent:173 gens: 0 1 1, mrl: flush_mismatch, : mend
35117 Sep 22 23:15:03.660 INFO extent:173 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35118 Sep 22 23:15:03.660 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35119 Sep 22 23:15:03.660 INFO extent:173 dirty: false false false, mrl: flush_mismatch, : mend
35120 Sep 22 23:15:03.660 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35121 Sep 22 23:15:03.660 INFO find dest for source 1 for extent at index 173, mrl: flush_mismatch, : mend
35122 Sep 22 23:15:03.660 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35123 Sep 22 23:15:03.660 INFO Extent 174 has flush number mismatch, : mend
35124 Sep 22 23:15:03.660 INFO First source client ID for extent 174, mrl: flush_mismatch, : mend
35125 Sep 22 23:15:03.660 INFO extent:174 gens: 0 1 1, mrl: flush_mismatch, : mend
35126 Sep 22 23:15:03.660 INFO extent:174 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35127 Sep 22 23:15:03.660 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35128 Sep 22 23:15:03.660 INFO extent:174 dirty: false false false, mrl: flush_mismatch, : mend
35129 Sep 22 23:15:03.660 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35130 Sep 22 23:15:03.660 INFO find dest for source 1 for extent at index 174, mrl: flush_mismatch, : mend
35131 Sep 22 23:15:03.660 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35132 Sep 22 23:15:03.660 INFO Extent 175 has flush number mismatch, : mend
35133 Sep 22 23:15:03.660 INFO First source client ID for extent 175, mrl: flush_mismatch, : mend
35134 Sep 22 23:15:03.660 INFO extent:175 gens: 0 1 1, mrl: flush_mismatch, : mend
35135 Sep 22 23:15:03.660 INFO extent:175 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35136 Sep 22 23:15:03.660 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35137 Sep 22 23:15:03.660 INFO extent:175 dirty: false false false, mrl: flush_mismatch, : mend
35138 Sep 22 23:15:03.660 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35139 Sep 22 23:15:03.660 INFO find dest for source 1 for extent at index 175, mrl: flush_mismatch, : mend
35140 Sep 22 23:15:03.660 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35141 Sep 22 23:15:03.660 INFO Extent 176 has flush number mismatch, : mend
35142 Sep 22 23:15:03.660 INFO First source client ID for extent 176, mrl: flush_mismatch, : mend
35143 Sep 22 23:15:03.660 INFO extent:176 gens: 0 1 1, mrl: flush_mismatch, : mend
35144 Sep 22 23:15:03.660 INFO extent:176 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35145 Sep 22 23:15:03.660 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35146 Sep 22 23:15:03.660 INFO extent:176 dirty: false false false, mrl: flush_mismatch, : mend
35147 Sep 22 23:15:03.660 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35148 Sep 22 23:15:03.660 INFO find dest for source 1 for extent at index 176, mrl: flush_mismatch, : mend
35149 Sep 22 23:15:03.660 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35150 Sep 22 23:15:03.660 INFO Extent 177 has flush number mismatch, : mend
35151 Sep 22 23:15:03.660 INFO First source client ID for extent 177, mrl: flush_mismatch, : mend
35152 Sep 22 23:15:03.660 INFO extent:177 gens: 0 1 1, mrl: flush_mismatch, : mend
35153 Sep 22 23:15:03.660 INFO extent:177 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35154 Sep 22 23:15:03.660 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35155 Sep 22 23:15:03.660 INFO extent:177 dirty: false false false, mrl: flush_mismatch, : mend
35156 Sep 22 23:15:03.660 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35157 Sep 22 23:15:03.660 INFO find dest for source 1 for extent at index 177, mrl: flush_mismatch, : mend
35158 Sep 22 23:15:03.660 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35159 Sep 22 23:15:03.660 INFO Extent 178 has flush number mismatch, : mend
35160 Sep 22 23:15:03.660 INFO First source client ID for extent 178, mrl: flush_mismatch, : mend
35161 Sep 22 23:15:03.660 INFO extent:178 gens: 0 1 1, mrl: flush_mismatch, : mend
35162 Sep 22 23:15:03.660 INFO extent:178 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35163 Sep 22 23:15:03.660 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35164 Sep 22 23:15:03.660 INFO extent:178 dirty: false false false, mrl: flush_mismatch, : mend
35165 Sep 22 23:15:03.660 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35166 Sep 22 23:15:03.660 INFO find dest for source 1 for extent at index 178, mrl: flush_mismatch, : mend
35167 Sep 22 23:15:03.660 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35168 Sep 22 23:15:03.660 INFO Extent 179 has flush number mismatch, : mend
35169 Sep 22 23:15:03.660 INFO First source client ID for extent 179, mrl: flush_mismatch, : mend
35170 Sep 22 23:15:03.660 INFO extent:179 gens: 0 1 1, mrl: flush_mismatch, : mend
35171 Sep 22 23:15:03.660 INFO extent:179 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35172 Sep 22 23:15:03.660 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35173 Sep 22 23:15:03.660 INFO extent:179 dirty: false false false, mrl: flush_mismatch, : mend
35174 Sep 22 23:15:03.660 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35175 Sep 22 23:15:03.660 INFO find dest for source 1 for extent at index 179, mrl: flush_mismatch, : mend
35176 Sep 22 23:15:03.660 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35177 Sep 22 23:15:03.660 INFO Extent 180 has flush number mismatch, : mend
35178 Sep 22 23:15:03.660 INFO First source client ID for extent 180, mrl: flush_mismatch, : mend
35179 Sep 22 23:15:03.660 INFO extent:180 gens: 0 1 1, mrl: flush_mismatch, : mend
35180 Sep 22 23:15:03.660 INFO extent:180 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35181 Sep 22 23:15:03.660 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35182 Sep 22 23:15:03.660 INFO extent:180 dirty: false false false, mrl: flush_mismatch, : mend
35183 Sep 22 23:15:03.660 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35184 Sep 22 23:15:03.660 INFO find dest for source 1 for extent at index 180, mrl: flush_mismatch, : mend
35185 Sep 22 23:15:03.660 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35186 Sep 22 23:15:03.660 INFO Extent 181 has flush number mismatch, : mend
35187 Sep 22 23:15:03.660 INFO First source client ID for extent 181, mrl: flush_mismatch, : mend
35188 Sep 22 23:15:03.660 INFO extent:181 gens: 0 1 1, mrl: flush_mismatch, : mend
35189 Sep 22 23:15:03.660 INFO extent:181 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35190 Sep 22 23:15:03.660 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35191 Sep 22 23:15:03.660 INFO extent:181 dirty: false false false, mrl: flush_mismatch, : mend
35192 Sep 22 23:15:03.660 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35193 Sep 22 23:15:03.660 INFO find dest for source 1 for extent at index 181, mrl: flush_mismatch, : mend
35194 Sep 22 23:15:03.660 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35195 Sep 22 23:15:03.660 INFO Extent 182 has flush number mismatch, : mend
35196 Sep 22 23:15:03.660 INFO First source client ID for extent 182, mrl: flush_mismatch, : mend
35197 Sep 22 23:15:03.660 INFO extent:182 gens: 0 1 1, mrl: flush_mismatch, : mend
35198 Sep 22 23:15:03.660 INFO extent:182 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35199 Sep 22 23:15:03.660 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35200 Sep 22 23:15:03.660 INFO extent:182 dirty: false false false, mrl: flush_mismatch, : mend
35201 Sep 22 23:15:03.660 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35202 Sep 22 23:15:03.660 INFO find dest for source 1 for extent at index 182, mrl: flush_mismatch, : mend
35203 Sep 22 23:15:03.661 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35204 Sep 22 23:15:03.661 INFO Extent 183 has flush number mismatch, : mend
35205 Sep 22 23:15:03.661 INFO First source client ID for extent 183, mrl: flush_mismatch, : mend
35206 Sep 22 23:15:03.661 INFO extent:183 gens: 0 1 1, mrl: flush_mismatch, : mend
35207 Sep 22 23:15:03.661 INFO extent:183 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35208 Sep 22 23:15:03.661 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35209 Sep 22 23:15:03.661 INFO extent:183 dirty: false false false, mrl: flush_mismatch, : mend
35210 Sep 22 23:15:03.661 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35211 Sep 22 23:15:03.661 INFO find dest for source 1 for extent at index 183, mrl: flush_mismatch, : mend
35212 Sep 22 23:15:03.661 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35213 Sep 22 23:15:03.661 INFO Extent 184 has flush number mismatch, : mend
35214 Sep 22 23:15:03.661 INFO First source client ID for extent 184, mrl: flush_mismatch, : mend
35215 Sep 22 23:15:03.661 INFO extent:184 gens: 0 1 1, mrl: flush_mismatch, : mend
35216 Sep 22 23:15:03.661 INFO extent:184 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35217 Sep 22 23:15:03.661 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35218 Sep 22 23:15:03.661 INFO extent:184 dirty: false false false, mrl: flush_mismatch, : mend
35219 Sep 22 23:15:03.661 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35220 Sep 22 23:15:03.661 INFO find dest for source 1 for extent at index 184, mrl: flush_mismatch, : mend
35221 Sep 22 23:15:03.661 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35222 Sep 22 23:15:03.661 INFO Extent 185 has flush number mismatch, : mend
35223 Sep 22 23:15:03.661 INFO First source client ID for extent 185, mrl: flush_mismatch, : mend
35224 Sep 22 23:15:03.661 INFO extent:185 gens: 0 1 1, mrl: flush_mismatch, : mend
35225 Sep 22 23:15:03.661 INFO extent:185 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35226 Sep 22 23:15:03.661 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35227 Sep 22 23:15:03.661 INFO extent:185 dirty: false false false, mrl: flush_mismatch, : mend
35228 Sep 22 23:15:03.661 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35229 Sep 22 23:15:03.661 INFO find dest for source 1 for extent at index 185, mrl: flush_mismatch, : mend
35230 Sep 22 23:15:03.661 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35231 Sep 22 23:15:03.661 INFO Extent 186 has flush number mismatch, : mend
35232 Sep 22 23:15:03.661 INFO First source client ID for extent 186, mrl: flush_mismatch, : mend
35233 Sep 22 23:15:03.661 INFO extent:186 gens: 0 1 1, mrl: flush_mismatch, : mend
35234 Sep 22 23:15:03.661 INFO extent:186 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35235 Sep 22 23:15:03.661 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35236 Sep 22 23:15:03.661 INFO extent:186 dirty: false false false, mrl: flush_mismatch, : mend
35237 Sep 22 23:15:03.661 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35238 Sep 22 23:15:03.661 INFO find dest for source 1 for extent at index 186, mrl: flush_mismatch, : mend
35239 Sep 22 23:15:03.661 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35240 Sep 22 23:15:03.661 INFO Extent 187 has flush number mismatch, : mend
35241 Sep 22 23:15:03.661 INFO First source client ID for extent 187, mrl: flush_mismatch, : mend
35242 Sep 22 23:15:03.661 INFO extent:187 gens: 0 1 1, mrl: flush_mismatch, : mend
35243 Sep 22 23:15:03.661 INFO extent:187 flush: 0 1 1 scs: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35244 Sep 22 23:15:03.661 INFO max_flush now has: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35245 Sep 22 23:15:03.661 INFO extent:187 dirty: false false false, mrl: flush_mismatch, : mend
35246 Sep 22 23:15:03.661 INFO No maxes found, left with: [ClientId(1), ClientId(2)], mrl: flush_mismatch, : mend
35247 Sep 22 23:15:03.661 INFO find dest for source 1 for extent at index 187, mrl: flush_mismatch, : mend
35248 Sep 22 23:15:03.661 INFO source 1, add dest 0 gen, mrl: flush_mismatch, : mend
35249 Sep 22 23:15:03.661 INFO Transition from WaitQuorum to Repair
35250 Sep 22 23:15:03.661 INFO Transition from WaitQuorum to Repair
35251 Sep 22 23:15:03.661 INFO Transition from WaitQuorum to Repair
35252 Sep 22 23:15:03.661 INFO Found 188 extents that need repair
35253 Sep 22 23:15:03.661 INFO Full repair list: {115: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 0: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 143: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 151: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 169: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 185: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 11: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 16: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 67: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 137: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 146: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 105: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 4: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 70: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 113: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 85: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 69: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 102: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 73: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 76: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 98: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 65: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 45: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 75: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 68: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 35: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 152: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 30: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 95: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 133: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 174: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 158: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 106: ExtentFix { 
source: ClientId(1), dest: [ClientId(0)] }, 103: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 80: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 177: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 84: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 91: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 2: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 99: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 110: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 121: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 157: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 57: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 32: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 60: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 7: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 139: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 163: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 165: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 71: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 141: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 114: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 107: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 150: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 164: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 6: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 52: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 19: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 79: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 97: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 112: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 118: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 130: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 25: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 47: ExtentFix { 
source: ClientId(1), dest: [ClientId(0)] }, 3: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 54: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 58: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 18: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 138: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 100: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 123: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 96: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 28: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 178: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 36: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 92: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 147: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 111: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 187: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 149: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 160: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 122: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 77: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 136: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 176: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 125: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 90: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 144: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 156: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 33: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 48: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 78: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 88: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 82: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 86: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 94: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 145: ExtentFix { 
source: ClientId(1), dest: [ClientId(0)] }, 168: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 127: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 26: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 34: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 12: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 172: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 53: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 175: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 140: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 167: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 142: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 171: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 42: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 43: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 179: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 120: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 66: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 132: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 173: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 180: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 184: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 24: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 50: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 117: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 38: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 129: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 153: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 181: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 128: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 72: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 21: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 61: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 134: ExtentFix { 
source: ClientId(1), dest: [ClientId(0)] }, 46: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 41: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 22: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 56: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 23: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 10: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 44: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 59: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 154: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 159: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 31: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 87: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 148: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 162: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 51: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 39: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 155: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 29: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 135: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 161: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 15: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 64: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 93: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 40: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 49: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 81: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 170: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 13: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 27: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 8: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 1: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 9: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 116: ExtentFix { source: 
ClientId(1), dest: [ClientId(0)] }, 131: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 17: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 182: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 183: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 20: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 63: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 166: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 14: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 37: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 124: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 5: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 55: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 62: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 89: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 126: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 186: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 108: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 109: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 104: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 101: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 74: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 83: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }, 119: ExtentFix { source: ClientId(1), dest: [ClientId(0)] }}, : downstairs
35254 Sep 22 23:15:03.662 INFO Task list: [ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 115 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 115 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(4), op: ExtentFlush { repair_id: ReconciliationId(4), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(5), op: ExtentClose { repair_id: ReconciliationId(5), extent_id: 0 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(6), op: ExtentRepair { repair_id: ReconciliationId(6), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(7), op: ExtentReopen { repair_id: ReconciliationId(7), extent_id: 0 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(8), op: ExtentFlush { repair_id: ReconciliationId(8), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(9), op: ExtentClose { repair_id: ReconciliationId(9), extent_id: 143 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(10), op: ExtentRepair { repair_id: ReconciliationId(10), extent_id: 143, source_client_id: ClientId(1), 
source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(11), op: ExtentReopen { repair_id: ReconciliationId(11), extent_id: 143 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(12), op: ExtentFlush { repair_id: ReconciliationId(12), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(13), op: ExtentClose { repair_id: ReconciliationId(13), extent_id: 151 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(14), op: ExtentRepair { repair_id: ReconciliationId(14), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(15), op: ExtentReopen { repair_id: ReconciliationId(15), extent_id: 151 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(16), op: ExtentFlush { repair_id: ReconciliationId(16), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(17), op: ExtentClose { repair_id: ReconciliationId(17), extent_id: 169 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(18), op: ExtentRepair { repair_id: ReconciliationId(18), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(19), op: ExtentReopen { repair_id: ReconciliationId(19), extent_id: 169 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(20), op: ExtentFlush { repair_id: ReconciliationId(20), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(21), op: 
ExtentClose { repair_id: ReconciliationId(21), extent_id: 185 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(22), op: ExtentRepair { repair_id: ReconciliationId(22), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(23), op: ExtentReopen { repair_id: ReconciliationId(23), extent_id: 185 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(24), op: ExtentFlush { repair_id: ReconciliationId(24), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(25), op: ExtentClose { repair_id: ReconciliationId(25), extent_id: 11 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(26), op: ExtentRepair { repair_id: ReconciliationId(26), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(27), op: ExtentReopen { repair_id: ReconciliationId(27), extent_id: 11 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(28), op: ExtentFlush { repair_id: ReconciliationId(28), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(29), op: ExtentClose { repair_id: ReconciliationId(29), extent_id: 16 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(30), op: ExtentRepair { repair_id: ReconciliationId(30), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(31), op: ExtentReopen { repair_id: ReconciliationId(31), extent_id: 16 }, state: ClientData([New, New, New]) }, ReconcileIO { 
id: ReconciliationId(32), op: ExtentFlush { repair_id: ReconciliationId(32), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(33), op: ExtentClose { repair_id: ReconciliationId(33), extent_id: 67 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(34), op: ExtentRepair { repair_id: ReconciliationId(34), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(35), op: ExtentReopen { repair_id: ReconciliationId(35), extent_id: 67 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(36), op: ExtentFlush { repair_id: ReconciliationId(36), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(37), op: ExtentClose { repair_id: ReconciliationId(37), extent_id: 137 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(38), op: ExtentRepair { repair_id: ReconciliationId(38), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(39), op: ExtentReopen { repair_id: ReconciliationId(39), extent_id: 137 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(40), op: ExtentFlush { repair_id: ReconciliationId(40), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(41), op: ExtentClose { repair_id: ReconciliationId(41), extent_id: 146 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(42), op: ExtentRepair { repair_id: ReconciliationId(42), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 
127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(43), op: ExtentReopen { repair_id: ReconciliationId(43), extent_id: 146 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(44), op: ExtentFlush { repair_id: ReconciliationId(44), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(45), op: ExtentClose { repair_id: ReconciliationId(45), extent_id: 105 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(46), op: ExtentRepair { repair_id: ReconciliationId(46), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(47), op: ExtentReopen { repair_id: ReconciliationId(47), extent_id: 105 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(48), op: ExtentFlush { repair_id: ReconciliationId(48), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(49), op: ExtentClose { repair_id: ReconciliationId(49), extent_id: 4 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(50), op: ExtentRepair { repair_id: ReconciliationId(50), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(51), op: ExtentReopen { repair_id: ReconciliationId(51), extent_id: 4 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(52), op: ExtentFlush { repair_id: ReconciliationId(52), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(53), op: ExtentClose { repair_id: 
ReconciliationId(53), extent_id: 70 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(54), op: ExtentRepair { repair_id: ReconciliationId(54), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(55), op: ExtentReopen { repair_id: ReconciliationId(55), extent_id: 70 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(56), op: ExtentFlush { repair_id: ReconciliationId(56), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(57), op: ExtentClose { repair_id: ReconciliationId(57), extent_id: 113 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(58), op: ExtentRepair { repair_id: ReconciliationId(58), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(59), op: ExtentReopen { repair_id: ReconciliationId(59), extent_id: 113 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(60), op: ExtentFlush { repair_id: ReconciliationId(60), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(61), op: ExtentClose { repair_id: ReconciliationId(61), extent_id: 85 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(62), op: ExtentRepair { repair_id: ReconciliationId(62), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(63), op: ExtentReopen { repair_id: ReconciliationId(63), extent_id: 85 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(64), 
op: ExtentFlush { repair_id: ReconciliationId(64), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(65), op: ExtentClose { repair_id: ReconciliationId(65), extent_id: 69 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(66), op: ExtentRepair { repair_id: ReconciliationId(66), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(67), op: ExtentReopen { repair_id: ReconciliationId(67), extent_id: 69 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(68), op: ExtentFlush { repair_id: ReconciliationId(68), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(69), op: ExtentClose { repair_id: ReconciliationId(69), extent_id: 102 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(70), op: ExtentRepair { repair_id: ReconciliationId(70), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(71), op: ExtentReopen { repair_id: ReconciliationId(71), extent_id: 102 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(72), op: ExtentFlush { repair_id: ReconciliationId(72), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(73), op: ExtentClose { repair_id: ReconciliationId(73), extent_id: 73 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(74), op: ExtentRepair { repair_id: ReconciliationId(74), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: 
[ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(75), op: ExtentReopen { repair_id: ReconciliationId(75), extent_id: 73 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(76), op: ExtentFlush { repair_id: ReconciliationId(76), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(77), op: ExtentClose { repair_id: ReconciliationId(77), extent_id: 76 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(78), op: ExtentRepair { repair_id: ReconciliationId(78), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(79), op: ExtentReopen { repair_id: ReconciliationId(79), extent_id: 76 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(80), op: ExtentFlush { repair_id: ReconciliationId(80), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(81), op: ExtentClose { repair_id: ReconciliationId(81), extent_id: 98 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(82), op: ExtentRepair { repair_id: ReconciliationId(82), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(83), op: ExtentReopen { repair_id: ReconciliationId(83), extent_id: 98 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(84), op: ExtentFlush { repair_id: ReconciliationId(84), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(85), op: ExtentClose { repair_id: ReconciliationId(85), extent_id: 65 }, 
state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(86), op: ExtentRepair { repair_id: ReconciliationId(86), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(87), op: ExtentReopen { repair_id: ReconciliationId(87), extent_id: 65 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(88), op: ExtentFlush { repair_id: ReconciliationId(88), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(89), op: ExtentClose { repair_id: ReconciliationId(89), extent_id: 45 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(90), op: ExtentRepair { repair_id: ReconciliationId(90), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(91), op: ExtentReopen { repair_id: ReconciliationId(91), extent_id: 45 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(92), op: ExtentFlush { repair_id: ReconciliationId(92), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(93), op: ExtentClose { repair_id: ReconciliationId(93), extent_id: 75 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(94), op: ExtentRepair { repair_id: ReconciliationId(94), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(95), op: ExtentReopen { repair_id: ReconciliationId(95), extent_id: 75 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(96), op: ExtentFlush { repair_id: 
ReconciliationId(96), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(97), op: ExtentClose { repair_id: ReconciliationId(97), extent_id: 68 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(98), op: ExtentRepair { repair_id: ReconciliationId(98), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(99), op: ExtentReopen { repair_id: ReconciliationId(99), extent_id: 68 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(100), op: ExtentFlush { repair_id: ReconciliationId(100), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(101), op: ExtentClose { repair_id: ReconciliationId(101), extent_id: 35 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(102), op: ExtentRepair { repair_id: ReconciliationId(102), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(103), op: ExtentReopen { repair_id: ReconciliationId(103), extent_id: 35 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(104), op: ExtentFlush { repair_id: ReconciliationId(104), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(105), op: ExtentClose { repair_id: ReconciliationId(105), extent_id: 152 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(106), op: ExtentRepair { repair_id: ReconciliationId(106), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: 
ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(107), op: ExtentReopen { repair_id: ReconciliationId(107), extent_id: 152 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(108), op: ExtentFlush { repair_id: ReconciliationId(108), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(109), op: ExtentClose { repair_id: ReconciliationId(109), extent_id: 30 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(110), op: ExtentRepair { repair_id: ReconciliationId(110), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(111), op: ExtentReopen { repair_id: ReconciliationId(111), extent_id: 30 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(112), op: ExtentFlush { repair_id: ReconciliationId(112), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(113), op: ExtentClose { repair_id: ReconciliationId(113), extent_id: 95 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(114), op: ExtentRepair { repair_id: ReconciliationId(114), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(115), op: ExtentReopen { repair_id: ReconciliationId(115), extent_id: 95 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(116), op: ExtentFlush { repair_id: ReconciliationId(116), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(117), op: ExtentClose { repair_id: ReconciliationId(117), extent_id: 133 }, 
state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(118), op: ExtentRepair { repair_id: ReconciliationId(118), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(119), op: ExtentReopen { repair_id: ReconciliationId(119), extent_id: 133 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(120), op: ExtentFlush { repair_id: ReconciliationId(120), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(121), op: ExtentClose { repair_id: ReconciliationId(121), extent_id: 174 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(122), op: ExtentRepair { repair_id: ReconciliationId(122), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(123), op: ExtentReopen { repair_id: ReconciliationId(123), extent_id: 174 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(124), op: ExtentFlush { repair_id: ReconciliationId(124), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(125), op: ExtentClose { repair_id: ReconciliationId(125), extent_id: 158 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(126), op: ExtentRepair { repair_id: ReconciliationId(126), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(127), op: ExtentReopen { repair_id: ReconciliationId(127), extent_id: 158 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(128), op: 
ExtentFlush { repair_id: ReconciliationId(128), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(129), op: ExtentClose { repair_id: ReconciliationId(129), extent_id: 106 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(130), op: ExtentRepair { repair_id: ReconciliationId(130), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(131), op: ExtentReopen { repair_id: ReconciliationId(131), extent_id: 106 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(132), op: ExtentFlush { repair_id: ReconciliationId(132), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(133), op: ExtentClose { repair_id: ReconciliationId(133), extent_id: 103 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(134), op: ExtentRepair { repair_id: ReconciliationId(134), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(135), op: ExtentReopen { repair_id: ReconciliationId(135), extent_id: 103 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(136), op: ExtentFlush { repair_id: ReconciliationId(136), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(137), op: ExtentClose { repair_id: ReconciliationId(137), extent_id: 80 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(138), op: ExtentRepair { repair_id: ReconciliationId(138), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, 
dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(139), op: ExtentReopen { repair_id: ReconciliationId(139), extent_id: 80 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(140), op: ExtentFlush { repair_id: ReconciliationId(140), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(141), op: ExtentClose { repair_id: ReconciliationId(141), extent_id: 177 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(142), op: ExtentRepair { repair_id: ReconciliationId(142), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(143), op: ExtentReopen { repair_id: ReconciliationId(143), extent_id: 177 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(144), op: ExtentFlush { repair_id: ReconciliationId(144), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(145), op: ExtentClose { repair_id: ReconciliationId(145), extent_id: 84 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(146), op: ExtentRepair { repair_id: ReconciliationId(146), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(147), op: ExtentReopen { repair_id: ReconciliationId(147), extent_id: 84 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(148), op: ExtentFlush { repair_id: ReconciliationId(148), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(149), op: ExtentClose { repair_id: 
ReconciliationId(149), extent_id: 91 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(150), op: ExtentRepair { repair_id: ReconciliationId(150), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(151), op: ExtentReopen { repair_id: ReconciliationId(151), extent_id: 91 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(152), op: ExtentFlush { repair_id: ReconciliationId(152), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(153), op: ExtentClose { repair_id: ReconciliationId(153), extent_id: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(154), op: ExtentRepair { repair_id: ReconciliationId(154), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(155), op: ExtentReopen { repair_id: ReconciliationId(155), extent_id: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(156), op: ExtentFlush { repair_id: ReconciliationId(156), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(157), op: ExtentClose { repair_id: ReconciliationId(157), extent_id: 99 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(158), op: ExtentRepair { repair_id: ReconciliationId(158), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(159), op: ExtentReopen { repair_id: ReconciliationId(159), extent_id: 99 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(160), op: ExtentFlush { repair_id: ReconciliationId(160), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(161), op: ExtentClose { repair_id: ReconciliationId(161), extent_id: 110 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(162), op: ExtentRepair { repair_id: ReconciliationId(162), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(163), op: ExtentReopen { repair_id: ReconciliationId(163), extent_id: 110 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(164), op: ExtentFlush { repair_id: ReconciliationId(164), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(165), op: ExtentClose { repair_id: ReconciliationId(165), extent_id: 121 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(166), op: ExtentRepair { repair_id: ReconciliationId(166), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(167), op: ExtentReopen { repair_id: ReconciliationId(167), extent_id: 121 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(168), op: ExtentFlush { repair_id: ReconciliationId(168), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(169), op: ExtentClose { repair_id: ReconciliationId(169), extent_id: 157 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(170), op: ExtentRepair { repair_id: ReconciliationId(170), extent_id: 157, source_client_id: ClientId(1), 
source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(171), op: ExtentReopen { repair_id: ReconciliationId(171), extent_id: 157 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(172), op: ExtentFlush { repair_id: ReconciliationId(172), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(173), op: ExtentClose { repair_id: ReconciliationId(173), extent_id: 57 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(174), op: ExtentRepair { repair_id: ReconciliationId(174), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(175), op: ExtentReopen { repair_id: ReconciliationId(175), extent_id: 57 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(176), op: ExtentFlush { repair_id: ReconciliationId(176), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(177), op: ExtentClose { repair_id: ReconciliationId(177), extent_id: 32 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(178), op: ExtentRepair { repair_id: ReconciliationId(178), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(179), op: ExtentReopen { repair_id: ReconciliationId(179), extent_id: 32 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(180), op: ExtentFlush { repair_id: ReconciliationId(180), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(181), op: ExtentClose { repair_id: ReconciliationId(181), extent_id: 60 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(182), op: ExtentRepair { repair_id: ReconciliationId(182), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(183), op: ExtentReopen { repair_id: ReconciliationId(183), extent_id: 60 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(184), op: ExtentFlush { repair_id: ReconciliationId(184), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(185), op: ExtentClose { repair_id: ReconciliationId(185), extent_id: 7 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(186), op: ExtentRepair { repair_id: ReconciliationId(186), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(187), op: ExtentReopen { repair_id: ReconciliationId(187), extent_id: 7 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(188), op: ExtentFlush { repair_id: ReconciliationId(188), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(189), op: ExtentClose { repair_id: ReconciliationId(189), extent_id: 139 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(190), op: ExtentRepair { repair_id: ReconciliationId(190), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(191), op: ExtentReopen { repair_id: ReconciliationId(191), extent_id: 139 }, state: 
ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(192), op: ExtentFlush { repair_id: ReconciliationId(192), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(193), op: ExtentClose { repair_id: ReconciliationId(193), extent_id: 163 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(194), op: ExtentRepair { repair_id: ReconciliationId(194), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(195), op: ExtentReopen { repair_id: ReconciliationId(195), extent_id: 163 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(196), op: ExtentFlush { repair_id: ReconciliationId(196), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(197), op: ExtentClose { repair_id: ReconciliationId(197), extent_id: 165 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(198), op: ExtentRepair { repair_id: ReconciliationId(198), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(199), op: ExtentReopen { repair_id: ReconciliationId(199), extent_id: 165 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(200), op: ExtentFlush { repair_id: ReconciliationId(200), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(201), op: ExtentClose { repair_id: ReconciliationId(201), extent_id: 71 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(202), op: ExtentRepair { repair_id: ReconciliationId(202), extent_id: 71, 
source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(203), op: ExtentReopen { repair_id: ReconciliationId(203), extent_id: 71 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(204), op: ExtentFlush { repair_id: ReconciliationId(204), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(205), op: ExtentClose { repair_id: ReconciliationId(205), extent_id: 141 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(206), op: ExtentRepair { repair_id: ReconciliationId(206), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(207), op: ExtentReopen { repair_id: ReconciliationId(207), extent_id: 141 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(208), op: ExtentFlush { repair_id: ReconciliationId(208), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(209), op: ExtentClose { repair_id: ReconciliationId(209), extent_id: 114 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(210), op: ExtentRepair { repair_id: ReconciliationId(210), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(211), op: ExtentReopen { repair_id: ReconciliationId(211), extent_id: 114 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(212), op: ExtentFlush { repair_id: ReconciliationId(212), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) 
}, ReconcileIO { id: ReconciliationId(213), op: ExtentClose { repair_id: ReconciliationId(213), extent_id: 107 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(214), op: ExtentRepair { repair_id: ReconciliationId(214), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(215), op: ExtentReopen { repair_id: ReconciliationId(215), extent_id: 107 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(216), op: ExtentFlush { repair_id: ReconciliationId(216), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(217), op: ExtentClose { repair_id: ReconciliationId(217), extent_id: 150 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(218), op: ExtentRepair { repair_id: ReconciliationId(218), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(219), op: ExtentReopen { repair_id: ReconciliationId(219), extent_id: 150 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(220), op: ExtentFlush { repair_id: ReconciliationId(220), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(221), op: ExtentClose { repair_id: ReconciliationId(221), extent_id: 164 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(222), op: ExtentRepair { repair_id: ReconciliationId(222), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(223), op: ExtentReopen { repair_id: 
ReconciliationId(223), extent_id: 164 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(224), op: ExtentFlush { repair_id: ReconciliationId(224), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(225), op: ExtentClose { repair_id: ReconciliationId(225), extent_id: 6 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(226), op: ExtentRepair { repair_id: ReconciliationId(226), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(227), op: ExtentReopen { repair_id: ReconciliationId(227), extent_id: 6 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(228), op: ExtentFlush { repair_id: ReconciliationId(228), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(229), op: ExtentClose { repair_id: ReconciliationId(229), extent_id: 52 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(230), op: ExtentRepair { repair_id: ReconciliationId(230), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(231), op: ExtentReopen { repair_id: ReconciliationId(231), extent_id: 52 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(232), op: ExtentFlush { repair_id: ReconciliationId(232), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(233), op: ExtentClose { repair_id: ReconciliationId(233), extent_id: 19 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(234), op: ExtentRepair { repair_id: 
ReconciliationId(234), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(235), op: ExtentReopen { repair_id: ReconciliationId(235), extent_id: 19 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(236), op: ExtentFlush { repair_id: ReconciliationId(236), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(237), op: ExtentClose { repair_id: ReconciliationId(237), extent_id: 79 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(238), op: ExtentRepair { repair_id: ReconciliationId(238), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(239), op: ExtentReopen { repair_id: ReconciliationId(239), extent_id: 79 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(240), op: ExtentFlush { repair_id: ReconciliationId(240), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(241), op: ExtentClose { repair_id: ReconciliationId(241), extent_id: 97 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(242), op: ExtentRepair { repair_id: ReconciliationId(242), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(243), op: ExtentReopen { repair_id: ReconciliationId(243), extent_id: 97 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(244), op: ExtentFlush { repair_id: ReconciliationId(244), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, 
state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(245), op: ExtentClose { repair_id: ReconciliationId(245), extent_id: 112 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(246), op: ExtentRepair { repair_id: ReconciliationId(246), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(247), op: ExtentReopen { repair_id: ReconciliationId(247), extent_id: 112 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(248), op: ExtentFlush { repair_id: ReconciliationId(248), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(249), op: ExtentClose { repair_id: ReconciliationId(249), extent_id: 118 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(250), op: ExtentRepair { repair_id: ReconciliationId(250), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(251), op: ExtentReopen { repair_id: ReconciliationId(251), extent_id: 118 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(252), op: ExtentFlush { repair_id: ReconciliationId(252), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(253), op: ExtentClose { repair_id: ReconciliationId(253), extent_id: 130 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(254), op: ExtentRepair { repair_id: ReconciliationId(254), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(255), op: 
ExtentReopen { repair_id: ReconciliationId(255), extent_id: 130 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(256), op: ExtentFlush { repair_id: ReconciliationId(256), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(257), op: ExtentClose { repair_id: ReconciliationId(257), extent_id: 25 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(258), op: ExtentRepair { repair_id: ReconciliationId(258), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(259), op: ExtentReopen { repair_id: ReconciliationId(259), extent_id: 25 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(260), op: ExtentFlush { repair_id: ReconciliationId(260), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(261), op: ExtentClose { repair_id: ReconciliationId(261), extent_id: 47 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(262), op: ExtentRepair { repair_id: ReconciliationId(262), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(263), op: ExtentReopen { repair_id: ReconciliationId(263), extent_id: 47 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(264), op: ExtentFlush { repair_id: ReconciliationId(264), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(265), op: ExtentClose { repair_id: ReconciliationId(265), extent_id: 3 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(266), op: 
ExtentRepair { repair_id: ReconciliationId(266), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(267), op: ExtentReopen { repair_id: ReconciliationId(267), extent_id: 3 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(268), op: ExtentFlush { repair_id: ReconciliationId(268), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(269), op: ExtentClose { repair_id: ReconciliationId(269), extent_id: 54 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(270), op: ExtentRepair { repair_id: ReconciliationId(270), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(271), op: ExtentReopen { repair_id: ReconciliationId(271), extent_id: 54 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(272), op: ExtentFlush { repair_id: ReconciliationId(272), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(273), op: ExtentClose { repair_id: ReconciliationId(273), extent_id: 58 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(274), op: ExtentRepair { repair_id: ReconciliationId(274), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(275), op: ExtentReopen { repair_id: ReconciliationId(275), extent_id: 58 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(276), op: ExtentFlush { repair_id: ReconciliationId(276), extent_id: 18, client_id: ClientId(1), flush_number: 2, 
gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(277), op: ExtentClose { repair_id: ReconciliationId(277), extent_id: 18 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(278), op: ExtentRepair { repair_id: ReconciliationId(278), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(279), op: ExtentReopen { repair_id: ReconciliationId(279), extent_id: 18 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(280), op: ExtentFlush { repair_id: ReconciliationId(280), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(281), op: ExtentClose { repair_id: ReconciliationId(281), extent_id: 138 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(282), op: ExtentRepair { repair_id: ReconciliationId(282), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(283), op: ExtentReopen { repair_id: ReconciliationId(283), extent_id: 138 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(284), op: ExtentFlush { repair_id: ReconciliationId(284), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(285), op: ExtentClose { repair_id: ReconciliationId(285), extent_id: 100 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(286), op: ExtentRepair { repair_id: ReconciliationId(286), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(287), 
op: ExtentReopen { repair_id: ReconciliationId(287), extent_id: 100 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(288), op: ExtentFlush { repair_id: ReconciliationId(288), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(289), op: ExtentClose { repair_id: ReconciliationId(289), extent_id: 123 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(290), op: ExtentRepair { repair_id: ReconciliationId(290), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(291), op: ExtentReopen { repair_id: ReconciliationId(291), extent_id: 123 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(292), op: ExtentFlush { repair_id: ReconciliationId(292), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(293), op: ExtentClose { repair_id: ReconciliationId(293), extent_id: 96 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(294), op: ExtentRepair { repair_id: ReconciliationId(294), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(295), op: ExtentReopen { repair_id: ReconciliationId(295), extent_id: 96 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(296), op: ExtentFlush { repair_id: ReconciliationId(296), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(297), op: ExtentClose { repair_id: ReconciliationId(297), extent_id: 28 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(298), op: ExtentRepair { repair_id: ReconciliationId(298), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(299), op: ExtentReopen { repair_id: ReconciliationId(299), extent_id: 28 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(300), op: ExtentFlush { repair_id: ReconciliationId(300), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(301), op: ExtentClose { repair_id: ReconciliationId(301), extent_id: 178 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(302), op: ExtentRepair { repair_id: ReconciliationId(302), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(303), op: ExtentReopen { repair_id: ReconciliationId(303), extent_id: 178 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(304), op: ExtentFlush { repair_id: ReconciliationId(304), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(305), op: ExtentClose { repair_id: ReconciliationId(305), extent_id: 36 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(306), op: ExtentRepair { repair_id: ReconciliationId(306), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(307), op: ExtentReopen { repair_id: ReconciliationId(307), extent_id: 36 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(308), op: ExtentFlush { repair_id: ReconciliationId(308), extent_id: 92, 
client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(309), op: ExtentClose { repair_id: ReconciliationId(309), extent_id: 92 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(310), op: ExtentRepair { repair_id: ReconciliationId(310), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(311), op: ExtentReopen { repair_id: ReconciliationId(311), extent_id: 92 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(312), op: ExtentFlush { repair_id: ReconciliationId(312), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(313), op: ExtentClose { repair_id: ReconciliationId(313), extent_id: 147 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(314), op: ExtentRepair { repair_id: ReconciliationId(314), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(315), op: ExtentReopen { repair_id: ReconciliationId(315), extent_id: 147 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(316), op: ExtentFlush { repair_id: ReconciliationId(316), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(317), op: ExtentClose { repair_id: ReconciliationId(317), extent_id: 111 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(318), op: ExtentRepair { repair_id: ReconciliationId(318), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, 
ReconcileIO { id: ReconciliationId(319), op: ExtentReopen { repair_id: ReconciliationId(319), extent_id: 111 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(320), op: ExtentFlush { repair_id: ReconciliationId(320), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(321), op: ExtentClose { repair_id: ReconciliationId(321), extent_id: 187 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(322), op: ExtentRepair { repair_id: ReconciliationId(322), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(323), op: ExtentReopen { repair_id: ReconciliationId(323), extent_id: 187 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(324), op: ExtentFlush { repair_id: ReconciliationId(324), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(325), op: ExtentClose { repair_id: ReconciliationId(325), extent_id: 149 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(326), op: ExtentRepair { repair_id: ReconciliationId(326), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(327), op: ExtentReopen { repair_id: ReconciliationId(327), extent_id: 149 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(328), op: ExtentFlush { repair_id: ReconciliationId(328), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(329), op: ExtentClose { repair_id: ReconciliationId(329), extent_id: 160 }, state: ClientData([New, 
New, New]) }, ReconcileIO { id: ReconciliationId(330), op: ExtentRepair { repair_id: ReconciliationId(330), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(331), op: ExtentReopen { repair_id: ReconciliationId(331), extent_id: 160 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(332), op: ExtentFlush { repair_id: ReconciliationId(332), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(333), op: ExtentClose { repair_id: ReconciliationId(333), extent_id: 122 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(334), op: ExtentRepair { repair_id: ReconciliationId(334), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(335), op: ExtentReopen { repair_id: ReconciliationId(335), extent_id: 122 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(336), op: ExtentFlush { repair_id: ReconciliationId(336), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(337), op: ExtentClose { repair_id: ReconciliationId(337), extent_id: 77 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(338), op: ExtentRepair { repair_id: ReconciliationId(338), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(339), op: ExtentReopen { repair_id: ReconciliationId(339), extent_id: 77 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(340), op: ExtentFlush { repair_id: 
ReconciliationId(340), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(341), op: ExtentClose { repair_id: ReconciliationId(341), extent_id: 136 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(342), op: ExtentRepair { repair_id: ReconciliationId(342), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(343), op: ExtentReopen { repair_id: ReconciliationId(343), extent_id: 136 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(344), op: ExtentFlush { repair_id: ReconciliationId(344), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(345), op: ExtentClose { repair_id: ReconciliationId(345), extent_id: 176 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(346), op: ExtentRepair { repair_id: ReconciliationId(346), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(347), op: ExtentReopen { repair_id: ReconciliationId(347), extent_id: 176 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(348), op: ExtentFlush { repair_id: ReconciliationId(348), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(349), op: ExtentClose { repair_id: ReconciliationId(349), extent_id: 125 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(350), op: ExtentRepair { repair_id: ReconciliationId(350), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: 
[ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(351), op: ExtentReopen { repair_id: ReconciliationId(351), extent_id: 125 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(352), op: ExtentFlush { repair_id: ReconciliationId(352), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(353), op: ExtentClose { repair_id: ReconciliationId(353), extent_id: 90 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(354), op: ExtentRepair { repair_id: ReconciliationId(354), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(355), op: ExtentReopen { repair_id: ReconciliationId(355), extent_id: 90 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(356), op: ExtentFlush { repair_id: ReconciliationId(356), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(357), op: ExtentClose { repair_id: ReconciliationId(357), extent_id: 144 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(358), op: ExtentRepair { repair_id: ReconciliationId(358), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(359), op: ExtentReopen { repair_id: ReconciliationId(359), extent_id: 144 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(360), op: ExtentFlush { repair_id: ReconciliationId(360), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(361), op: ExtentClose { repair_id: 
ReconciliationId(361), extent_id: 156 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(362), op: ExtentRepair { repair_id: ReconciliationId(362), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(363), op: ExtentReopen { repair_id: ReconciliationId(363), extent_id: 156 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(364), op: ExtentFlush { repair_id: ReconciliationId(364), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(365), op: ExtentClose { repair_id: ReconciliationId(365), extent_id: 33 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(366), op: ExtentRepair { repair_id: ReconciliationId(366), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(367), op: ExtentReopen { repair_id: ReconciliationId(367), extent_id: 33 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(368), op: ExtentFlush { repair_id: ReconciliationId(368), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(369), op: ExtentClose { repair_id: ReconciliationId(369), extent_id: 48 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(370), op: ExtentRepair { repair_id: ReconciliationId(370), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(371), op: ExtentReopen { repair_id: ReconciliationId(371), extent_id: 48 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(372), op: ExtentFlush { repair_id: ReconciliationId(372), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(373), op: ExtentClose { repair_id: ReconciliationId(373), extent_id: 78 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(374), op: ExtentRepair { repair_id: ReconciliationId(374), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(375), op: ExtentReopen { repair_id: ReconciliationId(375), extent_id: 78 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(376), op: ExtentFlush { repair_id: ReconciliationId(376), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(377), op: ExtentClose { repair_id: ReconciliationId(377), extent_id: 88 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(378), op: ExtentRepair { repair_id: ReconciliationId(378), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(379), op: ExtentReopen { repair_id: ReconciliationId(379), extent_id: 88 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(380), op: ExtentFlush { repair_id: ReconciliationId(380), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(381), op: ExtentClose { repair_id: ReconciliationId(381), extent_id: 82 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(382), op: ExtentRepair { repair_id: ReconciliationId(382), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 
127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(383), op: ExtentReopen { repair_id: ReconciliationId(383), extent_id: 82 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(384), op: ExtentFlush { repair_id: ReconciliationId(384), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(385), op: ExtentClose { repair_id: ReconciliationId(385), extent_id: 86 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(386), op: ExtentRepair { repair_id: ReconciliationId(386), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(387), op: ExtentReopen { repair_id: ReconciliationId(387), extent_id: 86 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(388), op: ExtentFlush { repair_id: ReconciliationId(388), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(389), op: ExtentClose { repair_id: ReconciliationId(389), extent_id: 94 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(390), op: ExtentRepair { repair_id: ReconciliationId(390), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(391), op: ExtentReopen { repair_id: ReconciliationId(391), extent_id: 94 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(392), op: ExtentFlush { repair_id: ReconciliationId(392), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(393), op: ExtentClose { 
repair_id: ReconciliationId(393), extent_id: 145 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(394), op: ExtentRepair { repair_id: ReconciliationId(394), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(395), op: ExtentReopen { repair_id: ReconciliationId(395), extent_id: 145 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(396), op: ExtentFlush { repair_id: ReconciliationId(396), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(397), op: ExtentClose { repair_id: ReconciliationId(397), extent_id: 168 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(398), op: ExtentRepair { repair_id: ReconciliationId(398), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(399), op: ExtentReopen { repair_id: ReconciliationId(399), extent_id: 168 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(400), op: ExtentFlush { repair_id: ReconciliationId(400), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(401), op: ExtentClose { repair_id: ReconciliationId(401), extent_id: 127 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(402), op: ExtentRepair { repair_id: ReconciliationId(402), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(403), op: ExtentReopen { repair_id: ReconciliationId(403), extent_id: 127 }, state: ClientData([New, New, New]) }, 
ReconcileIO { id: ReconciliationId(404), op: ExtentFlush { repair_id: ReconciliationId(404), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(405), op: ExtentClose { repair_id: ReconciliationId(405), extent_id: 26 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(406), op: ExtentRepair { repair_id: ReconciliationId(406), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(407), op: ExtentReopen { repair_id: ReconciliationId(407), extent_id: 26 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(408), op: ExtentFlush { repair_id: ReconciliationId(408), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(409), op: ExtentClose { repair_id: ReconciliationId(409), extent_id: 34 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(410), op: ExtentRepair { repair_id: ReconciliationId(410), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(411), op: ExtentReopen { repair_id: ReconciliationId(411), extent_id: 34 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(412), op: ExtentFlush { repair_id: ReconciliationId(412), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(413), op: ExtentClose { repair_id: ReconciliationId(413), extent_id: 12 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(414), op: ExtentRepair { repair_id: ReconciliationId(414), extent_id: 12, source_client_id: ClientId(1), 
source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(415), op: ExtentReopen { repair_id: ReconciliationId(415), extent_id: 12 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(416), op: ExtentFlush { repair_id: ReconciliationId(416), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(417), op: ExtentClose { repair_id: ReconciliationId(417), extent_id: 172 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(418), op: ExtentRepair { repair_id: ReconciliationId(418), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(419), op: ExtentReopen { repair_id: ReconciliationId(419), extent_id: 172 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(420), op: ExtentFlush { repair_id: ReconciliationId(420), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(421), op: ExtentClose { repair_id: ReconciliationId(421), extent_id: 53 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(422), op: ExtentRepair { repair_id: ReconciliationId(422), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(423), op: ExtentReopen { repair_id: ReconciliationId(423), extent_id: 53 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(424), op: ExtentFlush { repair_id: ReconciliationId(424), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(425), op: ExtentClose { repair_id: ReconciliationId(425), extent_id: 175 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(426), op: ExtentRepair { repair_id: ReconciliationId(426), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(427), op: ExtentReopen { repair_id: ReconciliationId(427), extent_id: 175 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(428), op: ExtentFlush { repair_id: ReconciliationId(428), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(429), op: ExtentClose { repair_id: ReconciliationId(429), extent_id: 140 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(430), op: ExtentRepair { repair_id: ReconciliationId(430), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(431), op: ExtentReopen { repair_id: ReconciliationId(431), extent_id: 140 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(432), op: ExtentFlush { repair_id: ReconciliationId(432), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(433), op: ExtentClose { repair_id: ReconciliationId(433), extent_id: 167 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(434), op: ExtentRepair { repair_id: ReconciliationId(434), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(435), op: ExtentReopen { repair_id: ReconciliationId(435), extent_id: 167 
}, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(436), op: ExtentFlush { repair_id: ReconciliationId(436), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(437), op: ExtentClose { repair_id: ReconciliationId(437), extent_id: 142 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(438), op: ExtentRepair { repair_id: ReconciliationId(438), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(439), op: ExtentReopen { repair_id: ReconciliationId(439), extent_id: 142 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(440), op: ExtentFlush { repair_id: ReconciliationId(440), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(441), op: ExtentClose { repair_id: ReconciliationId(441), extent_id: 171 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(442), op: ExtentRepair { repair_id: ReconciliationId(442), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(443), op: ExtentReopen { repair_id: ReconciliationId(443), extent_id: 171 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(444), op: ExtentFlush { repair_id: ReconciliationId(444), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(445), op: ExtentClose { repair_id: ReconciliationId(445), extent_id: 42 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(446), op: ExtentRepair { repair_id: ReconciliationId(446), 
extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(447), op: ExtentReopen { repair_id: ReconciliationId(447), extent_id: 42 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(448), op: ExtentFlush { repair_id: ReconciliationId(448), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(449), op: ExtentClose { repair_id: ReconciliationId(449), extent_id: 43 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(450), op: ExtentRepair { repair_id: ReconciliationId(450), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(451), op: ExtentReopen { repair_id: ReconciliationId(451), extent_id: 43 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(452), op: ExtentFlush { repair_id: ReconciliationId(452), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(453), op: ExtentClose { repair_id: ReconciliationId(453), extent_id: 179 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(454), op: ExtentRepair { repair_id: ReconciliationId(454), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(455), op: ExtentReopen { repair_id: ReconciliationId(455), extent_id: 179 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(456), op: ExtentFlush { repair_id: ReconciliationId(456), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, 
New, New]) }, ReconcileIO { id: ReconciliationId(457), op: ExtentClose { repair_id: ReconciliationId(457), extent_id: 120 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(458), op: ExtentRepair { repair_id: ReconciliationId(458), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(459), op: ExtentReopen { repair_id: ReconciliationId(459), extent_id: 120 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(460), op: ExtentFlush { repair_id: ReconciliationId(460), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(461), op: ExtentClose { repair_id: ReconciliationId(461), extent_id: 66 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(462), op: ExtentRepair { repair_id: ReconciliationId(462), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(463), op: ExtentReopen { repair_id: ReconciliationId(463), extent_id: 66 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(464), op: ExtentFlush { repair_id: ReconciliationId(464), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(465), op: ExtentClose { repair_id: ReconciliationId(465), extent_id: 132 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(466), op: ExtentRepair { repair_id: ReconciliationId(466), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(467), op: ExtentReopen { repair_id: 
ReconciliationId(467), extent_id: 132 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(468), op: ExtentFlush { repair_id: ReconciliationId(468), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(469), op: ExtentClose { repair_id: ReconciliationId(469), extent_id: 173 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(470), op: ExtentRepair { repair_id: ReconciliationId(470), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(471), op: ExtentReopen { repair_id: ReconciliationId(471), extent_id: 173 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(472), op: ExtentFlush { repair_id: ReconciliationId(472), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(473), op: ExtentClose { repair_id: ReconciliationId(473), extent_id: 180 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(474), op: ExtentRepair { repair_id: ReconciliationId(474), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(475), op: ExtentReopen { repair_id: ReconciliationId(475), extent_id: 180 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(476), op: ExtentFlush { repair_id: ReconciliationId(476), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(477), op: ExtentClose { repair_id: ReconciliationId(477), extent_id: 184 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(478), op: ExtentRepair 
{ repair_id: ReconciliationId(478), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(479), op: ExtentReopen { repair_id: ReconciliationId(479), extent_id: 184 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(480), op: ExtentFlush { repair_id: ReconciliationId(480), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(481), op: ExtentClose { repair_id: ReconciliationId(481), extent_id: 24 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(482), op: ExtentRepair { repair_id: ReconciliationId(482), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(483), op: ExtentReopen { repair_id: ReconciliationId(483), extent_id: 24 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(484), op: ExtentFlush { repair_id: ReconciliationId(484), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(485), op: ExtentClose { repair_id: ReconciliationId(485), extent_id: 50 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(486), op: ExtentRepair { repair_id: ReconciliationId(486), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(487), op: ExtentReopen { repair_id: ReconciliationId(487), extent_id: 50 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(488), op: ExtentFlush { repair_id: ReconciliationId(488), extent_id: 117, client_id: ClientId(1), flush_number: 2, 
gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(489), op: ExtentClose { repair_id: ReconciliationId(489), extent_id: 117 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(490), op: ExtentRepair { repair_id: ReconciliationId(490), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(491), op: ExtentReopen { repair_id: ReconciliationId(491), extent_id: 117 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(492), op: ExtentFlush { repair_id: ReconciliationId(492), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(493), op: ExtentClose { repair_id: ReconciliationId(493), extent_id: 38 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(494), op: ExtentRepair { repair_id: ReconciliationId(494), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(495), op: ExtentReopen { repair_id: ReconciliationId(495), extent_id: 38 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(496), op: ExtentFlush { repair_id: ReconciliationId(496), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(497), op: ExtentClose { repair_id: ReconciliationId(497), extent_id: 129 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(498), op: ExtentRepair { repair_id: ReconciliationId(498), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(499), 
op: ExtentReopen { repair_id: ReconciliationId(499), extent_id: 129 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(500), op: ExtentFlush { repair_id: ReconciliationId(500), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(501), op: ExtentClose { repair_id: ReconciliationId(501), extent_id: 153 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(502), op: ExtentRepair { repair_id: ReconciliationId(502), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(503), op: ExtentReopen { repair_id: ReconciliationId(503), extent_id: 153 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(504), op: ExtentFlush { repair_id: ReconciliationId(504), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(505), op: ExtentClose { repair_id: ReconciliationId(505), extent_id: 181 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(506), op: ExtentRepair { repair_id: ReconciliationId(506), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(507), op: ExtentReopen { repair_id: ReconciliationId(507), extent_id: 181 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(508), op: ExtentFlush { repair_id: ReconciliationId(508), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(509), op: ExtentClose { repair_id: ReconciliationId(509), extent_id: 128 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(510), op: ExtentRepair { repair_id: ReconciliationId(510), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(511), op: ExtentReopen { repair_id: ReconciliationId(511), extent_id: 128 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(512), op: ExtentFlush { repair_id: ReconciliationId(512), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(513), op: ExtentClose { repair_id: ReconciliationId(513), extent_id: 72 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(514), op: ExtentRepair { repair_id: ReconciliationId(514), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(515), op: ExtentReopen { repair_id: ReconciliationId(515), extent_id: 72 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(516), op: ExtentFlush { repair_id: ReconciliationId(516), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(517), op: ExtentClose { repair_id: ReconciliationId(517), extent_id: 21 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(518), op: ExtentRepair { repair_id: ReconciliationId(518), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(519), op: ExtentReopen { repair_id: ReconciliationId(519), extent_id: 21 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(520), op: ExtentFlush { repair_id: ReconciliationId(520), extent_id: 61, 
client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(521), op: ExtentClose { repair_id: ReconciliationId(521), extent_id: 61 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(522), op: ExtentRepair { repair_id: ReconciliationId(522), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(523), op: ExtentReopen { repair_id: ReconciliationId(523), extent_id: 61 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(524), op: ExtentFlush { repair_id: ReconciliationId(524), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(525), op: ExtentClose { repair_id: ReconciliationId(525), extent_id: 134 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(526), op: ExtentRepair { repair_id: ReconciliationId(526), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(527), op: ExtentReopen { repair_id: ReconciliationId(527), extent_id: 134 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(528), op: ExtentFlush { repair_id: ReconciliationId(528), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(529), op: ExtentClose { repair_id: ReconciliationId(529), extent_id: 46 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(530), op: ExtentRepair { repair_id: ReconciliationId(530), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, 
ReconcileIO { id: ReconciliationId(531), op: ExtentReopen { repair_id: ReconciliationId(531), extent_id: 46 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(532), op: ExtentFlush { repair_id: ReconciliationId(532), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(533), op: ExtentClose { repair_id: ReconciliationId(533), extent_id: 41 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(534), op: ExtentRepair { repair_id: ReconciliationId(534), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(535), op: ExtentReopen { repair_id: ReconciliationId(535), extent_id: 41 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(536), op: ExtentFlush { repair_id: ReconciliationId(536), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(537), op: ExtentClose { repair_id: ReconciliationId(537), extent_id: 22 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(538), op: ExtentRepair { repair_id: ReconciliationId(538), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(539), op: ExtentReopen { repair_id: ReconciliationId(539), extent_id: 22 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(540), op: ExtentFlush { repair_id: ReconciliationId(540), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(541), op: ExtentClose { repair_id: ReconciliationId(541), extent_id: 56 }, state: ClientData([New, New, New]) 
}, ReconcileIO { id: ReconciliationId(542), op: ExtentRepair { repair_id: ReconciliationId(542), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(543), op: ExtentReopen { repair_id: ReconciliationId(543), extent_id: 56 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(544), op: ExtentFlush { repair_id: ReconciliationId(544), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(545), op: ExtentClose { repair_id: ReconciliationId(545), extent_id: 23 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(546), op: ExtentRepair { repair_id: ReconciliationId(546), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(547), op: ExtentReopen { repair_id: ReconciliationId(547), extent_id: 23 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(548), op: ExtentFlush { repair_id: ReconciliationId(548), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(549), op: ExtentClose { repair_id: ReconciliationId(549), extent_id: 10 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(550), op: ExtentRepair { repair_id: ReconciliationId(550), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(551), op: ExtentReopen { repair_id: ReconciliationId(551), extent_id: 10 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(552), op: ExtentFlush { repair_id: ReconciliationId(552), 
extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(553), op: ExtentClose { repair_id: ReconciliationId(553), extent_id: 44 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(554), op: ExtentRepair { repair_id: ReconciliationId(554), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(555), op: ExtentReopen { repair_id: ReconciliationId(555), extent_id: 44 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(556), op: ExtentFlush { repair_id: ReconciliationId(556), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(557), op: ExtentClose { repair_id: ReconciliationId(557), extent_id: 59 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(558), op: ExtentRepair { repair_id: ReconciliationId(558), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(559), op: ExtentReopen { repair_id: ReconciliationId(559), extent_id: 59 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(560), op: ExtentFlush { repair_id: ReconciliationId(560), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(561), op: ExtentClose { repair_id: ReconciliationId(561), extent_id: 154 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(562), op: ExtentRepair { repair_id: ReconciliationId(562), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, 
New, New]) }, ReconcileIO { id: ReconciliationId(563), op: ExtentReopen { repair_id: ReconciliationId(563), extent_id: 154 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(564), op: ExtentFlush { repair_id: ReconciliationId(564), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(565), op: ExtentClose { repair_id: ReconciliationId(565), extent_id: 159 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(566), op: ExtentRepair { repair_id: ReconciliationId(566), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(567), op: ExtentReopen { repair_id: ReconciliationId(567), extent_id: 159 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(568), op: ExtentFlush { repair_id: ReconciliationId(568), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(569), op: ExtentClose { repair_id: ReconciliationId(569), extent_id: 31 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(570), op: ExtentRepair { repair_id: ReconciliationId(570), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(571), op: ExtentReopen { repair_id: ReconciliationId(571), extent_id: 31 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(572), op: ExtentFlush { repair_id: ReconciliationId(572), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(573), op: ExtentClose { repair_id: ReconciliationId(573), extent_id: 87 }, state: 
ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(574), op: ExtentRepair { repair_id: ReconciliationId(574), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(575), op: ExtentReopen { repair_id: ReconciliationId(575), extent_id: 87 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(576), op: ExtentFlush { repair_id: ReconciliationId(576), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(577), op: ExtentClose { repair_id: ReconciliationId(577), extent_id: 148 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(578), op: ExtentRepair { repair_id: ReconciliationId(578), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(579), op: ExtentReopen { repair_id: ReconciliationId(579), extent_id: 148 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(580), op: ExtentFlush { repair_id: ReconciliationId(580), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(581), op: ExtentClose { repair_id: ReconciliationId(581), extent_id: 162 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(582), op: ExtentRepair { repair_id: ReconciliationId(582), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(583), op: ExtentReopen { repair_id: ReconciliationId(583), extent_id: 162 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(584), op: ExtentFlush { 
repair_id: ReconciliationId(584), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(585), op: ExtentClose { repair_id: ReconciliationId(585), extent_id: 51 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(586), op: ExtentRepair { repair_id: ReconciliationId(586), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(587), op: ExtentReopen { repair_id: ReconciliationId(587), extent_id: 51 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(588), op: ExtentFlush { repair_id: ReconciliationId(588), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(589), op: ExtentClose { repair_id: ReconciliationId(589), extent_id: 39 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(590), op: ExtentRepair { repair_id: ReconciliationId(590), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(591), op: ExtentReopen { repair_id: ReconciliationId(591), extent_id: 39 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(592), op: ExtentFlush { repair_id: ReconciliationId(592), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(593), op: ExtentClose { repair_id: ReconciliationId(593), extent_id: 155 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(594), op: ExtentRepair { repair_id: ReconciliationId(594), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: 
[ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(595), op: ExtentReopen { repair_id: ReconciliationId(595), extent_id: 155 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(596), op: ExtentFlush { repair_id: ReconciliationId(596), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(597), op: ExtentClose { repair_id: ReconciliationId(597), extent_id: 29 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(598), op: ExtentRepair { repair_id: ReconciliationId(598), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(599), op: ExtentReopen { repair_id: ReconciliationId(599), extent_id: 29 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(600), op: ExtentFlush { repair_id: ReconciliationId(600), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(601), op: ExtentClose { repair_id: ReconciliationId(601), extent_id: 135 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(602), op: ExtentRepair { repair_id: ReconciliationId(602), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(603), op: ExtentReopen { repair_id: ReconciliationId(603), extent_id: 135 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(604), op: ExtentFlush { repair_id: ReconciliationId(604), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(605), op: ExtentClose { repair_id: 
ReconciliationId(605), extent_id: 161 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(606), op: ExtentRepair { repair_id: ReconciliationId(606), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(607), op: ExtentReopen { repair_id: ReconciliationId(607), extent_id: 161 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(608), op: ExtentFlush { repair_id: ReconciliationId(608), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(609), op: ExtentClose { repair_id: ReconciliationId(609), extent_id: 15 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(610), op: ExtentRepair { repair_id: ReconciliationId(610), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(611), op: ExtentReopen { repair_id: ReconciliationId(611), extent_id: 15 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(612), op: ExtentFlush { repair_id: ReconciliationId(612), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(613), op: ExtentClose { repair_id: ReconciliationId(613), extent_id: 64 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(614), op: ExtentRepair { repair_id: ReconciliationId(614), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(615), op: ExtentReopen { repair_id: ReconciliationId(615), extent_id: 64 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(616), op: ExtentFlush { repair_id: ReconciliationId(616), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(617), op: ExtentClose { repair_id: ReconciliationId(617), extent_id: 93 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(618), op: ExtentRepair { repair_id: ReconciliationId(618), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(619), op: ExtentReopen { repair_id: ReconciliationId(619), extent_id: 93 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(620), op: ExtentFlush { repair_id: ReconciliationId(620), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(621), op: ExtentClose { repair_id: ReconciliationId(621), extent_id: 40 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(622), op: ExtentRepair { repair_id: ReconciliationId(622), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(623), op: ExtentReopen { repair_id: ReconciliationId(623), extent_id: 40 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(624), op: ExtentFlush { repair_id: ReconciliationId(624), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(625), op: ExtentClose { repair_id: ReconciliationId(625), extent_id: 49 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(626), op: ExtentRepair { repair_id: ReconciliationId(626), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 
127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(627), op: ExtentReopen { repair_id: ReconciliationId(627), extent_id: 49 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(628), op: ExtentFlush { repair_id: ReconciliationId(628), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(629), op: ExtentClose { repair_id: ReconciliationId(629), extent_id: 81 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(630), op: ExtentRepair { repair_id: ReconciliationId(630), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(631), op: ExtentReopen { repair_id: ReconciliationId(631), extent_id: 81 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(632), op: ExtentFlush { repair_id: ReconciliationId(632), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(633), op: ExtentClose { repair_id: ReconciliationId(633), extent_id: 170 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(634), op: ExtentRepair { repair_id: ReconciliationId(634), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(635), op: ExtentReopen { repair_id: ReconciliationId(635), extent_id: 170 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(636), op: ExtentFlush { repair_id: ReconciliationId(636), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(637), op: ExtentClose 
{ repair_id: ReconciliationId(637), extent_id: 13 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(638), op: ExtentRepair { repair_id: ReconciliationId(638), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(639), op: ExtentReopen { repair_id: ReconciliationId(639), extent_id: 13 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(640), op: ExtentFlush { repair_id: ReconciliationId(640), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(641), op: ExtentClose { repair_id: ReconciliationId(641), extent_id: 27 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(642), op: ExtentRepair { repair_id: ReconciliationId(642), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(643), op: ExtentReopen { repair_id: ReconciliationId(643), extent_id: 27 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(644), op: ExtentFlush { repair_id: ReconciliationId(644), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(645), op: ExtentClose { repair_id: ReconciliationId(645), extent_id: 8 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(646), op: ExtentRepair { repair_id: ReconciliationId(646), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(647), op: ExtentReopen { repair_id: ReconciliationId(647), extent_id: 8 }, state: ClientData([New, New, New]) }, ReconcileIO { 
id: ReconciliationId(648), op: ExtentFlush { repair_id: ReconciliationId(648), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(649), op: ExtentClose { repair_id: ReconciliationId(649), extent_id: 1 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(650), op: ExtentRepair { repair_id: ReconciliationId(650), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(651), op: ExtentReopen { repair_id: ReconciliationId(651), extent_id: 1 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(652), op: ExtentFlush { repair_id: ReconciliationId(652), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(653), op: ExtentClose { repair_id: ReconciliationId(653), extent_id: 9 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(654), op: ExtentRepair { repair_id: ReconciliationId(654), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(655), op: ExtentReopen { repair_id: ReconciliationId(655), extent_id: 9 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(656), op: ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(657), op: ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(658), op: ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 
127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(659), op: ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(660), op: ExtentFlush { repair_id: ReconciliationId(660), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(661), op: ExtentClose { repair_id: ReconciliationId(661), extent_id: 131 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(662), op: ExtentRepair { repair_id: ReconciliationId(662), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(663), op: ExtentReopen { repair_id: ReconciliationId(663), extent_id: 131 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(664), op: ExtentFlush { repair_id: ReconciliationId(664), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(665), op: ExtentClose { repair_id: ReconciliationId(665), extent_id: 17 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(666), op: ExtentRepair { repair_id: ReconciliationId(666), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(667), op: ExtentReopen { repair_id: ReconciliationId(667), extent_id: 17 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(668), op: ExtentFlush { repair_id: ReconciliationId(668), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(669), op: 
ExtentClose { repair_id: ReconciliationId(669), extent_id: 182 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(670), op: ExtentRepair { repair_id: ReconciliationId(670), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(671), op: ExtentReopen { repair_id: ReconciliationId(671), extent_id: 182 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(672), op: ExtentFlush { repair_id: ReconciliationId(672), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(673), op: ExtentClose { repair_id: ReconciliationId(673), extent_id: 183 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(674), op: ExtentRepair { repair_id: ReconciliationId(674), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(675), op: ExtentReopen { repair_id: ReconciliationId(675), extent_id: 183 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(676), op: ExtentFlush { repair_id: ReconciliationId(676), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(677), op: ExtentClose { repair_id: ReconciliationId(677), extent_id: 20 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(678), op: ExtentRepair { repair_id: ReconciliationId(678), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(679), op: ExtentReopen { repair_id: ReconciliationId(679), extent_id: 20 }, state: ClientData([New, New, 
New]) }, ReconcileIO { id: ReconciliationId(680), op: ExtentFlush { repair_id: ReconciliationId(680), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(681), op: ExtentClose { repair_id: ReconciliationId(681), extent_id: 63 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(682), op: ExtentRepair { repair_id: ReconciliationId(682), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(683), op: ExtentReopen { repair_id: ReconciliationId(683), extent_id: 63 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(684), op: ExtentFlush { repair_id: ReconciliationId(684), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(685), op: ExtentClose { repair_id: ReconciliationId(685), extent_id: 166 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(686), op: ExtentRepair { repair_id: ReconciliationId(686), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(687), op: ExtentReopen { repair_id: ReconciliationId(687), extent_id: 166 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(688), op: ExtentFlush { repair_id: ReconciliationId(688), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(689), op: ExtentClose { repair_id: ReconciliationId(689), extent_id: 14 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(690), op: ExtentRepair { repair_id: ReconciliationId(690), extent_id: 14, source_client_id: 
ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(691), op: ExtentReopen { repair_id: ReconciliationId(691), extent_id: 14 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(692), op: ExtentFlush { repair_id: ReconciliationId(692), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(693), op: ExtentClose { repair_id: ReconciliationId(693), extent_id: 37 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(694), op: ExtentRepair { repair_id: ReconciliationId(694), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(695), op: ExtentReopen { repair_id: ReconciliationId(695), extent_id: 37 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(696), op: ExtentFlush { repair_id: ReconciliationId(696), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(697), op: ExtentClose { repair_id: ReconciliationId(697), extent_id: 124 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(698), op: ExtentRepair { repair_id: ReconciliationId(698), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(699), op: ExtentReopen { repair_id: ReconciliationId(699), extent_id: 124 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(700), op: ExtentFlush { repair_id: ReconciliationId(700), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: 
ReconciliationId(701), op: ExtentClose { repair_id: ReconciliationId(701), extent_id: 5 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(702), op: ExtentRepair { repair_id: ReconciliationId(702), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(703), op: ExtentReopen { repair_id: ReconciliationId(703), extent_id: 5 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(704), op: ExtentFlush { repair_id: ReconciliationId(704), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(705), op: ExtentClose { repair_id: ReconciliationId(705), extent_id: 55 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(706), op: ExtentRepair { repair_id: ReconciliationId(706), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(707), op: ExtentReopen { repair_id: ReconciliationId(707), extent_id: 55 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(708), op: ExtentFlush { repair_id: ReconciliationId(708), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(709), op: ExtentClose { repair_id: ReconciliationId(709), extent_id: 62 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(710), op: ExtentRepair { repair_id: ReconciliationId(710), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(711), op: ExtentReopen { repair_id: ReconciliationId(711), extent_id: 62 }, state: 
ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(712), op: ExtentFlush { repair_id: ReconciliationId(712), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(713), op: ExtentClose { repair_id: ReconciliationId(713), extent_id: 89 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(714), op: ExtentRepair { repair_id: ReconciliationId(714), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(715), op: ExtentReopen { repair_id: ReconciliationId(715), extent_id: 89 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(716), op: ExtentFlush { repair_id: ReconciliationId(716), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(717), op: ExtentClose { repair_id: ReconciliationId(717), extent_id: 126 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(718), op: ExtentRepair { repair_id: ReconciliationId(718), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(719), op: ExtentReopen { repair_id: ReconciliationId(719), extent_id: 126 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(720), op: ExtentFlush { repair_id: ReconciliationId(720), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(721), op: ExtentClose { repair_id: ReconciliationId(721), extent_id: 186 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(722), op: ExtentRepair { repair_id: ReconciliationId(722), extent_id: 186, 
source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(723), op: ExtentReopen { repair_id: ReconciliationId(723), extent_id: 186 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(724), op: ExtentFlush { repair_id: ReconciliationId(724), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(725), op: ExtentClose { repair_id: ReconciliationId(725), extent_id: 108 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(726), op: ExtentRepair { repair_id: ReconciliationId(726), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(727), op: ExtentReopen { repair_id: ReconciliationId(727), extent_id: 108 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(728), op: ExtentFlush { repair_id: ReconciliationId(728), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(729), op: ExtentClose { repair_id: ReconciliationId(729), extent_id: 109 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(730), op: ExtentRepair { repair_id: ReconciliationId(730), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(731), op: ExtentReopen { repair_id: ReconciliationId(731), extent_id: 109 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(732), op: ExtentFlush { repair_id: ReconciliationId(732), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) 
}, ReconcileIO { id: ReconciliationId(733), op: ExtentClose { repair_id: ReconciliationId(733), extent_id: 104 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(734), op: ExtentRepair { repair_id: ReconciliationId(734), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(735), op: ExtentReopen { repair_id: ReconciliationId(735), extent_id: 104 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(736), op: ExtentFlush { repair_id: ReconciliationId(736), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(737), op: ExtentClose { repair_id: ReconciliationId(737), extent_id: 101 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(738), op: ExtentRepair { repair_id: ReconciliationId(738), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(739), op: ExtentReopen { repair_id: ReconciliationId(739), extent_id: 101 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(740), op: ExtentFlush { repair_id: ReconciliationId(740), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(741), op: ExtentClose { repair_id: ReconciliationId(741), extent_id: 74 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(742), op: ExtentRepair { repair_id: ReconciliationId(742), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(743), op: ExtentReopen { repair_id: 
ReconciliationId(743), extent_id: 74 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(744), op: ExtentFlush { repair_id: ReconciliationId(744), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(745), op: ExtentClose { repair_id: ReconciliationId(745), extent_id: 83 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(746), op: ExtentRepair { repair_id: ReconciliationId(746), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(747), op: ExtentReopen { repair_id: ReconciliationId(747), extent_id: 83 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(748), op: ExtentFlush { repair_id: ReconciliationId(748), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(749), op: ExtentClose { repair_id: ReconciliationId(749), extent_id: 119 }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(750), op: ExtentRepair { repair_id: ReconciliationId(750), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }, ReconcileIO { id: ReconciliationId(751), op: ExtentReopen { repair_id: ReconciliationId(751), extent_id: 119 }, state: ClientData([New, New, New]) }], : downstairs
35255 Sep 22 23:15:03.666 INFO Begin repair with 752 commands
35256 Sep 22 23:15:03.666 INFO Pop front: ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35257 Sep 22 23:15:03.666 INFO Sent repair work, now wait for resp
35258 Sep 22 23:15:03.666 INFO [0] received reconcile message
35259 Sep 22 23:15:03.666 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35260 Sep 22 23:15:03.666 INFO [0] client ExtentFlush { repair_id: ReconciliationId(0), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35261 Sep 22 23:15:03.666 INFO [1] received reconcile message
35262 Sep 22 23:15:03.666 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35263 Sep 22 23:15:03.666 INFO [1] client ExtentFlush { repair_id: ReconciliationId(0), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35264 Sep 22 23:15:03.666 INFO [2] received reconcile message
35265 Sep 22 23:15:03.666 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(0), op: ExtentFlush { repair_id: ReconciliationId(0), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35266 Sep 22 23:15:03.666 INFO [2] client ExtentFlush { repair_id: ReconciliationId(0), extent_id: 115, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35267 Sep 22 23:15:03.666 DEBG 0 Flush extent 115 with f:2 g:2
35268 Sep 22 23:15:03.666 DEBG Flush just extent 115 with f:2 and g:2
35269 Sep 22 23:15:03.666 DEBG [1] It's time to notify for 0
35270 Sep 22 23:15:03.666 INFO Completion from [1] id:0 status:true
35271 Sep 22 23:15:03.667 INFO [1/752] Repair commands completed
35272 Sep 22 23:15:03.667 INFO Pop front: ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 115 }, state: ClientData([New, New, New]) }
35273 Sep 22 23:15:03.667 INFO Sent repair work, now wait for resp
35274 Sep 22 23:15:03.667 INFO [0] received reconcile message
35275 Sep 22 23:15:03.667 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 115 }, state: ClientData([InProgress, New, New]) }, : downstairs
35276 Sep 22 23:15:03.667 INFO [0] client ExtentClose { repair_id: ReconciliationId(1), extent_id: 115 }
35277 Sep 22 23:15:03.667 INFO [1] received reconcile message
35278 Sep 22 23:15:03.667 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 115 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35279 Sep 22 23:15:03.667 INFO [1] client ExtentClose { repair_id: ReconciliationId(1), extent_id: 115 }
35280 Sep 22 23:15:03.667 INFO [2] received reconcile message
35281 Sep 22 23:15:03.667 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(1), op: ExtentClose { repair_id: ReconciliationId(1), extent_id: 115 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35282 Sep 22 23:15:03.667 INFO [2] client ExtentClose { repair_id: ReconciliationId(1), extent_id: 115 }
35283 Sep 22 23:15:03.667 DEBG 1 Close extent 115
35284 Sep 22 23:15:03.667 DEBG 1 Close extent 115
35285 Sep 22 23:15:03.668 DEBG 1 Close extent 115
35286 Sep 22 23:15:03.668 DEBG [2] It's time to notify for 1
35287 Sep 22 23:15:03.668 INFO Completion from [2] id:1 status:true
35288 Sep 22 23:15:03.668 INFO [2/752] Repair commands completed
35289 Sep 22 23:15:03.668 INFO Pop front: ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35290 Sep 22 23:15:03.668 INFO Sent repair work, now wait for resp
35291 Sep 22 23:15:03.668 INFO [0] received reconcile message
35292 Sep 22 23:15:03.668 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35293 Sep 22 23:15:03.668 INFO [0] client ExtentRepair { repair_id: ReconciliationId(2), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35294 Sep 22 23:15:03.668 INFO [0] Sending repair request ReconciliationId(2)
35295 Sep 22 23:15:03.668 INFO [1] received reconcile message
35296 Sep 22 23:15:03.668 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35297 Sep 22 23:15:03.668 INFO [1] client ExtentRepair { repair_id: ReconciliationId(2), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35298 Sep 22 23:15:03.668 INFO [1] No action required ReconciliationId(2)
35299 Sep 22 23:15:03.668 INFO [2] received reconcile message
35300 Sep 22 23:15:03.668 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(2), op: ExtentRepair { repair_id: ReconciliationId(2), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35301 Sep 22 23:15:03.668 INFO [2] client ExtentRepair { repair_id: ReconciliationId(2), extent_id: 115, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35302 Sep 22 23:15:03.668 INFO [2] No action required ReconciliationId(2)
35303 Sep 22 23:15:03.668 DEBG 2 Repair extent 115 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
35304 Sep 22 23:15:03.668 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/073.copy"
35305 Sep 22 23:15:03.733 INFO accepted connection, remote_addr: 127.0.0.1:60228, local_addr: 127.0.0.1:46213, task: repair
35306 Sep 22 23:15:03.734 TRCE incoming request, uri: /extent/115/files, method: GET, req_id: fc4aa63c-5d77-4b7d-9d5a-6f38c0bfc191, remote_addr: 127.0.0.1:60228, local_addr: 127.0.0.1:46213, task: repair
35307 Sep 22 23:15:03.734 INFO request completed, latency_us: 247, response_code: 200, uri: /extent/115/files, method: GET, req_id: fc4aa63c-5d77-4b7d-9d5a-6f38c0bfc191, remote_addr: 127.0.0.1:60228, local_addr: 127.0.0.1:46213, task: repair
35308 Sep 22 23:15:03.734 INFO eid:115 Found repair files: ["073", "073.db"]
35309 Sep 22 23:15:03.734 TRCE incoming request, uri: /newextent/115/data, method: GET, req_id: d59ca312-fa66-4fb5-84df-ae6f84c20c0e, remote_addr: 127.0.0.1:60228, local_addr: 127.0.0.1:46213, task: repair
35310 Sep 22 23:15:03.735 INFO request completed, latency_us: 473, response_code: 200, uri: /newextent/115/data, method: GET, req_id: d59ca312-fa66-4fb5-84df-ae6f84c20c0e, remote_addr: 127.0.0.1:60228, local_addr: 127.0.0.1:46213, task: repair
35311 Sep 22 23:15:03.740 TRCE incoming request, uri: /newextent/115/db, method: GET, req_id: a1ec65b7-9644-49e4-876e-f6d91b61ee23, remote_addr: 127.0.0.1:60228, local_addr: 127.0.0.1:46213, task: repair
35312 Sep 22 23:15:03.740 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/115/db, method: GET, req_id: a1ec65b7-9644-49e4-876e-f6d91b61ee23, remote_addr: 127.0.0.1:60228, local_addr: 127.0.0.1:46213, task: repair
35313 Sep 22 23:15:03.742 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/073.copy" to "/tmp/downstairs-vrx8aK6L/00/000/073.replace"
35314 Sep 22 23:15:03.742 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35315 Sep 22 23:15:03.743 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/073.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
35316 Sep 22 23:15:03.743 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/073"
35317 Sep 22 23:15:03.743 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/073.db"
35318 Sep 22 23:15:03.743 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35319 Sep 22 23:15:03.743 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/073.replace" to "/tmp/downstairs-vrx8aK6L/00/000/073.completed"
35320 Sep 22 23:15:03.743 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35321 Sep 22 23:15:03.743 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35322 Sep 22 23:15:03.743 DEBG [0] It's time to notify for 2
35323 Sep 22 23:15:03.744 INFO Completion from [0] id:2 status:true
35324 Sep 22 23:15:03.744 INFO [3/752] Repair commands completed
35325 Sep 22 23:15:03.744 INFO Pop front: ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 115 }, state: ClientData([New, New, New]) }
35326 Sep 22 23:15:03.744 INFO Sent repair work, now wait for resp
35327 Sep 22 23:15:03.744 INFO [0] received reconcile message
35328 Sep 22 23:15:03.744 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 115 }, state: ClientData([InProgress, New, New]) }, : downstairs
35329 Sep 22 23:15:03.744 INFO [0] client ExtentReopen { repair_id: ReconciliationId(3), extent_id: 115 }
35330 Sep 22 23:15:03.744 INFO [1] received reconcile message
35331 Sep 22 23:15:03.744 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 115 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35332 Sep 22 23:15:03.744 INFO [1] client ExtentReopen { repair_id: ReconciliationId(3), extent_id: 115 }
35333 Sep 22 23:15:03.744 INFO [2] received reconcile message
35334 Sep 22 23:15:03.744 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(3), op: ExtentReopen { repair_id: ReconciliationId(3), extent_id: 115 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35335 Sep 22 23:15:03.744 INFO [2] client ExtentReopen { repair_id: ReconciliationId(3), extent_id: 115 }
35336 Sep 22 23:15:03.744 DEBG 3 Reopen extent 115
35337 Sep 22 23:15:03.745 DEBG 3 Reopen extent 115
35338 Sep 22 23:15:03.745 DEBG 3 Reopen extent 115
35339 Sep 22 23:15:03.746 DEBG [2] It's time to notify for 3
35340 Sep 22 23:15:03.746 INFO Completion from [2] id:3 status:true
35341 Sep 22 23:15:03.746 INFO [4/752] Repair commands completed
35342 Sep 22 23:15:03.746 INFO Pop front: ReconcileIO { id: ReconciliationId(4), op: ExtentFlush { repair_id: ReconciliationId(4), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35343 Sep 22 23:15:03.746 INFO Sent repair work, now wait for resp
35344 Sep 22 23:15:03.746 INFO [0] received reconcile message
35345 Sep 22 23:15:03.746 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(4), op: ExtentFlush { repair_id: ReconciliationId(4), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35346 Sep 22 23:15:03.746 INFO [0] client ExtentFlush { repair_id: ReconciliationId(4), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35347 Sep 22 23:15:03.746 INFO [1] received reconcile message
35348 Sep 22 23:15:03.746 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(4), op: ExtentFlush { repair_id: ReconciliationId(4), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35349 Sep 22 23:15:03.746 INFO [1] client ExtentFlush { repair_id: ReconciliationId(4), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35350 Sep 22 23:15:03.746 INFO [2] received reconcile message
35351 Sep 22 23:15:03.746 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(4), op: ExtentFlush { repair_id: ReconciliationId(4), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35352 Sep 22 23:15:03.746 INFO [2] client ExtentFlush { repair_id: ReconciliationId(4), extent_id: 0, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35353 Sep 22 23:15:03.746 DEBG 4 Flush extent 0 with f:2 g:2
35354 Sep 22 23:15:03.746 DEBG Flush just extent 0 with f:2 and g:2
35355 Sep 22 23:15:03.746 DEBG [1] It's time to notify for 4
35356 Sep 22 23:15:03.746 INFO Completion from [1] id:4 status:true
35357 Sep 22 23:15:03.746 INFO [5/752] Repair commands completed
35358 Sep 22 23:15:03.746 INFO Pop front: ReconcileIO { id: ReconciliationId(5), op: ExtentClose { repair_id: ReconciliationId(5), extent_id: 0 }, state: ClientData([New, New, New]) }
35359 Sep 22 23:15:03.746 INFO Sent repair work, now wait for resp
35360 Sep 22 23:15:03.746 INFO [0] received reconcile message
35361 Sep 22 23:15:03.747 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(5), op: ExtentClose { repair_id: ReconciliationId(5), extent_id: 0 }, state: ClientData([InProgress, New, New]) }, : downstairs
35362 Sep 22 23:15:03.747 INFO [0] client ExtentClose { repair_id: ReconciliationId(5), extent_id: 0 }
35363 Sep 22 23:15:03.747 INFO [1] received reconcile message
35364 Sep 22 23:15:03.747 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(5), op: ExtentClose { repair_id: ReconciliationId(5), extent_id: 0 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35365 Sep 22 23:15:03.747 INFO [1] client ExtentClose { repair_id: ReconciliationId(5), extent_id: 0 }
35366 Sep 22 23:15:03.747 INFO [2] received reconcile message
35367 Sep 22 23:15:03.747 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(5), op: ExtentClose { repair_id: ReconciliationId(5), extent_id: 0 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35368 Sep 22 23:15:03.747 INFO [2] client ExtentClose { repair_id: ReconciliationId(5), extent_id: 0 }
35369 Sep 22 23:15:03.747 DEBG 5 Close extent 0
35370 Sep 22 23:15:03.747 DEBG 5 Close extent 0
35371 Sep 22 23:15:03.747 DEBG 5 Close extent 0
35372 Sep 22 23:15:03.748 DEBG [2] It's time to notify for 5
35373 Sep 22 23:15:03.748 INFO Completion from [2] id:5 status:true
35374 Sep 22 23:15:03.748 INFO [6/752] Repair commands completed
35375 Sep 22 23:15:03.748 INFO Pop front: ReconcileIO { id: ReconciliationId(6), op: ExtentRepair { repair_id: ReconciliationId(6), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35376 Sep 22 23:15:03.748 INFO Sent repair work, now wait for resp
35377 Sep 22 23:15:03.748 INFO [0] received reconcile message
35378 Sep 22 23:15:03.748 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(6), op: ExtentRepair { repair_id: ReconciliationId(6), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35379 Sep 22 23:15:03.748 INFO [0] client ExtentRepair { repair_id: ReconciliationId(6), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35380 Sep 22 23:15:03.748 INFO [0] Sending repair request ReconciliationId(6)
35381 Sep 22 23:15:03.748 INFO [1] received reconcile message
35382 Sep 22 23:15:03.748 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(6), op: ExtentRepair { repair_id: ReconciliationId(6), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35383 Sep 22 23:15:03.748 INFO [1] client ExtentRepair { repair_id: ReconciliationId(6), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35384 Sep 22 23:15:03.748 INFO [1] No action required ReconciliationId(6)
35385 Sep 22 23:15:03.748 INFO [2] received reconcile message
35386 Sep 22 23:15:03.748 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(6), op: ExtentRepair { repair_id: ReconciliationId(6), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35387 Sep 22 23:15:03.748 INFO [2] client ExtentRepair { repair_id: ReconciliationId(6), extent_id: 0, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35388 Sep 22 23:15:03.748 INFO [2] No action required ReconciliationId(6)
35389 Sep 22 23:15:03.748 DEBG 6 Repair extent 0 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
35390 Sep 22 23:15:03.748 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/000.copy"
35391 Sep 22 23:15:03.813 INFO accepted connection, remote_addr: 127.0.0.1:59635, local_addr: 127.0.0.1:46213, task: repair
35392 Sep 22 23:15:03.813 TRCE incoming request, uri: /extent/0/files, method: GET, req_id: 0d8be5b1-9b4c-484f-a8aa-f45fc2ffbf62, remote_addr: 127.0.0.1:59635, local_addr: 127.0.0.1:46213, task: repair
35393 Sep 22 23:15:03.814 INFO request completed, latency_us: 224, response_code: 200, uri: /extent/0/files, method: GET, req_id: 0d8be5b1-9b4c-484f-a8aa-f45fc2ffbf62, remote_addr: 127.0.0.1:59635, local_addr: 127.0.0.1:46213, task: repair
35394 Sep 22 23:15:03.814 INFO eid:0 Found repair files: ["000", "000.db"]
35395 Sep 22 23:15:03.814 TRCE incoming request, uri: /newextent/0/data, method: GET, req_id: 3bc4bfd9-1d32-4eab-b20a-0541a1f03211, remote_addr: 127.0.0.1:59635, local_addr: 127.0.0.1:46213, task: repair
35396 Sep 22 23:15:03.815 INFO request completed, latency_us: 322, response_code: 200, uri: /newextent/0/data, method: GET, req_id: 3bc4bfd9-1d32-4eab-b20a-0541a1f03211, remote_addr: 127.0.0.1:59635, local_addr: 127.0.0.1:46213, task: repair
35397 Sep 22 23:15:03.820 TRCE incoming request, uri: /newextent/0/db, method: GET, req_id: 17282a65-af12-4b9f-8647-66c0701d13f1, remote_addr: 127.0.0.1:59635, local_addr: 127.0.0.1:46213, task: repair
35398 Sep 22 23:15:03.820 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/0/db, method: GET, req_id: 17282a65-af12-4b9f-8647-66c0701d13f1, remote_addr: 127.0.0.1:59635, local_addr: 127.0.0.1:46213, task: repair
35399 Sep 22 23:15:03.821 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/000.copy" to "/tmp/downstairs-vrx8aK6L/00/000/000.replace"
35400 Sep 22 23:15:03.821 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35401 Sep 22 23:15:03.822 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/000.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
35402 Sep 22 23:15:03.822 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/000"
35403 Sep 22 23:15:03.822 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/000.db"
35404 Sep 22 23:15:03.823 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35405 Sep 22 23:15:03.823 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/000.replace" to "/tmp/downstairs-vrx8aK6L/00/000/000.completed"
35406 Sep 22 23:15:03.823 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35407 Sep 22 23:15:03.823 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35408 Sep 22 23:15:03.823 DEBG [0] It's time to notify for 6
35409 Sep 22 23:15:03.823 INFO Completion from [0] id:6 status:true
35410 Sep 22 23:15:03.823 INFO [7/752] Repair commands completed
35411 Sep 22 23:15:03.823 INFO Pop front: ReconcileIO { id: ReconciliationId(7), op: ExtentReopen { repair_id: ReconciliationId(7), extent_id: 0 }, state: ClientData([New, New, New]) }
35412 Sep 22 23:15:03.823 INFO Sent repair work, now wait for resp
35413 Sep 22 23:15:03.823 INFO [0] received reconcile message
35414 Sep 22 23:15:03.823 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(7), op: ExtentReopen { repair_id: ReconciliationId(7), extent_id: 0 }, state: ClientData([InProgress, New, New]) }, : downstairs
35415 Sep 22 23:15:03.823 INFO [0] client ExtentReopen { repair_id: ReconciliationId(7), extent_id: 0 }
35416 Sep 22 23:15:03.823 INFO [1] received reconcile message
35417 Sep 22 23:15:03.823 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(7), op: ExtentReopen { repair_id: ReconciliationId(7), extent_id: 0 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35418 Sep 22 23:15:03.823 INFO [1] client ExtentReopen { repair_id: ReconciliationId(7), extent_id: 0 }
35419 Sep 22 23:15:03.823 INFO [2] received reconcile message
35420 Sep 22 23:15:03.823 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(7), op: ExtentReopen { repair_id: ReconciliationId(7), extent_id: 0 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35421 Sep 22 23:15:03.823 INFO [2] client ExtentReopen { repair_id: ReconciliationId(7), extent_id: 0 }
35422 Sep 22 23:15:03.823 DEBG 7 Reopen extent 0
35423 Sep 22 23:15:03.824 DEBG 7 Reopen extent 0
35424 Sep 22 23:15:03.825 DEBG 7 Reopen extent 0
35425 Sep 22 23:15:03.825 DEBG [2] It's time to notify for 7
35426 Sep 22 23:15:03.825 INFO Completion from [2] id:7 status:true
35427 Sep 22 23:15:03.825 INFO [8/752] Repair commands completed
35428 Sep 22 23:15:03.825 INFO Pop front: ReconcileIO { id: ReconciliationId(8), op: ExtentFlush { repair_id: ReconciliationId(8), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35429 Sep 22 23:15:03.825 INFO Sent repair work, now wait for resp
35430 Sep 22 23:15:03.825 INFO [0] received reconcile message
35431 Sep 22 23:15:03.825 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(8), op: ExtentFlush { repair_id: ReconciliationId(8), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35432 Sep 22 23:15:03.825 INFO [0] client ExtentFlush { repair_id: ReconciliationId(8), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35433 Sep 22 23:15:03.825 INFO [1] received reconcile message
35434 Sep 22 23:15:03.825 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(8), op: ExtentFlush { repair_id: ReconciliationId(8), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35435 Sep 22 23:15:03.825 INFO [1] client ExtentFlush { repair_id: ReconciliationId(8), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35436 Sep 22 23:15:03.825 INFO [2] received reconcile message
35437 Sep 22 23:15:03.826 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(8), op: ExtentFlush { repair_id: ReconciliationId(8), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35438 Sep 22 23:15:03.826 INFO [2] client ExtentFlush { repair_id: ReconciliationId(8), extent_id: 143, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35439 Sep 22 23:15:03.826 DEBG 8 Flush extent 143 with f:2 g:2
35440 Sep 22 23:15:03.826 DEBG Flush just extent 143 with f:2 and g:2
35441 Sep 22 23:15:03.826 DEBG [1] It's time to notify for 8
35442 Sep 22 23:15:03.826 INFO Completion from [1] id:8 status:true
35443 Sep 22 23:15:03.826 INFO [9/752] Repair commands completed
35444 Sep 22 23:15:03.826 INFO Pop front: ReconcileIO { id: ReconciliationId(9), op: ExtentClose { repair_id: ReconciliationId(9), extent_id: 143 }, state: ClientData([New, New, New]) }
35445 Sep 22 23:15:03.826 INFO Sent repair work, now wait for resp
35446 Sep 22 23:15:03.826 INFO [0] received reconcile message
35447 Sep 22 23:15:03.826 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(9), op: ExtentClose { repair_id: ReconciliationId(9), extent_id: 143 }, state: ClientData([InProgress, New, New]) }, : downstairs
35448 Sep 22 23:15:03.826 INFO [0] client ExtentClose { repair_id: ReconciliationId(9), extent_id: 143 }
35449 Sep 22 23:15:03.826 INFO [1] received reconcile message
35450 Sep 22 23:15:03.826 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(9), op: ExtentClose { repair_id: ReconciliationId(9), extent_id: 143 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35451 Sep 22 23:15:03.826 INFO [1] client ExtentClose { repair_id: ReconciliationId(9), extent_id: 143 }
35452 Sep 22 23:15:03.826 INFO [2] received reconcile message
35453 Sep 22 23:15:03.826 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(9), op: ExtentClose { repair_id: ReconciliationId(9), extent_id: 143 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35454 Sep 22 23:15:03.826 INFO [2] client ExtentClose { repair_id: ReconciliationId(9), extent_id: 143 }
35455 Sep 22 23:15:03.826 DEBG 9 Close extent 143
35456 Sep 22 23:15:03.826 DEBG 9 Close extent 143
35457 Sep 22 23:15:03.827 DEBG 9 Close extent 143
35458 Sep 22 23:15:03.827 DEBG [2] It's time to notify for 9
35459 Sep 22 23:15:03.827 INFO Completion from [2] id:9 status:true
35460 Sep 22 23:15:03.827 INFO [10/752] Repair commands completed
35461 Sep 22 23:15:03.827 INFO Pop front: ReconcileIO { id: ReconciliationId(10), op: ExtentRepair { repair_id: ReconciliationId(10), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35462 Sep 22 23:15:03.827 INFO Sent repair work, now wait for resp
35463 Sep 22 23:15:03.827 INFO [0] received reconcile message
35464 Sep 22 23:15:03.827 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(10), op: ExtentRepair { repair_id: ReconciliationId(10), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35465 Sep 22 23:15:03.827 INFO [0] client ExtentRepair { repair_id: ReconciliationId(10), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35466 Sep 22 23:15:03.827 INFO [0] Sending repair request ReconciliationId(10)
35467 Sep 22 23:15:03.827 INFO [1] received reconcile message
35468 Sep 22 23:15:03.827 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(10), op: ExtentRepair { repair_id: ReconciliationId(10), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35469 Sep 22 23:15:03.827 INFO [1] client ExtentRepair { repair_id: ReconciliationId(10), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35470 Sep 22 23:15:03.827 INFO [1] No action required ReconciliationId(10)
35471 Sep 22 23:15:03.827 INFO [2] received reconcile message
35472 Sep 22 23:15:03.828 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(10), op: ExtentRepair { repair_id: ReconciliationId(10), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35473 Sep 22 23:15:03.828 INFO [2] client ExtentRepair { repair_id: ReconciliationId(10), extent_id: 143, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35474 Sep 22 23:15:03.828 INFO [2] No action required ReconciliationId(10)
35475 Sep 22 23:15:03.828 DEBG 10 Repair extent 143 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
35476 Sep 22 23:15:03.828 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/08F.copy"
35477 Sep 22 23:15:03.891 INFO accepted connection, remote_addr: 127.0.0.1:48510, local_addr: 127.0.0.1:46213, task: repair
35478 Sep 22 23:15:03.891 TRCE incoming request, uri: /extent/143/files, method: GET, req_id: af04195d-46c5-4493-8f92-caf2bd07b9b5, remote_addr: 127.0.0.1:48510, local_addr: 127.0.0.1:46213, task: repair
35479 Sep 22 23:15:03.892 INFO request completed, latency_us: 192, response_code: 200, uri: /extent/143/files, method: GET, req_id: af04195d-46c5-4493-8f92-caf2bd07b9b5, remote_addr: 127.0.0.1:48510, local_addr: 127.0.0.1:46213, task: repair
35480 Sep 22 23:15:03.892 INFO eid:143 Found repair files: ["08F", "08F.db"]
35481 Sep 22 23:15:03.892 TRCE incoming request, uri: /newextent/143/data, method: GET, req_id: a7ca02ea-0a2a-4689-8953-a295b7a3447a, remote_addr: 127.0.0.1:48510, local_addr: 127.0.0.1:46213, task: repair
35482 Sep 22 23:15:03.892 INFO request completed, latency_us: 250, response_code: 200, uri: /newextent/143/data, method: GET, req_id: a7ca02ea-0a2a-4689-8953-a295b7a3447a, remote_addr: 127.0.0.1:48510, local_addr: 127.0.0.1:46213, task: repair
35483 Sep 22 23:15:03.898 TRCE incoming request, uri: /newextent/143/db, method: GET, req_id: d1471026-4a00-483b-8e58-7d87331b1162, remote_addr: 127.0.0.1:48510, local_addr: 127.0.0.1:46213, task: repair
35484 Sep 22 23:15:03.898 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/143/db, method: GET, req_id: d1471026-4a00-483b-8e58-7d87331b1162, remote_addr: 127.0.0.1:48510, local_addr: 127.0.0.1:46213, task: repair
35485 Sep 22 23:15:03.899 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/08F.copy" to "/tmp/downstairs-vrx8aK6L/00/000/08F.replace"
35486 Sep 22 23:15:03.899 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35487 Sep 22 23:15:03.900 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/08F.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
35488 Sep 22 23:15:03.900 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/08F"
35489 Sep 22 23:15:03.900 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/08F.db"
35490 Sep 22 23:15:03.900 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35491 Sep 22 23:15:03.900 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/08F.replace" to "/tmp/downstairs-vrx8aK6L/00/000/08F.completed"
35492 Sep 22 23:15:03.900 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35493 Sep 22 23:15:03.900 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35494 Sep 22 23:15:03.901 DEBG [0] It's time to notify for 10
35495 Sep 22 23:15:03.901 INFO Completion from [0] id:10 status:true
35496 Sep 22 23:15:03.901 INFO [11/752] Repair commands completed
35497 Sep 22 23:15:03.901 INFO Pop front: ReconcileIO { id: ReconciliationId(11), op: ExtentReopen { repair_id: ReconciliationId(11), extent_id: 143 }, state: ClientData([New, New, New]) }
35498 Sep 22 23:15:03.901 INFO Sent repair work, now wait for resp
35499 Sep 22 23:15:03.901 INFO [0] received reconcile message
35500 Sep 22 23:15:03.901 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(11), op: ExtentReopen { repair_id: ReconciliationId(11), extent_id: 143 }, state: ClientData([InProgress, New, New]) }, : downstairs
35501 Sep 22 23:15:03.901 INFO [0] client ExtentReopen { repair_id: ReconciliationId(11), extent_id: 143 }
35502 Sep 22 23:15:03.901 INFO [1] received reconcile message
35503 Sep 22 23:15:03.901 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(11), op: ExtentReopen { repair_id: ReconciliationId(11), extent_id: 143 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35504 Sep 22 23:15:03.901 INFO [1] client ExtentReopen { repair_id: ReconciliationId(11), extent_id: 143 }
35505 Sep 22 23:15:03.901 INFO [2] received reconcile message
35506 Sep 22 23:15:03.901 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(11), op: ExtentReopen { repair_id: ReconciliationId(11), extent_id: 143 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35507 Sep 22 23:15:03.901 INFO [2] client ExtentReopen { repair_id: ReconciliationId(11), extent_id: 143 }
35508 Sep 22 23:15:03.901 DEBG 11 Reopen extent 143
35509 Sep 22 23:15:03.902 DEBG 11 Reopen extent 143
35510 Sep 22 23:15:03.902 DEBG 11 Reopen extent 143
35511 Sep 22 23:15:03.903 DEBG [2] It's time to notify for 11
35512 Sep 22 23:15:03.903 INFO Completion from [2] id:11 status:true
35513 Sep 22 23:15:03.903 INFO [12/752] Repair commands completed
35514 Sep 22 23:15:03.903 INFO Pop front: ReconcileIO { id: ReconciliationId(12), op: ExtentFlush { repair_id: ReconciliationId(12), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35515 Sep 22 23:15:03.903 INFO Sent repair work, now wait for resp
35516 Sep 22 23:15:03.903 INFO [0] received reconcile message
35517 Sep 22 23:15:03.903 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(12), op: ExtentFlush { repair_id: ReconciliationId(12), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35518 Sep 22 23:15:03.903 INFO [0] client ExtentFlush { repair_id: ReconciliationId(12), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35519 Sep 22 23:15:03.903 INFO [1] received reconcile message
35520 Sep 22 23:15:03.903 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(12), op: ExtentFlush { repair_id: ReconciliationId(12), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35521 Sep 22 23:15:03.903 INFO [1] client ExtentFlush { repair_id: ReconciliationId(12), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35522 Sep 22 23:15:03.903 INFO [2] received reconcile message
35523 Sep 22 23:15:03.903 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(12), op: ExtentFlush { repair_id: ReconciliationId(12), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35524 Sep 22 23:15:03.903 INFO [2] client ExtentFlush { repair_id: ReconciliationId(12), extent_id: 151, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35525 Sep 22 23:15:03.903 DEBG 12 Flush extent 151 with f:2 g:2
35526 Sep 22 23:15:03.903 DEBG Flush just extent 151 with f:2 and g:2
35527 Sep 22 23:15:03.903 DEBG [1] It's time to notify for 12
35528 Sep 22 23:15:03.903 INFO Completion from [1] id:12 status:true
35529 Sep 22 23:15:03.903 INFO [13/752] Repair commands completed
35530 Sep 22 23:15:03.903 INFO Pop front: ReconcileIO { id: ReconciliationId(13), op: ExtentClose { repair_id: ReconciliationId(13), extent_id: 151 }, state: ClientData([New, New, New]) }
35531 Sep 22 23:15:03.903 INFO Sent repair work, now wait for resp
35532 Sep 22 23:15:03.903 INFO [0] received reconcile message
35533 Sep 22 23:15:03.903 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(13), op: ExtentClose { repair_id: ReconciliationId(13), extent_id: 151 }, state: ClientData([InProgress, New, New]) }, : downstairs
35534 Sep 22 23:15:03.903 INFO [0] client ExtentClose { repair_id: ReconciliationId(13), extent_id: 151 }
35535 Sep 22 23:15:03.904 INFO [1] received reconcile message
35536 Sep 22 23:15:03.904 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(13), op: ExtentClose { repair_id: ReconciliationId(13), extent_id: 151 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35537 Sep 22 23:15:03.904 INFO [1] client ExtentClose { repair_id: ReconciliationId(13), extent_id: 151 }
35538 Sep 22 23:15:03.904 INFO [2] received reconcile message
35539 Sep 22 23:15:03.904 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(13), op: ExtentClose { repair_id: ReconciliationId(13), extent_id: 151 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35540 Sep 22 23:15:03.904 INFO [2] client ExtentClose { repair_id: ReconciliationId(13), extent_id: 151 }
35541 Sep 22 23:15:03.904 DEBG 13 Close extent 151
35542 Sep 22 23:15:03.904 DEBG 13 Close extent 151
35543 Sep 22 23:15:03.904 DEBG 13 Close extent 151
35544 Sep 22 23:15:03.905 DEBG [2] It's time to notify for 13
35545 Sep 22 23:15:03.905 INFO Completion from [2] id:13 status:true
35546 Sep 22 23:15:03.905 INFO [14/752] Repair commands completed
35547 Sep 22 23:15:03.905 INFO Pop front: ReconcileIO { id: ReconciliationId(14), op: ExtentRepair { repair_id: ReconciliationId(14), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35548 Sep 22 23:15:03.905 INFO Sent repair work, now wait for resp
35549 Sep 22 23:15:03.905 INFO [0] received reconcile message
35550 Sep 22 23:15:03.905 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(14), op: ExtentRepair { repair_id: ReconciliationId(14), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35551 Sep 22 23:15:03.905 INFO [0] client ExtentRepair { repair_id: ReconciliationId(14), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35552 Sep 22 23:15:03.905 INFO [0] Sending repair request ReconciliationId(14)
35553 Sep 22 23:15:03.905 INFO [1] received reconcile message
35554 Sep 22 23:15:03.905 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(14), op: ExtentRepair { repair_id: ReconciliationId(14), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35555 Sep 22 23:15:03.905 INFO [1] client ExtentRepair { repair_id: ReconciliationId(14), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35556 Sep 22 23:15:03.905 INFO [1] No action required ReconciliationId(14)
35557 Sep 22 23:15:03.905 INFO [2] received reconcile message
35558 Sep 22 23:15:03.905 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(14), op: ExtentRepair { repair_id: ReconciliationId(14), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35559 Sep 22 23:15:03.905 INFO [2] client ExtentRepair { repair_id: ReconciliationId(14), extent_id: 151, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35560 Sep 22 23:15:03.905 INFO [2] No action required ReconciliationId(14)
35561 Sep 22 23:15:03.905 DEBG 14 Repair extent 151 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
35562 Sep 22 23:15:03.905 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/097.copy"
35563 Sep 22 23:15:03.913 DEBG [2] Read AckReady 1072, : downstairs
35564 Sep 22 23:15:03.914 DEBG up_ds_listen was notified
35565 Sep 22 23:15:03.914 DEBG up_ds_listen process 1072
35566 Sep 22 23:15:03.914 DEBG [A] ack job 1072:73, : downstairs
35567 Sep 22 23:15:03.968 DEBG up_ds_listen checked 1 jobs, back to waiting
35568 Sep 22 23:15:03.969 INFO accepted connection, remote_addr: 127.0.0.1:56341, local_addr: 127.0.0.1:46213, task: repair
35569 Sep 22 23:15:03.969 TRCE incoming request, uri: /extent/151/files, method: GET, req_id: b3d5e4bc-8f05-4b62-bb0b-9062dd73f389, remote_addr: 127.0.0.1:56341, local_addr: 127.0.0.1:46213, task: repair
35570 Sep 22 23:15:03.969 INFO request completed, latency_us: 196, response_code: 200, uri: /extent/151/files, method: GET, req_id: b3d5e4bc-8f05-4b62-bb0b-9062dd73f389, remote_addr: 127.0.0.1:56341, local_addr: 127.0.0.1:46213, task: repair
35571 Sep 22 23:15:03.969 INFO eid:151 Found repair files: ["097", "097.db"]
35572 Sep 22 23:15:03.970 TRCE incoming request, uri: /newextent/151/data, method: GET, req_id: ad576779-3026-445d-8b4b-fe704a09030f, remote_addr: 127.0.0.1:56341, local_addr: 127.0.0.1:46213, task: repair
35573 Sep 22 23:15:03.970 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/151/data, method: GET, req_id: ad576779-3026-445d-8b4b-fe704a09030f, remote_addr: 127.0.0.1:56341, local_addr: 127.0.0.1:46213, task: repair
35574 Sep 22 23:15:03.973 DEBG Flush :1071 extent_limit None deps:[JobId(1070), JobId(1069)] res:true f:25 g:1
35575 Sep 22 23:15:03.973 INFO [lossy] skipping 1072
35576 Sep 22 23:15:03.973 WARN 1073 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
35577 Sep 22 23:15:03.975 TRCE incoming request, uri: /newextent/151/db, method: GET, req_id: b7ec581b-28f6-4e46-b0f7-43d84228c035, remote_addr: 127.0.0.1:56341, local_addr: 127.0.0.1:46213, task: repair
35578 Sep 22 23:15:03.975 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/151/db, method: GET, req_id: b7ec581b-28f6-4e46-b0f7-43d84228c035, remote_addr: 127.0.0.1:56341, local_addr: 127.0.0.1:46213, task: repair
35579 Sep 22 23:15:03.976 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/097.copy" to "/tmp/downstairs-vrx8aK6L/00/000/097.replace"
35580 Sep 22 23:15:03.977 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35581 Sep 22 23:15:03.977 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/097.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
35582 Sep 22 23:15:03.978 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/097"
35583 Sep 22 23:15:03.978 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/097.db"
35584 Sep 22 23:15:03.978 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35585 Sep 22 23:15:03.978 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/097.replace" to "/tmp/downstairs-vrx8aK6L/00/000/097.completed"
35586 Sep 22 23:15:03.978 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35587 Sep 22 23:15:03.978 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35588 Sep 22 23:15:03.978 DEBG [0] It's time to notify for 14
35589 Sep 22 23:15:03.978 INFO Completion from [0] id:14 status:true
35590 Sep 22 23:15:03.978 INFO [15/752] Repair commands completed
35591 Sep 22 23:15:03.978 INFO Pop front: ReconcileIO { id: ReconciliationId(15), op: ExtentReopen { repair_id: ReconciliationId(15), extent_id: 151 }, state: ClientData([New, New, New]) }
35592 Sep 22 23:15:03.978 INFO Sent repair work, now wait for resp
35593 Sep 22 23:15:03.978 INFO [0] received reconcile message
35594 Sep 22 23:15:03.978 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(15), op: ExtentReopen { repair_id: ReconciliationId(15), extent_id: 151 }, state: ClientData([InProgress, New, New]) }, : downstairs
35595 Sep 22 23:15:03.978 INFO [0] client ExtentReopen { repair_id: ReconciliationId(15), extent_id: 151 }
35596 Sep 22 23:15:03.978 INFO [1] received reconcile message
35597 Sep 22 23:15:03.978 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(15), op: ExtentReopen { repair_id: ReconciliationId(15), extent_id: 151 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35598 Sep 22 23:15:03.979 INFO [1] client ExtentReopen { repair_id: ReconciliationId(15), extent_id: 151 }
35599 Sep 22 23:15:03.979 INFO [2] received reconcile message
35600 Sep 22 23:15:03.979 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(15), op: ExtentReopen { repair_id: ReconciliationId(15), extent_id: 151 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35601 Sep 22 23:15:03.979 INFO [2] client ExtentReopen { repair_id: ReconciliationId(15), extent_id: 151 }
35602 Sep 22 23:15:03.979 DEBG 15 Reopen extent 151
35603 Sep 22 23:15:03.979 DEBG Read :1072 deps:[JobId(1071)] res:true
35604 Sep 22 23:15:03.979 DEBG 15 Reopen extent 151
35605 Sep 22 23:15:03.980 DEBG 15 Reopen extent 151
35606 Sep 22 23:15:03.980 DEBG [2] It's time to notify for 15
35607 Sep 22 23:15:03.980 INFO Completion from [2] id:15 status:true
35608 Sep 22 23:15:03.980 INFO [16/752] Repair commands completed
35609 Sep 22 23:15:03.980 INFO Pop front: ReconcileIO { id: ReconciliationId(16), op: ExtentFlush { repair_id: ReconciliationId(16), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35610 Sep 22 23:15:03.980 INFO Sent repair work, now wait for resp
35611 Sep 22 23:15:03.981 INFO [0] received reconcile message
35612 Sep 22 23:15:03.981 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(16), op: ExtentFlush { repair_id: ReconciliationId(16), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35613 Sep 22 23:15:03.981 INFO [0] client ExtentFlush { repair_id: ReconciliationId(16), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35614 Sep 22 23:15:03.981 INFO [1] received reconcile message
35615 Sep 22 23:15:03.981 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(16), op: ExtentFlush { repair_id: ReconciliationId(16), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35616 Sep 22 23:15:03.981 INFO [1] client ExtentFlush { repair_id: ReconciliationId(16), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35617 Sep 22 23:15:03.981 INFO [2] received reconcile message
35618 Sep 22 23:15:03.981 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(16), op: ExtentFlush { repair_id: ReconciliationId(16), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35619 Sep 22 23:15:03.981 INFO [2] client ExtentFlush { repair_id: ReconciliationId(16), extent_id: 169, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35620 Sep 22 23:15:03.981 DEBG 16 Flush extent 169 with f:2 g:2
35621 Sep 22 23:15:03.981 DEBG Flush just extent 169 with f:2 and g:2
35622 Sep 22 23:15:03.981 DEBG [1] It's time to notify for 16
35623 Sep 22 23:15:03.981 INFO Completion from [1] id:16 status:true
35624 Sep 22 23:15:03.981 INFO [17/752] Repair commands completed
35625 Sep 22 23:15:03.981 INFO Pop front: ReconcileIO { id: ReconciliationId(17), op: ExtentClose { repair_id: ReconciliationId(17), extent_id: 169 }, state: ClientData([New, New, New]) }
35626 Sep 22 23:15:03.981 INFO Sent repair work, now wait for resp
35627 Sep 22 23:15:03.981 INFO [0] received reconcile message
35628 Sep 22 23:15:03.981 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(17), op: ExtentClose { repair_id: ReconciliationId(17), extent_id: 169 }, state: ClientData([InProgress, New, New]) }, : downstairs
35629 Sep 22 23:15:03.981 INFO [0] client ExtentClose { repair_id: ReconciliationId(17), extent_id: 169 }
35630 Sep 22 23:15:03.981 INFO [1] received reconcile message
35631 Sep 22 23:15:03.981 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(17), op: ExtentClose { repair_id: ReconciliationId(17), extent_id: 169 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35632 Sep 22 23:15:03.981 INFO [1] client ExtentClose { repair_id: ReconciliationId(17), extent_id: 169 }
35633 Sep 22 23:15:03.981 INFO [2] received reconcile message
35634 Sep 22 23:15:03.981 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(17), op: ExtentClose { repair_id: ReconciliationId(17), extent_id: 169 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35635 Sep 22 23:15:03.981 INFO [2] client ExtentClose { repair_id: ReconciliationId(17), extent_id: 169 }
35636 Sep 22 23:15:03.981 DEBG 17 Close extent 169
35637 Sep 22 23:15:03.982 DEBG 17 Close extent 169
35638 Sep 22 23:15:03.982 DEBG 17 Close extent 169
35639 Sep 22 23:15:03.982 DEBG [2] It's time to notify for 17
35640 Sep 22 23:15:03.982 INFO Completion from [2] id:17 status:true
35641 Sep 22 23:15:03.982 INFO [18/752] Repair commands completed
35642 Sep 22 23:15:03.982 INFO Pop front: ReconcileIO { id: ReconciliationId(18), op: ExtentRepair { repair_id: ReconciliationId(18), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35643 Sep 22 23:15:03.982 INFO Sent repair work, now wait for resp
35644 Sep 22 23:15:03.982 INFO [0] received reconcile message
35645 Sep 22 23:15:03.982 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(18), op: ExtentRepair { repair_id: ReconciliationId(18), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35646 Sep 22 23:15:03.982 INFO [0] client ExtentRepair { repair_id: ReconciliationId(18), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35647 Sep 22 23:15:03.983 INFO [0] Sending repair request ReconciliationId(18)
35648 Sep 22 23:15:03.983 INFO [1] received reconcile message
35649 Sep 22 23:15:03.983 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(18), op: ExtentRepair { repair_id: ReconciliationId(18), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35650 Sep 22 23:15:03.983 INFO [1] client ExtentRepair { repair_id: ReconciliationId(18), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35651 Sep 22 23:15:03.983 INFO [1] No action required ReconciliationId(18)
35652 Sep 22 23:15:03.983 INFO [2] received reconcile message
35653 Sep 22 23:15:03.983 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(18), op: ExtentRepair { repair_id: ReconciliationId(18), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35654 Sep 22 23:15:03.983 INFO [2] client ExtentRepair { repair_id: ReconciliationId(18), extent_id: 169, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35655 Sep 22 23:15:03.983 INFO [2] No action required ReconciliationId(18)
35656 Sep 22 23:15:03.983 DEBG 18 Repair extent 169 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
35657 Sep 22 23:15:03.983 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0A9.copy"
35658 Sep 22 23:15:04.001 DEBG IO Read 1074 has deps [JobId(1073)]
35659 Sep 22 23:15:04.007 DEBG up_ds_listen was notified
35660 Sep 22 23:15:04.007 DEBG up_ds_listen process 1071
35661 Sep 22 23:15:04.007 DEBG [A] ack job 1071:72, : downstairs
35662 Sep 22 23:15:04.007 DEBG up_ds_listen checked 1 jobs, back to waiting
35663 Sep 22 23:15:04.016 WARN returning error on flush!
35664 Sep 22 23:15:04.016 DEBG Flush :1073 extent_limit None deps:[JobId(1072), JobId(1071)] res:false f:26 g:1
35665 Sep 22 23:15:04.016 INFO [lossy] skipping 1074
35666 Sep 22 23:15:04.016 WARN returning error on flush!
35667 Sep 22 23:15:04.016 DEBG Flush :1073 extent_limit None deps:[JobId(1072), JobId(1071)] res:false f:26 g:1
35668 Sep 22 23:15:04.016 DEBG Flush :1073 extent_limit None deps:[JobId(1072), JobId(1071)] res:true f:26 g:1
35669 Sep 22 23:15:04.016 INFO [lossy] skipping 1074
35670 Sep 22 23:15:04.016 INFO [lossy] skipping 1074
35671 Sep 22 23:15:04.016 WARN returning error on read!
35672 Sep 22 23:15:04.016 DEBG Read :1074 deps:[JobId(1073)] res:false
35673 Sep 22 23:15:04.016 WARN returning error on read!
35674 Sep 22 23:15:04.016 DEBG Read :1074 deps:[JobId(1073)] res:false
35675 Sep 22 23:15:04.016 INFO [lossy] skipping 1074
35676 Sep 22 23:15:04.016 INFO [lossy] skipping 1074
35677 Sep 22 23:15:04.016 INFO [lossy] skipping 1074
35678 Sep 22 23:15:04.022 DEBG Read :1074 deps:[JobId(1073)] res:true
35679 Sep 22 23:15:04.045 INFO accepted connection, remote_addr: 127.0.0.1:56054, local_addr: 127.0.0.1:46213, task: repair
35680 Sep 22 23:15:04.045 TRCE incoming request, uri: /extent/169/files, method: GET, req_id: 399c34c7-e683-431a-95ad-4dd2f649d7c1, remote_addr: 127.0.0.1:56054, local_addr: 127.0.0.1:46213, task: repair
35681 Sep 22 23:15:04.045 INFO request completed, latency_us: 238, response_code: 200, uri: /extent/169/files, method: GET, req_id: 399c34c7-e683-431a-95ad-4dd2f649d7c1, remote_addr: 127.0.0.1:56054, local_addr: 127.0.0.1:46213, task: repair
35682 Sep 22 23:15:04.045 INFO eid:169 Found repair files: ["0A9", "0A9.db"]
35683 Sep 22 23:15:04.046 TRCE incoming request, uri: /newextent/169/data, method: GET, req_id: f20cbb11-b32b-4c1e-a03e-8f710b489dd0, remote_addr: 127.0.0.1:56054, local_addr: 127.0.0.1:46213, task: repair
35684 Sep 22 23:15:04.046 INFO request completed, latency_us: 341, response_code: 200, uri: /newextent/169/data, method: GET, req_id: f20cbb11-b32b-4c1e-a03e-8f710b489dd0, remote_addr: 127.0.0.1:56054, local_addr: 127.0.0.1:46213, task: repair
35685 Sep 22 23:15:04.051 TRCE incoming request, uri: /newextent/169/db, method: GET, req_id: e2689a95-b2f4-4ac1-a4a3-b49b73593ab9, remote_addr: 127.0.0.1:56054, local_addr: 127.0.0.1:46213, task: repair
35686 Sep 22 23:15:04.051 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/169/db, method: GET, req_id: e2689a95-b2f4-4ac1-a4a3-b49b73593ab9, remote_addr: 127.0.0.1:56054, local_addr: 127.0.0.1:46213, task: repair
35687 Sep 22 23:15:04.053 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0A9.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0A9.replace"
35688 Sep 22 23:15:04.053 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35689 Sep 22 23:15:04.054 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0A9.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
35690 Sep 22 23:15:04.054 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A9"
35691 Sep 22 23:15:04.054 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A9.db"
35692 Sep 22 23:15:04.054 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35693 Sep 22 23:15:04.054 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0A9.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0A9.completed"
35694 Sep 22 23:15:04.054 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35695 Sep 22 23:15:04.054 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35696 Sep 22 23:15:04.054 DEBG [0] It's time to notify for 18
35697 Sep 22 23:15:04.054 INFO Completion from [0] id:18 status:true
35698 Sep 22 23:15:04.054 INFO [19/752] Repair commands completed
35699 Sep 22 23:15:04.054 INFO Pop front: ReconcileIO { id: ReconciliationId(19), op: ExtentReopen { repair_id: ReconciliationId(19), extent_id: 169 }, state: ClientData([New, New, New]) }
35700 Sep 22 23:15:04.054 INFO Sent repair work, now wait for resp
35701 Sep 22 23:15:04.055 INFO [0] received reconcile message
35702 Sep 22 23:15:04.055 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(19), op: ExtentReopen { repair_id: ReconciliationId(19), extent_id: 169 }, state: ClientData([InProgress, New, New]) }, : downstairs
35703 Sep 22 23:15:04.055 INFO [0] client ExtentReopen { repair_id: ReconciliationId(19), extent_id: 169 }
35704 Sep 22 23:15:04.055 INFO [1] received reconcile message
35705 Sep 22 23:15:04.055 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(19), op: ExtentReopen { repair_id: ReconciliationId(19), extent_id: 169 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35706 Sep 22 23:15:04.055 INFO [1] client ExtentReopen { repair_id: ReconciliationId(19), extent_id: 169 }
35707 Sep 22 23:15:04.055 INFO [2] received reconcile message
35708 Sep 22 23:15:04.055 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(19), op: ExtentReopen { repair_id: ReconciliationId(19), extent_id: 169 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35709 Sep 22 23:15:04.055 INFO [2] client ExtentReopen { repair_id: ReconciliationId(19), extent_id: 169 }
35710 Sep 22 23:15:04.055 DEBG 19 Reopen extent 169
35711 Sep 22 23:15:04.056 DEBG 19 Reopen extent 169
35712 Sep 22 23:15:04.056 DEBG 19 Reopen extent 169
35713 Sep 22 23:15:04.057 DEBG [2] It's time to notify for 19
35714 Sep 22 23:15:04.057 INFO Completion from [2] id:19 status:true
35715 Sep 22 23:15:04.057 INFO [20/752] Repair commands completed
35716 Sep 22 23:15:04.057 INFO Pop front: ReconcileIO { id: ReconciliationId(20), op: ExtentFlush { repair_id: ReconciliationId(20), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35717 Sep 22 23:15:04.057 INFO Sent repair work, now wait for resp
35718 Sep 22 23:15:04.057 INFO [0] received reconcile message
35719 Sep 22 23:15:04.057 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(20), op: ExtentFlush { repair_id: ReconciliationId(20), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35720 Sep 22 23:15:04.057 INFO [0] client ExtentFlush { repair_id: ReconciliationId(20), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35721 Sep 22 23:15:04.057 INFO [1] received reconcile message
35722 Sep 22 23:15:04.057 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(20), op: ExtentFlush { repair_id: ReconciliationId(20), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35723 Sep 22 23:15:04.057 INFO [1] client ExtentFlush { repair_id: ReconciliationId(20), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35724 Sep 22 23:15:04.057 INFO [2] received reconcile message
35725 Sep 22 23:15:04.057 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(20), op: ExtentFlush { repair_id: ReconciliationId(20), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35726 Sep 22 23:15:04.057 INFO [2] client ExtentFlush { repair_id: ReconciliationId(20), extent_id: 185, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35727 Sep 22 23:15:04.057 DEBG 20 Flush extent 185 with f:2 g:2
35728 Sep 22 23:15:04.057 DEBG Flush just extent 185 with f:2 and g:2
35729 Sep 22 23:15:04.057 DEBG [1] It's time to notify for 20
35730 Sep 22 23:15:04.057 INFO Completion from [1] id:20 status:true
35731 Sep 22 23:15:04.057 INFO [21/752] Repair commands completed
35732 Sep 22 23:15:04.057 INFO Pop front: ReconcileIO { id: ReconciliationId(21), op: ExtentClose { repair_id: ReconciliationId(21), extent_id: 185 }, state: ClientData([New, New, New]) }
35733 Sep 22 23:15:04.057 INFO Sent repair work, now wait for resp
35734 Sep 22 23:15:04.057 INFO [0] received reconcile message
35735 Sep 22 23:15:04.057 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(21), op: ExtentClose { repair_id: ReconciliationId(21), extent_id: 185 }, state: ClientData([InProgress, New, New]) }, : downstairs
35736 Sep 22 23:15:04.057 INFO [0] client ExtentClose { repair_id: ReconciliationId(21), extent_id: 185 }
35737 Sep 22 23:15:04.057 INFO [1] received reconcile message
35738 Sep 22 23:15:04.057 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(21), op: ExtentClose { repair_id: ReconciliationId(21), extent_id: 185 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35739 Sep 22 23:15:04.057 INFO [1] client ExtentClose { repair_id: ReconciliationId(21), extent_id: 185 }
35740 Sep 22 23:15:04.057 INFO [2] received reconcile message
35741 Sep 22 23:15:04.057 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(21), op: ExtentClose { repair_id: ReconciliationId(21), extent_id: 185 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35742 Sep 22 23:15:04.057 INFO [2] client ExtentClose { repair_id: ReconciliationId(21), extent_id: 185 }
35743 Sep 22 23:15:04.058 DEBG 21 Close extent 185
35744 Sep 22 23:15:04.058 DEBG 21 Close extent 185
35745 Sep 22 23:15:04.058 DEBG 21 Close extent 185
35746 Sep 22 23:15:04.059 DEBG [2] It's time to notify for 21
35747 Sep 22 23:15:04.059 INFO Completion from [2] id:21 status:true
35748 Sep 22 23:15:04.059 INFO [22/752] Repair commands completed
35749 Sep 22 23:15:04.059 INFO Pop front: ReconcileIO { id: ReconciliationId(22), op: ExtentRepair { repair_id: ReconciliationId(22), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35750 Sep 22 23:15:04.059 INFO Sent repair work, now wait for resp
35751 Sep 22 23:15:04.059 INFO [0] received reconcile message
35752 Sep 22 23:15:04.059 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(22), op: ExtentRepair { repair_id: ReconciliationId(22), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35753 Sep 22 23:15:04.059 INFO [0] client ExtentRepair { repair_id: ReconciliationId(22), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35754 Sep 22 23:15:04.059 INFO [0] Sending repair request ReconciliationId(22)
35755 Sep 22 23:15:04.059 INFO [1] received reconcile message
35756 Sep 22 23:15:04.059 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(22), op: ExtentRepair { repair_id: ReconciliationId(22), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35757 Sep 22 23:15:04.059 INFO [1] client ExtentRepair { repair_id: ReconciliationId(22), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35758 Sep 22 23:15:04.059 INFO [1] No action required ReconciliationId(22)
35759 Sep 22 23:15:04.059 INFO [2] received reconcile message
35760 Sep 22 23:15:04.059 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(22), op: ExtentRepair { repair_id: ReconciliationId(22), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35761 Sep 22 23:15:04.059 INFO [2] client ExtentRepair { repair_id: ReconciliationId(22), extent_id: 185, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35762 Sep 22 23:15:04.059 INFO [2] No action required ReconciliationId(22)
35763 Sep 22 23:15:04.059 DEBG 22 Repair extent 185 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
35764 Sep 22 23:15:04.059 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0B9.copy"
35765 Sep 22 23:15:04.120 INFO accepted connection, remote_addr: 127.0.0.1:56651, local_addr: 127.0.0.1:46213, task: repair
35766 Sep 22 23:15:04.120 TRCE incoming request, uri: /extent/185/files, method: GET, req_id: b191d31f-80ce-431d-a61f-c83308a9b1fb, remote_addr: 127.0.0.1:56651, local_addr: 127.0.0.1:46213, task: repair
35767 Sep 22 23:15:04.120 INFO request completed, latency_us: 189, response_code: 200, uri: /extent/185/files, method: GET, req_id: b191d31f-80ce-431d-a61f-c83308a9b1fb, remote_addr: 127.0.0.1:56651, local_addr: 127.0.0.1:46213, task: repair
35768 Sep 22 23:15:04.120 INFO eid:185 Found repair files: ["0B9", "0B9.db"]
35769 Sep 22 23:15:04.121 TRCE incoming request, uri: /newextent/185/data, method: GET, req_id: 59dab808-59f2-4be0-905f-fb02a78b7119, remote_addr: 127.0.0.1:56651, local_addr: 127.0.0.1:46213, task: repair
35770 Sep 22 23:15:04.121 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/185/data, method: GET, req_id: 59dab808-59f2-4be0-905f-fb02a78b7119, remote_addr: 127.0.0.1:56651, local_addr: 127.0.0.1:46213, task: repair
35771 Sep 22 23:15:04.126 TRCE incoming request, uri: /newextent/185/db, method: GET, req_id: 54da01a1-4b13-4299-9283-5b14fbf75b16, remote_addr: 127.0.0.1:56651, local_addr: 127.0.0.1:46213, task: repair
35772 Sep 22 23:15:04.126 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/185/db, method: GET, req_id: 54da01a1-4b13-4299-9283-5b14fbf75b16, remote_addr: 127.0.0.1:56651, local_addr: 127.0.0.1:46213, task: repair
35773 Sep 22 23:15:04.128 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0B9.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0B9.replace"
35774 Sep 22 23:15:04.128 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35775 Sep 22 23:15:04.128 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0B9.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
35776 Sep 22 23:15:04.129 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B9"
35777 Sep 22 23:15:04.129 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B9.db"
35778 Sep 22 23:15:04.129 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35779 Sep 22 23:15:04.129 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0B9.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0B9.completed"
35780 Sep 22 23:15:04.129 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35781 Sep 22 23:15:04.129 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35782 Sep 22 23:15:04.129 DEBG [0] It's time to notify for 22
35783 Sep 22 23:15:04.129 INFO Completion from [0] id:22 status:true
35784 Sep 22 23:15:04.129 INFO [23/752] Repair commands completed
35785 Sep 22 23:15:04.129 INFO Pop front: ReconcileIO { id: ReconciliationId(23), op: ExtentReopen { repair_id: ReconciliationId(23), extent_id: 185 }, state: ClientData([New, New, New]) }
35786 Sep 22 23:15:04.129 INFO Sent repair work, now wait for resp
35787 Sep 22 23:15:04.129 INFO [0] received reconcile message
35788 Sep 22 23:15:04.129 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(23), op: ExtentReopen { repair_id: ReconciliationId(23), extent_id: 185 }, state: ClientData([InProgress, New, New]) }, : downstairs
35789 Sep 22 23:15:04.129 INFO [0] client ExtentReopen { repair_id: ReconciliationId(23), extent_id: 185 }
35790 Sep 22 23:15:04.129 INFO [1] received reconcile message
35791 Sep 22 23:15:04.129 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(23), op: ExtentReopen { repair_id: ReconciliationId(23), extent_id: 185 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35792 Sep 22 23:15:04.129 INFO [1] client ExtentReopen { repair_id: ReconciliationId(23), extent_id: 185 }
35793 Sep 22 23:15:04.129 INFO [2] received reconcile message
35794 Sep 22 23:15:04.129 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(23), op: ExtentReopen { repair_id: ReconciliationId(23), extent_id: 185 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35795 Sep 22 23:15:04.129 INFO [2] client ExtentReopen { repair_id: ReconciliationId(23), extent_id: 185 }
35796 Sep 22 23:15:04.130 DEBG 23 Reopen extent 185
35797 Sep 22 23:15:04.130 DEBG 23 Reopen extent 185
35798 Sep 22 23:15:04.131 DEBG 23 Reopen extent 185
35799 Sep 22 23:15:04.131 DEBG [2] It's time to notify for 23
35800 Sep 22 23:15:04.131 INFO Completion from [2] id:23 status:true
35801 Sep 22 23:15:04.131 INFO [24/752] Repair commands completed
35802 Sep 22 23:15:04.131 INFO Pop front: ReconcileIO { id: ReconciliationId(24), op: ExtentFlush { repair_id: ReconciliationId(24), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35803 Sep 22 23:15:04.131 INFO Sent repair work, now wait for resp
35804 Sep 22 23:15:04.131 INFO [0] received reconcile message
35805 Sep 22 23:15:04.131 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(24), op: ExtentFlush { repair_id: ReconciliationId(24), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35806 Sep 22 23:15:04.131 INFO [0] client ExtentFlush { repair_id: ReconciliationId(24), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35807 Sep 22 23:15:04.131 INFO [1] received reconcile message
35808 Sep 22 23:15:04.131 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(24), op: ExtentFlush { repair_id: ReconciliationId(24), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35809 Sep 22 23:15:04.131 INFO [1] client ExtentFlush { repair_id: ReconciliationId(24), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35810 Sep 22 23:15:04.131 INFO [2] received reconcile message
35811 Sep 22 23:15:04.131 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(24), op: ExtentFlush { repair_id: ReconciliationId(24), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35812 Sep 22 23:15:04.131 INFO [2] client ExtentFlush { repair_id: ReconciliationId(24), extent_id: 11, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35813 Sep 22 23:15:04.132 DEBG 24 Flush extent 11 with f:2 g:2
35814 Sep 22 23:15:04.132 DEBG Flush just extent 11 with f:2 and g:2
35815 Sep 22 23:15:04.132 DEBG [1] It's time to notify for 24
35816 Sep 22 23:15:04.132 INFO Completion from [1] id:24 status:true
35817 Sep 22 23:15:04.132 INFO [25/752] Repair commands completed
35818 Sep 22 23:15:04.132 INFO Pop front: ReconcileIO { id: ReconciliationId(25), op: ExtentClose { repair_id: ReconciliationId(25), extent_id: 11 }, state: ClientData([New, New, New]) }
35819 Sep 22 23:15:04.132 INFO Sent repair work, now wait for resp
35820 Sep 22 23:15:04.132 INFO [0] received reconcile message
35821 Sep 22 23:15:04.132 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(25), op: ExtentClose { repair_id: ReconciliationId(25), extent_id: 11 }, state: ClientData([InProgress, New, New]) }, : downstairs
35822 Sep 22 23:15:04.132 INFO [0] client ExtentClose { repair_id: ReconciliationId(25), extent_id: 11 }
35823 Sep 22 23:15:04.132 INFO [1] received reconcile message
35824 Sep 22 23:15:04.132 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(25), op: ExtentClose { repair_id: ReconciliationId(25), extent_id: 11 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35825 Sep 22 23:15:04.132 INFO [1] client ExtentClose { repair_id: ReconciliationId(25), extent_id: 11 }
35826 Sep 22 23:15:04.132 INFO [2] received reconcile message
35827 Sep 22 23:15:04.132 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(25), op: ExtentClose { repair_id: ReconciliationId(25), extent_id: 11 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35828 Sep 22 23:15:04.132 INFO [2] client ExtentClose { repair_id: ReconciliationId(25), extent_id: 11 }
35829 Sep 22 23:15:04.132 DEBG 25 Close extent 11
35830 Sep 22 23:15:04.132 DEBG 25 Close extent 11
35831 Sep 22 23:15:04.133 DEBG 25 Close extent 11
35832 Sep 22 23:15:04.133 DEBG [2] It's time to notify for 25
35833 Sep 22 23:15:04.133 INFO Completion from [2] id:25 status:true
35834 Sep 22 23:15:04.133 INFO [26/752] Repair commands completed
35835 Sep 22 23:15:04.133 INFO Pop front: ReconcileIO { id: ReconciliationId(26), op: ExtentRepair { repair_id: ReconciliationId(26), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35836 Sep 22 23:15:04.133 INFO Sent repair work, now wait for resp
35837 Sep 22 23:15:04.133 INFO [0] received reconcile message
35838 Sep 22 23:15:04.133 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(26), op: ExtentRepair { repair_id: ReconciliationId(26), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35839 Sep 22 23:15:04.133 INFO [0] client ExtentRepair { repair_id: ReconciliationId(26), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35840 Sep 22 23:15:04.133 INFO [0] Sending repair request ReconciliationId(26)
35841 Sep 22 23:15:04.133 INFO [1] received reconcile message
35842 Sep 22 23:15:04.133 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(26), op: ExtentRepair { repair_id: ReconciliationId(26), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35843 Sep 22 23:15:04.133 INFO [1] client ExtentRepair { repair_id: ReconciliationId(26), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35844 Sep 22 23:15:04.133 INFO [1] No action required ReconciliationId(26)
35845 Sep 22 23:15:04.133 INFO [2] received reconcile message
35846 Sep 22 23:15:04.133 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(26), op: ExtentRepair { repair_id: ReconciliationId(26), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35847 Sep 22 23:15:04.133 INFO [2] client ExtentRepair { repair_id: ReconciliationId(26), extent_id: 11, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35848 Sep 22 23:15:04.134 INFO [2] No action required ReconciliationId(26)
35849 Sep 22 23:15:04.134 DEBG 26 Repair extent 11 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
35850 Sep 22 23:15:04.134 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/00B.copy"
35851 Sep 22 23:15:04.197 INFO accepted connection, remote_addr: 127.0.0.1:51187, local_addr: 127.0.0.1:46213, task: repair
35852 Sep 22 23:15:04.197 TRCE incoming request, uri: /extent/11/files, method: GET, req_id: 7be2ad5a-a85f-4792-8154-5b18504ceefc, remote_addr: 127.0.0.1:51187, local_addr: 127.0.0.1:46213, task: repair
35853 Sep 22 23:15:04.197 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/11/files, method: GET, req_id: 7be2ad5a-a85f-4792-8154-5b18504ceefc, remote_addr: 127.0.0.1:51187, local_addr: 127.0.0.1:46213, task: repair
35854 Sep 22 23:15:04.197 INFO eid:11 Found repair files: ["00B", "00B.db"]
35855 Sep 22 23:15:04.198 TRCE incoming request, uri: /newextent/11/data, method: GET, req_id: 71e899e6-2917-4aad-ae54-74e9c21fccbd, remote_addr: 127.0.0.1:51187, local_addr: 127.0.0.1:46213, task: repair
35856 Sep 22 23:15:04.198 INFO request completed, latency_us: 324, response_code: 200, uri: /newextent/11/data, method: GET, req_id: 71e899e6-2917-4aad-ae54-74e9c21fccbd, remote_addr: 127.0.0.1:51187, local_addr: 127.0.0.1:46213, task: repair
35857 Sep 22 23:15:04.203 TRCE incoming request, uri: /newextent/11/db, method: GET, req_id: 2d389c87-3b92-4ac1-a8bd-9e76b06b1c1c, remote_addr: 127.0.0.1:51187, local_addr: 127.0.0.1:46213, task: repair
35858 Sep 22 23:15:04.203 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/11/db, method: GET, req_id: 2d389c87-3b92-4ac1-a8bd-9e76b06b1c1c, remote_addr: 127.0.0.1:51187, local_addr: 127.0.0.1:46213, task: repair
35859 Sep 22 23:15:04.204 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/00B.copy" to "/tmp/downstairs-vrx8aK6L/00/000/00B.replace"
35860 Sep 22 23:15:04.204 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35861 Sep 22 23:15:04.205 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/00B.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
35862 Sep 22 23:15:04.206 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/00B"
35863 Sep 22 23:15:04.206 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/00B.db"
35864 Sep 22 23:15:04.206 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35865 Sep 22 23:15:04.206 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/00B.replace" to "/tmp/downstairs-vrx8aK6L/00/000/00B.completed"
35866 Sep 22 23:15:04.206 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35867 Sep 22 23:15:04.206 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35868 Sep 22 23:15:04.206 DEBG [0] It's time to notify for 26
35869 Sep 22 23:15:04.206 INFO Completion from [0] id:26 status:true
35870 Sep 22 23:15:04.206 INFO [27/752] Repair commands completed
35871 Sep 22 23:15:04.206 INFO Pop front: ReconcileIO { id: ReconciliationId(27), op: ExtentReopen { repair_id: ReconciliationId(27), extent_id: 11 }, state: ClientData([New, New, New]) }
35872 Sep 22 23:15:04.206 INFO Sent repair work, now wait for resp
35873 Sep 22 23:15:04.206 INFO [0] received reconcile message
35874 Sep 22 23:15:04.206 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(27), op: ExtentReopen { repair_id: ReconciliationId(27), extent_id: 11 }, state: ClientData([InProgress, New, New]) }, : downstairs
35875 Sep 22 23:15:04.206 INFO [0] client ExtentReopen { repair_id: ReconciliationId(27), extent_id: 11 }
35876 Sep 22 23:15:04.206 INFO [1] received reconcile message
35877 Sep 22 23:15:04.206 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(27), op: ExtentReopen { repair_id: ReconciliationId(27), extent_id: 11 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35878 Sep 22 23:15:04.206 INFO [1] client ExtentReopen { repair_id: ReconciliationId(27), extent_id: 11 }
35879 Sep 22 23:15:04.206 INFO [2] received reconcile message
35880 Sep 22 23:15:04.206 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(27), op: ExtentReopen { repair_id: ReconciliationId(27), extent_id: 11 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35881 Sep 22 23:15:04.206 INFO [2] client ExtentReopen { repair_id: ReconciliationId(27), extent_id: 11 }
35882 Sep 22 23:15:04.207 DEBG 27 Reopen extent 11
35883 Sep 22 23:15:04.207 DEBG 27 Reopen extent 11
35884 Sep 22 23:15:04.208 DEBG 27 Reopen extent 11
35885 Sep 22 23:15:04.208 DEBG [2] It's time to notify for 27
35886 Sep 22 23:15:04.208 INFO Completion from [2] id:27 status:true
35887 Sep 22 23:15:04.208 INFO [28/752] Repair commands completed
35888 Sep 22 23:15:04.208 INFO Pop front: ReconcileIO { id: ReconciliationId(28), op: ExtentFlush { repair_id: ReconciliationId(28), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35889 Sep 22 23:15:04.208 INFO Sent repair work, now wait for resp
35890 Sep 22 23:15:04.208 INFO [0] received reconcile message
35891 Sep 22 23:15:04.208 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(28), op: ExtentFlush { repair_id: ReconciliationId(28), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35892 Sep 22 23:15:04.208 INFO [0] client ExtentFlush { repair_id: ReconciliationId(28), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35893 Sep 22 23:15:04.208 INFO [1] received reconcile message
35894 Sep 22 23:15:04.208 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(28), op: ExtentFlush { repair_id: ReconciliationId(28), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35895 Sep 22 23:15:04.208 INFO [1] client ExtentFlush { repair_id: ReconciliationId(28), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35896 Sep 22 23:15:04.209 INFO [2] received reconcile message
35897 Sep 22 23:15:04.209 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(28), op: ExtentFlush { repair_id: ReconciliationId(28), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35898 Sep 22 23:15:04.209 INFO [2] client ExtentFlush { repair_id: ReconciliationId(28), extent_id: 16, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35899 Sep 22 23:15:04.209 DEBG 28 Flush extent 16 with f:2 g:2
35900 Sep 22 23:15:04.209 DEBG Flush just extent 16 with f:2 and g:2
35901 Sep 22 23:15:04.209 DEBG [1] It's time to notify for 28
35902 Sep 22 23:15:04.209 INFO Completion from [1] id:28 status:true
35903 Sep 22 23:15:04.209 INFO [29/752] Repair commands completed
35904 Sep 22 23:15:04.209 INFO Pop front: ReconcileIO { id: ReconciliationId(29), op: ExtentClose { repair_id: ReconciliationId(29), extent_id: 16 }, state: ClientData([New, New, New]) }
35905 Sep 22 23:15:04.209 INFO Sent repair work, now wait for resp
35906 Sep 22 23:15:04.209 INFO [0] received reconcile message
35907 Sep 22 23:15:04.209 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(29), op: ExtentClose { repair_id: ReconciliationId(29), extent_id: 16 }, state: ClientData([InProgress, New, New]) }, : downstairs
35908 Sep 22 23:15:04.209 INFO [0] client ExtentClose { repair_id: ReconciliationId(29), extent_id: 16 }
35909 Sep 22 23:15:04.209 INFO [1] received reconcile message
35910 Sep 22 23:15:04.209 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(29), op: ExtentClose { repair_id: ReconciliationId(29), extent_id: 16 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35911 Sep 22 23:15:04.209 INFO [1] client ExtentClose { repair_id: ReconciliationId(29), extent_id: 16 }
35912 Sep 22 23:15:04.209 INFO [2] received reconcile message
35913 Sep 22 23:15:04.209 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(29), op: ExtentClose { repair_id: ReconciliationId(29), extent_id: 16 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35914 Sep 22 23:15:04.209 INFO [2] client ExtentClose { repair_id: ReconciliationId(29), extent_id: 16 }
35915 Sep 22 23:15:04.209 DEBG 29 Close extent 16
35916 Sep 22 23:15:04.209 DEBG 29 Close extent 16
35917 Sep 22 23:15:04.210 DEBG 29 Close extent 16
35918 Sep 22 23:15:04.210 DEBG [2] It's time to notify for 29
35919 Sep 22 23:15:04.210 INFO Completion from [2] id:29 status:true
35920 Sep 22 23:15:04.210 INFO [30/752] Repair commands completed
35921 Sep 22 23:15:04.210 INFO Pop front: ReconcileIO { id: ReconciliationId(30), op: ExtentRepair { repair_id: ReconciliationId(30), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
35922 Sep 22 23:15:04.210 INFO Sent repair work, now wait for resp
35923 Sep 22 23:15:04.210 INFO [0] received reconcile message
35924 Sep 22 23:15:04.210 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(30), op: ExtentRepair { repair_id: ReconciliationId(30), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
35925 Sep 22 23:15:04.210 INFO [0] client ExtentRepair { repair_id: ReconciliationId(30), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35926 Sep 22 23:15:04.210 INFO [0] Sending repair request ReconciliationId(30)
35927 Sep 22 23:15:04.210 INFO [1] received reconcile message
35928 Sep 22 23:15:04.210 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(30), op: ExtentRepair { repair_id: ReconciliationId(30), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35929 Sep 22 23:15:04.210 INFO [1] client ExtentRepair { repair_id: ReconciliationId(30), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35930 Sep 22 23:15:04.210 INFO [1] No action required ReconciliationId(30)
35931 Sep 22 23:15:04.211 INFO [2] received reconcile message
35932 Sep 22 23:15:04.211 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(30), op: ExtentRepair { repair_id: ReconciliationId(30), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
35933 Sep 22 23:15:04.211 INFO [2] client ExtentRepair { repair_id: ReconciliationId(30), extent_id: 16, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
35934 Sep 22 23:15:04.211 INFO [2] No action required ReconciliationId(30)
35935 Sep 22 23:15:04.211 DEBG 30 Repair extent 16 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
35936 Sep 22 23:15:04.211 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/010.copy"
35937 Sep 22 23:15:04.275 INFO accepted connection, remote_addr: 127.0.0.1:41743, local_addr: 127.0.0.1:46213, task: repair
35938 Sep 22 23:15:04.275 TRCE incoming request, uri: /extent/16/files, method: GET, req_id: 70672ca2-f83d-4706-8879-e6dab8b2c87b, remote_addr: 127.0.0.1:41743, local_addr: 127.0.0.1:46213, task: repair
35939 Sep 22 23:15:04.275 INFO request completed, latency_us: 203, response_code: 200, uri: /extent/16/files, method: GET, req_id: 70672ca2-f83d-4706-8879-e6dab8b2c87b, remote_addr: 127.0.0.1:41743, local_addr: 127.0.0.1:46213, task: repair
35940 Sep 22 23:15:04.275 INFO eid:16 Found repair files: ["010", "010.db"]
35941 Sep 22 23:15:04.276 TRCE incoming request, uri: /newextent/16/data, method: GET, req_id: c4c672da-1bc0-4b33-843f-a79992d3a7ec, remote_addr: 127.0.0.1:41743, local_addr: 127.0.0.1:46213, task: repair
35942 Sep 22 23:15:04.276 INFO request completed, latency_us: 306, response_code: 200, uri: /newextent/16/data, method: GET, req_id: c4c672da-1bc0-4b33-843f-a79992d3a7ec, remote_addr: 127.0.0.1:41743, local_addr: 127.0.0.1:46213, task: repair
35943 Sep 22 23:15:04.281 TRCE incoming request, uri: /newextent/16/db, method: GET, req_id: 34bfd9ff-fbb1-4ae5-bf6d-0c17fdd2a155, remote_addr: 127.0.0.1:41743, local_addr: 127.0.0.1:46213, task: repair
35944 Sep 22 23:15:04.281 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/16/db, method: GET, req_id: 34bfd9ff-fbb1-4ae5-bf6d-0c17fdd2a155, remote_addr: 127.0.0.1:41743, local_addr: 127.0.0.1:46213, task: repair
35945 Sep 22 23:15:04.282 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/010.copy" to "/tmp/downstairs-vrx8aK6L/00/000/010.replace"
35946 Sep 22 23:15:04.282 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35947 Sep 22 23:15:04.283 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/010.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
35948 Sep 22 23:15:04.283 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/010"
35949 Sep 22 23:15:04.284 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/010.db"
35950 Sep 22 23:15:04.284 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35951 Sep 22 23:15:04.284 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/010.replace" to "/tmp/downstairs-vrx8aK6L/00/000/010.completed"
35952 Sep 22 23:15:04.284 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35953 Sep 22 23:15:04.284 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
35954 Sep 22 23:15:04.284 DEBG [0] It's time to notify for 30
35955 Sep 22 23:15:04.284 INFO Completion from [0] id:30 status:true
35956 Sep 22 23:15:04.284 INFO [31/752] Repair commands completed
35957 Sep 22 23:15:04.284 INFO Pop front: ReconcileIO { id: ReconciliationId(31), op: ExtentReopen { repair_id: ReconciliationId(31), extent_id: 16 }, state: ClientData([New, New, New]) }
35958 Sep 22 23:15:04.284 INFO Sent repair work, now wait for resp
35959 Sep 22 23:15:04.284 INFO [0] received reconcile message
35960 Sep 22 23:15:04.284 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(31), op: ExtentReopen { repair_id: ReconciliationId(31), extent_id: 16 }, state: ClientData([InProgress, New, New]) }, : downstairs
35961 Sep 22 23:15:04.284 INFO [0] client ExtentReopen { repair_id: ReconciliationId(31), extent_id: 16 }
35962 Sep 22 23:15:04.284 INFO [1] received reconcile message
35963 Sep 22 23:15:04.284 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(31), op: ExtentReopen { repair_id: ReconciliationId(31), extent_id: 16 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35964 Sep 22 23:15:04.284 INFO [1] client ExtentReopen { repair_id: ReconciliationId(31), extent_id: 16 }
35965 Sep 22 23:15:04.284 INFO [2] received reconcile message
35966 Sep 22 23:15:04.284 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(31), op: ExtentReopen { repair_id: ReconciliationId(31), extent_id: 16 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
35967 Sep 22 23:15:04.284 INFO [2] client ExtentReopen { repair_id: ReconciliationId(31), extent_id: 16 }
35968 Sep 22 23:15:04.284 DEBG 31 Reopen extent 16
35969 Sep 22 23:15:04.285 DEBG 31 Reopen extent 16
35970 Sep 22 23:15:04.286 DEBG 31 Reopen extent 16
35971 Sep 22 23:15:04.286 DEBG [2] It's time to notify for 31
35972 Sep 22 23:15:04.286 INFO Completion from [2] id:31 status:true
35973 Sep 22 23:15:04.286 INFO [32/752] Repair commands completed
35974 Sep 22 23:15:04.286 INFO Pop front: ReconcileIO { id: ReconciliationId(32), op: ExtentFlush { repair_id: ReconciliationId(32), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
35975 Sep 22 23:15:04.286 INFO Sent repair work, now wait for resp
35976 Sep 22 23:15:04.286 INFO [0] received reconcile message
35977 Sep 22 23:15:04.286 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(32), op: ExtentFlush { repair_id: ReconciliationId(32), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
35978 Sep 22 23:15:04.286 INFO [0] client ExtentFlush { repair_id: ReconciliationId(32), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35979 Sep 22 23:15:04.286 INFO [1] received reconcile message
35980 Sep 22 23:15:04.286 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(32), op: ExtentFlush { repair_id: ReconciliationId(32), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
35981 Sep 22 23:15:04.286 INFO [1] client ExtentFlush { repair_id: ReconciliationId(32), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35982 Sep 22 23:15:04.286 INFO [2] received reconcile message
35983 Sep 22 23:15:04.286 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(32), op: ExtentFlush { repair_id: ReconciliationId(32), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
35984 Sep 22 23:15:04.286 INFO [2] client ExtentFlush { repair_id: ReconciliationId(32), extent_id: 67, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
35985 Sep 22 23:15:04.287 DEBG 32 Flush extent 67 with f:2 g:2
35986 Sep 22 23:15:04.287 DEBG Flush just extent 67 with f:2 and g:2
35987 Sep 22 23:15:04.287 DEBG [1] It's time to notify for 32
35988 Sep 22 23:15:04.287 INFO Completion from [1] id:32 status:true
35989 Sep 22 23:15:04.287 INFO [33/752] Repair commands completed
35990 Sep 22 23:15:04.287 INFO Pop front: ReconcileIO { id: ReconciliationId(33), op: ExtentClose { repair_id: ReconciliationId(33), extent_id: 67 }, state: ClientData([New, New, New]) }
35991 Sep 22 23:15:04.287 INFO Sent repair work, now wait for resp
35992 Sep 22 23:15:04.287 INFO [0] received reconcile message
35993 Sep 22 23:15:04.287 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(33), op: ExtentClose { repair_id: ReconciliationId(33), extent_id: 67 }, state: ClientData([InProgress, New, New]) }, : downstairs
35994 Sep 22 23:15:04.287 INFO [0] client ExtentClose { repair_id: ReconciliationId(33), extent_id: 67 }
35995 Sep 22 23:15:04.287 INFO [1] received reconcile message
35996 Sep 22 23:15:04.287 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(33), op: ExtentClose { repair_id: ReconciliationId(33), extent_id: 67 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
35997 Sep 22 23:15:04.287 INFO [1] client ExtentClose { repair_id: ReconciliationId(33), extent_id: 67 }
35998 Sep 22 23:15:04.287 INFO [2] received reconcile message
35999 Sep 22 23:15:04.287 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(33), op: ExtentClose { repair_id: ReconciliationId(33), extent_id: 67 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36000 Sep 22 23:15:04.287 INFO [2] client ExtentClose { repair_id: ReconciliationId(33), extent_id: 67 }
36001 Sep 22 23:15:04.287 DEBG 33 Close extent 67
36002 Sep 22 23:15:04.287 DEBG 33 Close extent 67
36003 Sep 22 23:15:04.288 DEBG 33 Close extent 67
36004 Sep 22 23:15:04.288 DEBG [2] It's time to notify for 33
36005 Sep 22 23:15:04.288 INFO Completion from [2] id:33 status:true
36006 Sep 22 23:15:04.288 INFO [34/752] Repair commands completed
36007 Sep 22 23:15:04.288 INFO Pop front: ReconcileIO { id: ReconciliationId(34), op: ExtentRepair { repair_id: ReconciliationId(34), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36008 Sep 22 23:15:04.288 INFO Sent repair work, now wait for resp
36009 Sep 22 23:15:04.288 INFO [0] received reconcile message
36010 Sep 22 23:15:04.288 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(34), op: ExtentRepair { repair_id: ReconciliationId(34), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36011 Sep 22 23:15:04.288 INFO [0] client ExtentRepair { repair_id: ReconciliationId(34), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36012 Sep 22 23:15:04.288 INFO [0] Sending repair request ReconciliationId(34)
36013 Sep 22 23:15:04.288 INFO [1] received reconcile message
36014 Sep 22 23:15:04.288 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(34), op: ExtentRepair { repair_id: ReconciliationId(34), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36015 Sep 22 23:15:04.288 INFO [1] client ExtentRepair { repair_id: ReconciliationId(34), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36016 Sep 22 23:15:04.288 INFO [1] No action required ReconciliationId(34)
36017 Sep 22 23:15:04.288 INFO [2] received reconcile message
36018 Sep 22 23:15:04.288 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(34), op: ExtentRepair { repair_id: ReconciliationId(34), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36019 Sep 22 23:15:04.288 INFO [2] client ExtentRepair { repair_id: ReconciliationId(34), extent_id: 67, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36020 Sep 22 23:15:04.288 INFO [2] No action required ReconciliationId(34)
36021 Sep 22 23:15:04.289 DEBG 34 Repair extent 67 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
36022 Sep 22 23:15:04.289 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/043.copy"
36023 Sep 22 23:15:04.352 INFO accepted connection, remote_addr: 127.0.0.1:46377, local_addr: 127.0.0.1:46213, task: repair
36024 Sep 22 23:15:04.353 TRCE incoming request, uri: /extent/67/files, method: GET, req_id: b6f10fa0-643d-4aa6-9748-bd9e27813086, remote_addr: 127.0.0.1:46377, local_addr: 127.0.0.1:46213, task: repair
36025 Sep 22 23:15:04.353 INFO request completed, latency_us: 201, response_code: 200, uri: /extent/67/files, method: GET, req_id: b6f10fa0-643d-4aa6-9748-bd9e27813086, remote_addr: 127.0.0.1:46377, local_addr: 127.0.0.1:46213, task: repair
36026 Sep 22 23:15:04.353 INFO eid:67 Found repair files: ["043", "043.db"]
36027 Sep 22 23:15:04.353 TRCE incoming request, uri: /newextent/67/data, method: GET, req_id: dcfe8f0c-cb29-4033-a986-d3f6dc563319, remote_addr: 127.0.0.1:46377, local_addr: 127.0.0.1:46213, task: repair
36028 Sep 22 23:15:04.354 INFO request completed, latency_us: 328, response_code: 200, uri: /newextent/67/data, method: GET, req_id: dcfe8f0c-cb29-4033-a986-d3f6dc563319, remote_addr: 127.0.0.1:46377, local_addr: 127.0.0.1:46213, task: repair
36029 Sep 22 23:15:04.359 TRCE incoming request, uri: /newextent/67/db, method: GET, req_id: 17332c8d-50b9-431f-ab77-cce357c047d7, remote_addr: 127.0.0.1:46377, local_addr: 127.0.0.1:46213, task: repair
36030 Sep 22 23:15:04.359 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/67/db, method: GET, req_id: 17332c8d-50b9-431f-ab77-cce357c047d7, remote_addr: 127.0.0.1:46377, local_addr: 127.0.0.1:46213, task: repair
36031 Sep 22 23:15:04.360 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/043.copy" to "/tmp/downstairs-vrx8aK6L/00/000/043.replace"
36032 Sep 22 23:15:04.360 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36033 Sep 22 23:15:04.361 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/043.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
36034 Sep 22 23:15:04.361 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/043"
36035 Sep 22 23:15:04.361 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/043.db"
36036 Sep 22 23:15:04.361 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36037 Sep 22 23:15:04.361 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/043.replace" to "/tmp/downstairs-vrx8aK6L/00/000/043.completed"
36038 Sep 22 23:15:04.361 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36039 Sep 22 23:15:04.362 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36040 Sep 22 23:15:04.362 DEBG [0] It's time to notify for 34
36041 Sep 22 23:15:04.362 INFO Completion from [0] id:34 status:true
36042 Sep 22 23:15:04.362 INFO [35/752] Repair commands completed
36043 Sep 22 23:15:04.362 INFO Pop front: ReconcileIO { id: ReconciliationId(35), op: ExtentReopen { repair_id: ReconciliationId(35), extent_id: 67 }, state: ClientData([New, New, New]) }
36044 Sep 22 23:15:04.362 INFO Sent repair work, now wait for resp
36045 Sep 22 23:15:04.362 INFO [0] received reconcile message
36046 Sep 22 23:15:04.362 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(35), op: ExtentReopen { repair_id: ReconciliationId(35), extent_id: 67 }, state: ClientData([InProgress, New, New]) }, : downstairs
36047 Sep 22 23:15:04.362 INFO [0] client ExtentReopen { repair_id: ReconciliationId(35), extent_id: 67 }
36048 Sep 22 23:15:04.362 INFO [1] received reconcile message
36049 Sep 22 23:15:04.362 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(35), op: ExtentReopen { repair_id: ReconciliationId(35), extent_id: 67 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36050 Sep 22 23:15:04.362 INFO [1] client ExtentReopen { repair_id: ReconciliationId(35), extent_id: 67 }
36051 Sep 22 23:15:04.362 INFO [2] received reconcile message
36052 Sep 22 23:15:04.362 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(35), op: ExtentReopen { repair_id: ReconciliationId(35), extent_id: 67 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36053 Sep 22 23:15:04.362 INFO [2] client ExtentReopen { repair_id: ReconciliationId(35), extent_id: 67 }
36054 Sep 22 23:15:04.362 DEBG 35 Reopen extent 67
36055 Sep 22 23:15:04.363 DEBG 35 Reopen extent 67
36056 Sep 22 23:15:04.363 DEBG 35 Reopen extent 67
36057 Sep 22 23:15:04.364 DEBG [2] It's time to notify for 35
36058 Sep 22 23:15:04.364 INFO Completion from [2] id:35 status:true
36059 Sep 22 23:15:04.364 INFO [36/752] Repair commands completed
36060 Sep 22 23:15:04.364 INFO Pop front: ReconcileIO { id: ReconciliationId(36), op: ExtentFlush { repair_id: ReconciliationId(36), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36061 Sep 22 23:15:04.364 INFO Sent repair work, now wait for resp
36062 Sep 22 23:15:04.364 INFO [0] received reconcile message
36063 Sep 22 23:15:04.364 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(36), op: ExtentFlush { repair_id: ReconciliationId(36), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36064 Sep 22 23:15:04.364 INFO [0] client ExtentFlush { repair_id: ReconciliationId(36), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36065 Sep 22 23:15:04.364 INFO [1] received reconcile message
36066 Sep 22 23:15:04.364 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(36), op: ExtentFlush { repair_id: ReconciliationId(36), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36067 Sep 22 23:15:04.364 INFO [1] client ExtentFlush { repair_id: ReconciliationId(36), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36068 Sep 22 23:15:04.364 INFO [2] received reconcile message
36069 Sep 22 23:15:04.364 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(36), op: ExtentFlush { repair_id: ReconciliationId(36), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36070 Sep 22 23:15:04.364 INFO [2] client ExtentFlush { repair_id: ReconciliationId(36), extent_id: 137, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36071 Sep 22 23:15:04.364 DEBG 36 Flush extent 137 with f:2 g:2
36072 Sep 22 23:15:04.364 DEBG Flush just extent 137 with f:2 and g:2
36073 Sep 22 23:15:04.365 DEBG [1] It's time to notify for 36
36074 Sep 22 23:15:04.365 INFO Completion from [1] id:36 status:true
36075 Sep 22 23:15:04.365 INFO [37/752] Repair commands completed
36076 Sep 22 23:15:04.365 INFO Pop front: ReconcileIO { id: ReconciliationId(37), op: ExtentClose { repair_id: ReconciliationId(37), extent_id: 137 }, state: ClientData([New, New, New]) }
36077 Sep 22 23:15:04.365 INFO Sent repair work, now wait for resp
36078 Sep 22 23:15:04.365 INFO [0] received reconcile message
36079 Sep 22 23:15:04.365 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(37), op: ExtentClose { repair_id: ReconciliationId(37), extent_id: 137 }, state: ClientData([InProgress, New, New]) }, : downstairs
36080 Sep 22 23:15:04.365 INFO [0] client ExtentClose { repair_id: ReconciliationId(37), extent_id: 137 }
36081 Sep 22 23:15:04.365 INFO [1] received reconcile message
36082 Sep 22 23:15:04.365 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(37), op: ExtentClose { repair_id: ReconciliationId(37), extent_id: 137 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36083 Sep 22 23:15:04.365 INFO [1] client ExtentClose { repair_id: ReconciliationId(37), extent_id: 137 }
36084 Sep 22 23:15:04.365 INFO [2] received reconcile message
36085 Sep 22 23:15:04.365 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(37), op: ExtentClose { repair_id: ReconciliationId(37), extent_id: 137 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36086 Sep 22 23:15:04.365 INFO [2] client ExtentClose { repair_id: ReconciliationId(37), extent_id: 137 }
36087 Sep 22 23:15:04.365 DEBG 37 Close extent 137
36088 Sep 22 23:15:04.365 DEBG 37 Close extent 137
36089 Sep 22 23:15:04.366 DEBG 37 Close extent 137
36090 Sep 22 23:15:04.366 DEBG [2] It's time to notify for 37
36091 Sep 22 23:15:04.366 INFO Completion from [2] id:37 status:true
36092 Sep 22 23:15:04.366 INFO [38/752] Repair commands completed
36093 Sep 22 23:15:04.366 INFO Pop front: ReconcileIO { id: ReconciliationId(38), op: ExtentRepair { repair_id: ReconciliationId(38), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36094 Sep 22 23:15:04.366 INFO Sent repair work, now wait for resp
36095 Sep 22 23:15:04.366 INFO [0] received reconcile message
36096 Sep 22 23:15:04.366 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(38), op: ExtentRepair { repair_id: ReconciliationId(38), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36097 Sep 22 23:15:04.366 INFO [0] client ExtentRepair { repair_id: ReconciliationId(38), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36098 Sep 22 23:15:04.366 INFO [0] Sending repair request ReconciliationId(38)
36099 Sep 22 23:15:04.366 INFO [1] received reconcile message
36100 Sep 22 23:15:04.366 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(38), op: ExtentRepair { repair_id: ReconciliationId(38), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36101 Sep 22 23:15:04.366 INFO [1] client ExtentRepair { repair_id: ReconciliationId(38), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36102 Sep 22 23:15:04.366 INFO [1] No action required ReconciliationId(38)
36103 Sep 22 23:15:04.366 INFO [2] received reconcile message
36104 Sep 22 23:15:04.366 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(38), op: ExtentRepair { repair_id: ReconciliationId(38), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36105 Sep 22 23:15:04.366 INFO [2] client ExtentRepair { repair_id: ReconciliationId(38), extent_id: 137, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36106 Sep 22 23:15:04.366 INFO [2] No action required ReconciliationId(38)
36107 Sep 22 23:15:04.366 DEBG 38 Repair extent 137 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
36108 Sep 22 23:15:04.366 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/089.copy"
36109 Sep 22 23:15:04.426 INFO accepted connection, remote_addr: 127.0.0.1:36872, local_addr: 127.0.0.1:46213, task: repair
36110 Sep 22 23:15:04.426 TRCE incoming request, uri: /extent/137/files, method: GET, req_id: 23b41528-ab64-4da8-a3b7-43cfbefd3844, remote_addr: 127.0.0.1:36872, local_addr: 127.0.0.1:46213, task: repair
36111 Sep 22 23:15:04.426 INFO request completed, latency_us: 194, response_code: 200, uri: /extent/137/files, method: GET, req_id: 23b41528-ab64-4da8-a3b7-43cfbefd3844, remote_addr: 127.0.0.1:36872, local_addr: 127.0.0.1:46213, task: repair
36112 Sep 22 23:15:04.427 INFO eid:137 Found repair files: ["089", "089.db"]
36113 Sep 22 23:15:04.427 TRCE incoming request, uri: /newextent/137/data, method: GET, req_id: bb6bc246-6e13-4973-9bc9-5fda48a53a1c, remote_addr: 127.0.0.1:36872, local_addr: 127.0.0.1:46213, task: repair
36114 Sep 22 23:15:04.427 INFO request completed, latency_us: 318, response_code: 200, uri: /newextent/137/data, method: GET, req_id: bb6bc246-6e13-4973-9bc9-5fda48a53a1c, remote_addr: 127.0.0.1:36872, local_addr: 127.0.0.1:46213, task: repair
36115 Sep 22 23:15:04.428 ERRO [0] job id 1073 saw error GenericError("test error")
36116 Sep 22 23:15:04.428 ERRO [0] job id 1073 saw error GenericError("test error")
36117 Sep 22 23:15:04.428 ERRO [0] job id 1074 saw error GenericError("test error")
36118 Sep 22 23:15:04.428 ERRO [0] job id 1074 saw error GenericError("test error")
36119 Sep 22 23:15:04.429 WARN returning error on write!
36120 Sep 22 23:15:04.429 DEBG Write :1070 deps:[JobId(1069), JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:false
36121 Sep 22 23:15:04.430 WARN 1071 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
36122 Sep 22 23:15:04.432 TRCE incoming request, uri: /newextent/137/db, method: GET, req_id: a2684cb8-54ca-4b46-b202-2bcd71157a56, remote_addr: 127.0.0.1:36872, local_addr: 127.0.0.1:46213, task: repair
36123 Sep 22 23:15:04.433 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/137/db, method: GET, req_id: a2684cb8-54ca-4b46-b202-2bcd71157a56, remote_addr: 127.0.0.1:36872, local_addr: 127.0.0.1:46213, task: repair
36124 Sep 22 23:15:04.434 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/089.copy" to "/tmp/downstairs-vrx8aK6L/00/000/089.replace"
36125 Sep 22 23:15:04.434 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36126 Sep 22 23:15:04.435 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/089.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
36127 Sep 22 23:15:04.435 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/089"
36128 Sep 22 23:15:04.435 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/089.db"
36129 Sep 22 23:15:04.435 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36130 Sep 22 23:15:04.435 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/089.replace" to "/tmp/downstairs-vrx8aK6L/00/000/089.completed"
36131 Sep 22 23:15:04.435 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36132 Sep 22 23:15:04.435 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36133 Sep 22 23:15:04.435 DEBG [0] It's time to notify for 38
36134 Sep 22 23:15:04.436 INFO Completion from [0] id:38 status:true
36135 Sep 22 23:15:04.436 INFO [39/752] Repair commands completed
36136 Sep 22 23:15:04.436 INFO Pop front: ReconcileIO { id: ReconciliationId(39), op: ExtentReopen { repair_id: ReconciliationId(39), extent_id: 137 }, state: ClientData([New, New, New]) }
36137 Sep 22 23:15:04.436 INFO Sent repair work, now wait for resp
36138 Sep 22 23:15:04.436 INFO [0] received reconcile message
36139 Sep 22 23:15:04.436 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(39), op: ExtentReopen { repair_id: ReconciliationId(39), extent_id: 137 }, state: ClientData([InProgress, New, New]) }, : downstairs
36140 Sep 22 23:15:04.436 INFO [0] client ExtentReopen { repair_id: ReconciliationId(39), extent_id: 137 }
36141 Sep 22 23:15:04.436 INFO [1] received reconcile message
36142 Sep 22 23:15:04.436 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(39), op: ExtentReopen { repair_id: ReconciliationId(39), extent_id: 137 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36143 Sep 22 23:15:04.436 INFO [1] client ExtentReopen { repair_id: ReconciliationId(39), extent_id: 137 }
36144 Sep 22 23:15:04.436 INFO [2] received reconcile message
36145 Sep 22 23:15:04.436 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(39), op: ExtentReopen { repair_id: ReconciliationId(39), extent_id: 137 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36146 Sep 22 23:15:04.436 INFO [2] client ExtentReopen { repair_id: ReconciliationId(39), extent_id: 137 }
36147 Sep 22 23:15:04.436 DEBG 39 Reopen extent 137
36148 Sep 22 23:15:04.437 DEBG 39 Reopen extent 137
36149 Sep 22 23:15:04.437 DEBG 39 Reopen extent 137
36150 Sep 22 23:15:04.438 DEBG [2] It's time to notify for 39
36151 Sep 22 23:15:04.438 INFO Completion from [2] id:39 status:true
36152 Sep 22 23:15:04.438 INFO [40/752] Repair commands completed
36153 Sep 22 23:15:04.438 INFO Pop front: ReconcileIO { id: ReconciliationId(40), op: ExtentFlush { repair_id: ReconciliationId(40), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36154 Sep 22 23:15:04.438 INFO Sent repair work, now wait for resp
36155 Sep 22 23:15:04.438 INFO [0] received reconcile message
36156 Sep 22 23:15:04.438 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(40), op: ExtentFlush { repair_id: ReconciliationId(40), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36157 Sep 22 23:15:04.438 INFO [0] client ExtentFlush { repair_id: ReconciliationId(40), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36158 Sep 22 23:15:04.438 INFO [1] received reconcile message
36159 Sep 22 23:15:04.438 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(40), op: ExtentFlush { repair_id: ReconciliationId(40), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36160 Sep 22 23:15:04.438 INFO [1] client ExtentFlush { repair_id: ReconciliationId(40), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36161 Sep 22 23:15:04.438 INFO [2] received reconcile message
36162 Sep 22 23:15:04.438 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(40), op: ExtentFlush { repair_id: ReconciliationId(40), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36163 Sep 22 23:15:04.438 INFO [2] client ExtentFlush { repair_id: ReconciliationId(40), extent_id: 146, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36164 Sep 22 23:15:04.438 DEBG 40 Flush extent 146 with f:2 g:2
36165 Sep 22 23:15:04.438 DEBG Flush just extent 146 with f:2 and g:2
36166 Sep 22 23:15:04.438 DEBG [1] It's time to notify for 40
36167 Sep 22 23:15:04.438 INFO Completion from [1] id:40 status:true
36168 Sep 22 23:15:04.438 INFO [41/752] Repair commands completed
36169 Sep 22 23:15:04.438 INFO Pop front: ReconcileIO { id: ReconciliationId(41), op: ExtentClose { repair_id: ReconciliationId(41), extent_id: 146 }, state: ClientData([New, New, New]) }
36170 Sep 22 23:15:04.438 INFO Sent repair work, now wait for resp
36171 Sep 22 23:15:04.438 INFO [0] received reconcile message
36172 Sep 22 23:15:04.438 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(41), op: ExtentClose { repair_id: ReconciliationId(41), extent_id: 146 }, state: ClientData([InProgress, New, New]) }, : downstairs
36173 Sep 22 23:15:04.438 INFO [0] client ExtentClose { repair_id: ReconciliationId(41), extent_id: 146 }
36174 Sep 22 23:15:04.438 INFO [1] received reconcile message
36175 Sep 22 23:15:04.438 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(41), op: ExtentClose { repair_id: ReconciliationId(41), extent_id: 146 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36176 Sep 22 23:15:04.438 INFO [1] client ExtentClose { repair_id: ReconciliationId(41), extent_id: 146 }
36177 Sep 22 23:15:04.438 INFO [2] received reconcile message
36178 Sep 22 23:15:04.438 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(41), op: ExtentClose { repair_id: ReconciliationId(41), extent_id: 146 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36179 Sep 22 23:15:04.439 INFO [2] client ExtentClose { repair_id: ReconciliationId(41), extent_id: 146 }
36180 Sep 22 23:15:04.439 DEBG 41 Close extent 146
36181 Sep 22 23:15:04.439 DEBG 41 Close extent 146
36182 Sep 22 23:15:04.439 DEBG 41 Close extent 146
36183 Sep 22 23:15:04.440 DEBG [2] It's time to notify for 41
36184 Sep 22 23:15:04.440 INFO Completion from [2] id:41 status:true
36185 Sep 22 23:15:04.440 INFO [42/752] Repair commands completed
36186 Sep 22 23:15:04.440 INFO Pop front: ReconcileIO { id: ReconciliationId(42), op: ExtentRepair { repair_id: ReconciliationId(42), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36187 Sep 22 23:15:04.440 INFO Sent repair work, now wait for resp
36188 Sep 22 23:15:04.440 INFO [0] received reconcile message
36189 Sep 22 23:15:04.440 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(42), op: ExtentRepair { repair_id: ReconciliationId(42), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36190 Sep 22 23:15:04.440 INFO [0] client ExtentRepair { repair_id: ReconciliationId(42), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36191 Sep 22 23:15:04.440 INFO [0] Sending repair request ReconciliationId(42)
36192 Sep 22 23:15:04.440 INFO [1] received reconcile message
36193 Sep 22 23:15:04.440 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(42), op: ExtentRepair { repair_id: ReconciliationId(42), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36194 Sep 22 23:15:04.440 INFO [1] client ExtentRepair { repair_id: ReconciliationId(42), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36195 Sep 22 23:15:04.440 INFO [1] No action required ReconciliationId(42)
36196 Sep 22 23:15:04.440 INFO [2] received reconcile message
36197 Sep 22 23:15:04.440 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(42), op: ExtentRepair { repair_id: ReconciliationId(42), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36198 Sep 22 23:15:04.440 INFO [2] client ExtentRepair { repair_id: ReconciliationId(42), extent_id: 146, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36199 Sep 22 23:15:04.440 INFO [2] No action required ReconciliationId(42)
36200 Sep 22 23:15:04.440 DEBG 42 Repair extent 146 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
36201 Sep 22 23:15:04.440 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/092.copy"
36202 Sep 22 23:15:04.459 DEBG Write :1070 deps:[JobId(1069), JobId(1066), JobId(1063), JobId(1060), JobId(1057), JobId(1054), JobId(1051), JobId(1048), JobId(1045), JobId(1042), JobId(1040), JobId(1037), JobId(1034), JobId(1031), JobId(1028), JobId(1026), JobId(1023), JobId(1020), JobId(1017), JobId(1014), JobId(1011), JobId(1008), JobId(1005), JobId(1002)] res:true
36203 Sep 22 23:15:04.461 INFO [lossy] skipping 1071
36204 Sep 22 23:15:04.461 INFO [lossy] skipping 1073
36205 Sep 22 23:15:04.461 INFO [lossy] skipping 1071
36206 Sep 22 23:15:04.461 INFO [lossy] skipping 1073
36207 Sep 22 23:15:04.461 WARN returning error on flush!
36208 Sep 22 23:15:04.461 DEBG Flush :1071 extent_limit None deps:[JobId(1070), JobId(1069)] res:false f:25 g:1
36209 Sep 22 23:15:04.461 INFO [lossy] skipping 1073
36210 Sep 22 23:15:04.461 WARN returning error on flush!
36211 Sep 22 23:15:04.461 DEBG Flush :1071 extent_limit None deps:[JobId(1070), JobId(1069)] res:false f:25 g:1
36212 Sep 22 23:15:04.461 INFO [lossy] skipping 1073
36213 Sep 22 23:15:04.461 INFO [lossy] skipping 1071
36214 Sep 22 23:15:04.461 INFO [lossy] skipping 1073
36215 Sep 22 23:15:04.464 DEBG Flush :1071 extent_limit None deps:[JobId(1070), JobId(1069)] res:true f:25 g:1
36216 Sep 22 23:15:04.464 INFO [lossy] skipping 1073
36217 Sep 22 23:15:04.464 WARN 1073 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
36218 Sep 22 23:15:04.464 INFO [lossy] skipping 1072
36219 Sep 22 23:15:04.464 INFO [lossy] skipping 1073
36220 Sep 22 23:15:04.464 INFO [lossy] skipping 1074
36221 Sep 22 23:15:04.470 DEBG Read :1072 deps:[JobId(1071)] res:true
36222 Sep 22 23:15:04.492 DEBG IO Flush 1075 has deps [JobId(1074), JobId(1073)]
36223 Sep 22 23:15:04.492 ERRO [1] job id 1070 saw error GenericError("test error")
36224 Sep 22 23:15:04.492 ERRO [1] job id 1071 saw error GenericError("test error")
36225 Sep 22 23:15:04.492 ERRO [1] job id 1071 saw error GenericError("test error")
36226 Sep 22 23:15:04.493 DEBG [rc] retire 1071 clears [JobId(1070), JobId(1071)], : downstairs
36227 Sep 22 23:15:04.496 INFO [lossy] sleeping 1 second
36228 Sep 22 23:15:04.502 INFO accepted connection, remote_addr: 127.0.0.1:55504, local_addr: 127.0.0.1:46213, task: repair
36229 Sep 22 23:15:04.502 TRCE incoming request, uri: /extent/146/files, method: GET, req_id: 3b3b0721-4abd-48c3-8f81-d354f2026591, remote_addr: 127.0.0.1:55504, local_addr: 127.0.0.1:46213, task: repair
36230 Sep 22 23:15:04.503 INFO request completed, latency_us: 217, response_code: 200, uri: /extent/146/files, method: GET, req_id: 3b3b0721-4abd-48c3-8f81-d354f2026591, remote_addr: 127.0.0.1:55504, local_addr: 127.0.0.1:46213, task: repair
36231 Sep 22 23:15:04.503 INFO eid:146 Found repair files: ["092", "092.db"]
36232 Sep 22 23:15:04.503 TRCE incoming request, uri: /newextent/146/data, method: GET, req_id: 8217c803-3b29-43da-ba79-94146ad04d4d, remote_addr: 127.0.0.1:55504, local_addr: 127.0.0.1:46213, task: repair
36233 Sep 22 23:15:04.504 INFO request completed, latency_us: 338, response_code: 200, uri: /newextent/146/data, method: GET, req_id: 8217c803-3b29-43da-ba79-94146ad04d4d, remote_addr: 127.0.0.1:55504, local_addr: 127.0.0.1:46213, task: repair
36234 Sep 22 23:15:04.509 TRCE incoming request, uri: /newextent/146/db, method: GET, req_id: 6946b944-c5c9-470c-9112-fb04347c6b00, remote_addr: 127.0.0.1:55504, local_addr: 127.0.0.1:46213, task: repair
36235 Sep 22 23:15:04.509 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/146/db, method: GET, req_id: 6946b944-c5c9-470c-9112-fb04347c6b00, remote_addr: 127.0.0.1:55504, local_addr: 127.0.0.1:46213, task: repair
36236 Sep 22 23:15:04.510 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/092.copy" to "/tmp/downstairs-vrx8aK6L/00/000/092.replace"
36237 Sep 22 23:15:04.510 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36238 Sep 22 23:15:04.511 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/092.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
36239 Sep 22 23:15:04.511 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/092"
36240 Sep 22 23:15:04.511 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/092.db"
36241 Sep 22 23:15:04.511 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36242 Sep 22 23:15:04.511 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/092.replace" to "/tmp/downstairs-vrx8aK6L/00/000/092.completed"
36243 Sep 22 23:15:04.512 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36244 Sep 22 23:15:04.512 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36245 Sep 22 23:15:04.512 DEBG [0] It's time to notify for 42
36246 Sep 22 23:15:04.512 INFO Completion from [0] id:42 status:true
36247 Sep 22 23:15:04.512 INFO [43/752] Repair commands completed
36248 Sep 22 23:15:04.512 INFO Pop front: ReconcileIO { id: ReconciliationId(43), op: ExtentReopen { repair_id: ReconciliationId(43), extent_id: 146 }, state: ClientData([New, New, New]) }
36249 Sep 22 23:15:04.512 INFO Sent repair work, now wait for resp
36250 Sep 22 23:15:04.512 INFO [0] received reconcile message
36251 Sep 22 23:15:04.512 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(43), op: ExtentReopen { repair_id: ReconciliationId(43), extent_id: 146 }, state: ClientData([InProgress, New, New]) }, : downstairs
36252 Sep 22 23:15:04.512 INFO [0] client ExtentReopen { repair_id: ReconciliationId(43), extent_id: 146 }
36253 Sep 22 23:15:04.512 INFO [1] received reconcile message
36254 Sep 22 23:15:04.512 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(43), op: ExtentReopen { repair_id: ReconciliationId(43), extent_id: 146 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36255 Sep 22 23:15:04.512 INFO [1] client ExtentReopen { repair_id: ReconciliationId(43), extent_id: 146 }
36256 Sep 22 23:15:04.512 INFO [2] received reconcile message
36257 Sep 22 23:15:04.512 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(43), op: ExtentReopen { repair_id: ReconciliationId(43), extent_id: 146 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36258 Sep 22 23:15:04.512 INFO [2] client ExtentReopen { repair_id: ReconciliationId(43), extent_id: 146 }
36259 Sep 22 23:15:04.512 DEBG 43 Reopen extent 146
36260 Sep 22 23:15:04.513 DEBG 43 Reopen extent 146
36261 Sep 22 23:15:04.514 DEBG 43 Reopen extent 146
36262 Sep 22 23:15:04.514 DEBG [2] It's time to notify for 43
36263 Sep 22 23:15:04.514 INFO Completion from [2] id:43 status:true
36264 Sep 22 23:15:04.514 INFO [44/752] Repair commands completed
36265 Sep 22 23:15:04.514 INFO Pop front: ReconcileIO { id: ReconciliationId(44), op: ExtentFlush { repair_id: ReconciliationId(44), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36266 Sep 22 23:15:04.514 INFO Sent repair work, now wait for resp
36267 Sep 22 23:15:04.514 INFO [0] received reconcile message
36268 Sep 22 23:15:04.514 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(44), op: ExtentFlush { repair_id: ReconciliationId(44), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36269 Sep 22 23:15:04.514 INFO [0] client ExtentFlush { repair_id: ReconciliationId(44), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36270 Sep 22 23:15:04.514 INFO [1] received reconcile message
36271 Sep 22 23:15:04.514 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(44), op: ExtentFlush { repair_id: ReconciliationId(44), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36272 Sep 22 23:15:04.514 INFO [1] client ExtentFlush { repair_id: ReconciliationId(44), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36273 Sep 22 23:15:04.514 INFO [2] received reconcile message
36274 Sep 22 23:15:04.514 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(44), op: ExtentFlush { repair_id: ReconciliationId(44), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36275 Sep 22 23:15:04.514 INFO [2] client ExtentFlush { repair_id: ReconciliationId(44), extent_id: 105, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36276 Sep 22 23:15:04.515 DEBG 44 Flush extent 105 with f:2 g:2
36277 Sep 22 23:15:04.515 DEBG Flush just extent 105 with f:2 and g:2
36278 Sep 22 23:15:04.515 DEBG [1] It's time to notify for 44
36279 Sep 22 23:15:04.515 INFO Completion from [1] id:44 status:true
36280 Sep 22 23:15:04.515 INFO [45/752] Repair commands completed
36281 Sep 22 23:15:04.515 INFO Pop front: ReconcileIO { id: ReconciliationId(45), op: ExtentClose { repair_id: ReconciliationId(45), extent_id: 105 }, state: ClientData([New, New, New]) }
36282 Sep 22 23:15:04.515 INFO Sent repair work, now wait for resp
36283 Sep 22 23:15:04.515 INFO [0] received reconcile message
36284 Sep 22 23:15:04.515 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(45), op: ExtentClose { repair_id: ReconciliationId(45), extent_id: 105 }, state: ClientData([InProgress, New, New]) }, : downstairs
36285 Sep 22 23:15:04.515 INFO [0] client ExtentClose { repair_id: ReconciliationId(45), extent_id: 105 }
36286 Sep 22 23:15:04.515 INFO [1] received reconcile message
36287 Sep 22 23:15:04.515 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(45), op: ExtentClose { repair_id: ReconciliationId(45), extent_id: 105 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36288 Sep 22 23:15:04.515 INFO [1] client ExtentClose { repair_id: ReconciliationId(45), extent_id: 105 }
36289 Sep 22 23:15:04.515 INFO [2] received reconcile message
36290 Sep 22 23:15:04.515 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(45), op: ExtentClose { repair_id: ReconciliationId(45), extent_id: 105 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36291 Sep 22 23:15:04.515 INFO [2] client ExtentClose { repair_id: ReconciliationId(45), extent_id: 105 }
36292 Sep 22 23:15:04.515 DEBG 45 Close extent 105
36293 Sep 22 23:15:04.515 DEBG 45 Close extent 105
36294 Sep 22 23:15:04.516 DEBG 45 Close extent 105
36295 Sep 22 23:15:04.516 DEBG [2] It's time to notify for 45
36296 Sep 22 23:15:04.516 INFO Completion from [2] id:45 status:true
36297 Sep 22 23:15:04.516 INFO [46/752] Repair commands completed
36298 Sep 22 23:15:04.516 INFO Pop front: ReconcileIO { id: ReconciliationId(46), op: ExtentRepair { repair_id: ReconciliationId(46), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36299 Sep 22 23:15:04.516 INFO Sent repair work, now wait for resp
36300 Sep 22 23:15:04.516 INFO [0] received reconcile message
36301 Sep 22 23:15:04.516 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(46), op: ExtentRepair { repair_id: ReconciliationId(46), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36302 Sep 22 23:15:04.516 INFO [0] client ExtentRepair { repair_id: ReconciliationId(46), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36303 Sep 22 23:15:04.516 INFO [0] Sending repair request ReconciliationId(46)
36304 Sep 22 23:15:04.516 INFO [1] received reconcile message
36305 Sep 22 23:15:04.516 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(46), op: ExtentRepair { repair_id: ReconciliationId(46), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36306 Sep 22 23:15:04.516 INFO [1] client ExtentRepair { repair_id: ReconciliationId(46), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36307 Sep 22 23:15:04.516 INFO [1] No action required ReconciliationId(46)
36308 Sep 22 23:15:04.516 INFO [2] received reconcile message
36309 Sep 22 23:15:04.516 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(46), op: ExtentRepair { repair_id: ReconciliationId(46), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36310 Sep 22 23:15:04.516 INFO [2] client ExtentRepair { repair_id: ReconciliationId(46), extent_id: 105, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36311 Sep 22 23:15:04.516 INFO [2] No action required ReconciliationId(46)
36312 Sep 22 23:15:04.517 DEBG 46 Repair extent 105 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
36313 Sep 22 23:15:04.517 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/069.copy"
36314 Sep 22 23:15:04.580 INFO accepted connection, remote_addr: 127.0.0.1:34460, local_addr: 127.0.0.1:46213, task: repair
36315 Sep 22 23:15:04.580 TRCE incoming request, uri: /extent/105/files, method: GET, req_id: a51b7036-8645-4bd0-9e38-f04c561524f8, remote_addr: 127.0.0.1:34460, local_addr: 127.0.0.1:46213, task: repair
36316 Sep 22 23:15:04.580 INFO [0] 6e5c0f70-fd56-4280-9d20-71288e488216 looper connected, looper: 0
36317 Sep 22 23:15:04.580 INFO [0] Proc runs for 127.0.0.1:50216 in state Replaced
36318 Sep 22 23:15:04.580 INFO accepted connection from 127.0.0.1:49037, task: main
36319 Sep 22 23:15:04.581 INFO request completed, latency_us: 367, response_code: 200, uri: /extent/105/files, method: GET, req_id: a51b7036-8645-4bd0-9e38-f04c561524f8, remote_addr: 127.0.0.1:34460, local_addr: 127.0.0.1:46213, task: repair
36320 Sep 22 23:15:04.581 INFO eid:105 Found repair files: ["069", "069.db"]
36321 Sep 22 23:15:04.581 TRCE incoming request, uri: /newextent/105/data, method: GET, req_id: 63859b79-03ec-417b-a25a-1c0153228680, remote_addr: 127.0.0.1:34460, local_addr: 127.0.0.1:46213, task: repair
36322 Sep 22 23:15:04.581 INFO request completed, latency_us: 259, response_code: 200, uri: /newextent/105/data, method: GET, req_id: 63859b79-03ec-417b-a25a-1c0153228680, remote_addr: 127.0.0.1:34460, local_addr: 127.0.0.1:46213, task: repair
36323 Sep 22 23:15:04.587 TRCE incoming request, uri: /newextent/105/db, method: GET, req_id: 73725398-9ab7-4f67-800a-ba915c1d2207, remote_addr: 127.0.0.1:34460, local_addr: 127.0.0.1:46213, task: repair
36324 Sep 22 23:15:04.587 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/105/db, method: GET, req_id: 73725398-9ab7-4f67-800a-ba915c1d2207, remote_addr: 127.0.0.1:34460, local_addr: 127.0.0.1:46213, task: repair
36325 Sep 22 23:15:04.588 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/069.copy" to "/tmp/downstairs-vrx8aK6L/00/000/069.replace"
36326 Sep 22 23:15:04.588 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36327 Sep 22 23:15:04.589 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/069.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
36328 Sep 22 23:15:04.589 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/069"
36329 Sep 22 23:15:04.589 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/069.db"
36330 Sep 22 23:15:04.589 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36331 Sep 22 23:15:04.589 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/069.replace" to "/tmp/downstairs-vrx8aK6L/00/000/069.completed"
36332 Sep 22 23:15:04.589 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36333 Sep 22 23:15:04.589 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36334 Sep 22 23:15:04.590 INFO Connection request from 6e5c0f70-fd56-4280-9d20-71288e488216 with version 4, task: proc
36335 Sep 22 23:15:04.590 INFO upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } connected, version 4, task: proc
36336 Sep 22 23:15:04.590 DEBG [0] It's time to notify for 46
36337 Sep 22 23:15:04.590 INFO [0] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) Replaced Disconnected Disconnected ds_transition to WaitActive
36338 thread 'test::integration_test_volume_replace_downstairs_then_takeover' panicked at '[0] 6e5c0f70-fd56-4280-9d20-71288e488216 Initializing Negotiation failed, Replaced -> WaitActive', upstairs/src/lib.rs:6349:21
36339 note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
36340 Sep 22 23:15:04.590 INFO Completion from [0] id:46 status:true
36341 Sep 22 23:15:04.591 INFO [47/752] Repair commands completed
36342 Sep 22 23:15:04.591 INFO Pop front: ReconcileIO { id: ReconciliationId(47), op: ExtentReopen { repair_id: ReconciliationId(47), extent_id: 105 }, state: ClientData([New, New, New]) }
36343 Sep 22 23:15:04.591 INFO Sent repair work, now wait for resp
36344 Sep 22 23:15:04.591 INFO [0] received reconcile message
36345 Sep 22 23:15:04.591 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(47), op: ExtentReopen { repair_id: ReconciliationId(47), extent_id: 105 }, state: ClientData([InProgress, New, New]) }, : downstairs
36346 Sep 22 23:15:04.591 INFO [0] client ExtentReopen { repair_id: ReconciliationId(47), extent_id: 105 }
36347 Sep 22 23:15:04.591 INFO [1] received reconcile message
36348 Sep 22 23:15:04.591 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(47), op: ExtentReopen { repair_id: ReconciliationId(47), extent_id: 105 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36349 Sep 22 23:15:04.591 INFO [1] client ExtentReopen { repair_id: ReconciliationId(47), extent_id: 105 }
36350 Sep 22 23:15:04.591 INFO [2] received reconcile message
36351 Sep 22 23:15:04.591 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(47), op: ExtentReopen { repair_id: ReconciliationId(47), extent_id: 105 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36352 Sep 22 23:15:04.591 INFO [2] client ExtentReopen { repair_id: ReconciliationId(47), extent_id: 105 }
36353 Sep 22 23:15:04.591 WARN UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } cannot grab lock, does not match UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: 2cf90053-4dbd-4cc4-8468-9d35f085e47a, gen: 2 }!
36354 Sep 22 23:15:04.591 INFO upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } disconnected, Upstairs is not active, task: proc
36355 Sep 22 23:15:04.591 INFO connection (127.0.0.1:49037): all done
36356 Sep 22 23:15:04.591 DEBG 47 Reopen extent 105
36357 Sep 22 23:15:04.592 DEBG 47 Reopen extent 105
36358 Sep 22 23:15:04.592 DEBG 47 Reopen extent 105
36359 Sep 22 23:15:04.593 DEBG [2] It's time to notify for 47
36360 Sep 22 23:15:04.593 INFO Completion from [2] id:47 status:true
36361 Sep 22 23:15:04.593 INFO [48/752] Repair commands completed
36362 Sep 22 23:15:04.593 INFO Pop front: ReconcileIO { id: ReconciliationId(48), op: ExtentFlush { repair_id: ReconciliationId(48), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36363 Sep 22 23:15:04.593 INFO Sent repair work, now wait for resp
36364 Sep 22 23:15:04.593 INFO [0] received reconcile message
36365 Sep 22 23:15:04.593 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(48), op: ExtentFlush { repair_id: ReconciliationId(48), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36366 Sep 22 23:15:04.593 INFO [0] client ExtentFlush { repair_id: ReconciliationId(48), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36367 Sep 22 23:15:04.593 INFO [1] received reconcile message
36368 Sep 22 23:15:04.593 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(48), op: ExtentFlush { repair_id: ReconciliationId(48), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36369 Sep 22 23:15:04.593 INFO [1] client ExtentFlush { repair_id: ReconciliationId(48), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36370 Sep 22 23:15:04.593 INFO [2] received reconcile message
36371 Sep 22 23:15:04.593 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(48), op: ExtentFlush { repair_id: ReconciliationId(48), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36372 Sep 22 23:15:04.593 INFO [2] client ExtentFlush { repair_id: ReconciliationId(48), extent_id: 4, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36373 Sep 22 23:15:04.593 DEBG 48 Flush extent 4 with f:2 g:2
36374 Sep 22 23:15:04.593 DEBG Flush just extent 4 with f:2 and g:2
36375 Sep 22 23:15:04.593 DEBG [1] It's time to notify for 48
36376 Sep 22 23:15:04.593 INFO Completion from [1] id:48 status:true
36377 Sep 22 23:15:04.593 INFO [49/752] Repair commands completed
36378 Sep 22 23:15:04.593 INFO Pop front: ReconcileIO { id: ReconciliationId(49), op: ExtentClose { repair_id: ReconciliationId(49), extent_id: 4 }, state: ClientData([New, New, New]) }
36379 Sep 22 23:15:04.593 INFO Sent repair work, now wait for resp
36380 Sep 22 23:15:04.593 INFO [0] received reconcile message
36381 Sep 22 23:15:04.593 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(49), op: ExtentClose { repair_id: ReconciliationId(49), extent_id: 4 }, state: ClientData([InProgress, New, New]) }, : downstairs
36382 Sep 22 23:15:04.593 INFO [0] client ExtentClose { repair_id: ReconciliationId(49), extent_id: 4 }
36383 Sep 22 23:15:04.594 INFO [1] received reconcile message
36384 Sep 22 23:15:04.594 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(49), op: ExtentClose { repair_id: ReconciliationId(49), extent_id: 4 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36385 Sep 22 23:15:04.594 INFO [1] client ExtentClose { repair_id: ReconciliationId(49), extent_id: 4 }
36386 Sep 22 23:15:04.594 INFO [2] received reconcile message
36387 Sep 22 23:15:04.594 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(49), op: ExtentClose { repair_id: ReconciliationId(49), extent_id: 4 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36388 Sep 22 23:15:04.594 INFO [2] client ExtentClose { repair_id: ReconciliationId(49), extent_id: 4 }
36389 Sep 22 23:15:04.594 DEBG 49 Close extent 4
36390 Sep 22 23:15:04.594 DEBG 49 Close extent 4
36391 Sep 22 23:15:04.594 DEBG 49 Close extent 4
36392 Sep 22 23:15:04.595 DEBG [2] It's time to notify for 49
36393 Sep 22 23:15:04.595 INFO Completion from [2] id:49 status:true
36394 Sep 22 23:15:04.595 INFO [50/752] Repair commands completed
36395 Sep 22 23:15:04.595 INFO Pop front: ReconcileIO { id: ReconciliationId(50), op: ExtentRepair { repair_id: ReconciliationId(50), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36396 Sep 22 23:15:04.595 INFO Sent repair work, now wait for resp
36397 Sep 22 23:15:04.595 INFO [0] received reconcile message
36398 Sep 22 23:15:04.595 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(50), op: ExtentRepair { repair_id: ReconciliationId(50), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36399 Sep 22 23:15:04.595 INFO [0] client ExtentRepair { repair_id: ReconciliationId(50), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36400 Sep 22 23:15:04.595 INFO [0] Sending repair request ReconciliationId(50)
36401 Sep 22 23:15:04.595 INFO [1] received reconcile message
36402 Sep 22 23:15:04.595 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(50), op: ExtentRepair { repair_id: ReconciliationId(50), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36403 Sep 22 23:15:04.595 INFO [1] client ExtentRepair { repair_id: ReconciliationId(50), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36404 Sep 22 23:15:04.595 INFO [1] No action required ReconciliationId(50)
36405 Sep 22 23:15:04.595 INFO [2] received reconcile message
36406 Sep 22 23:15:04.595 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(50), op: ExtentRepair { repair_id: ReconciliationId(50), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36407 Sep 22 23:15:04.595 INFO [2] client ExtentRepair { repair_id: ReconciliationId(50), extent_id: 4, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36408 Sep 22 23:15:04.595 INFO [2] No action required ReconciliationId(50)
36409 Sep 22 23:15:04.595 DEBG 50 Repair extent 4 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
36410 Sep 22 23:15:04.595 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/004.copy"
36411 Sep 22 23:15:04.660 INFO accepted connection, remote_addr: 127.0.0.1:36698, local_addr: 127.0.0.1:46213, task: repair
36412 Sep 22 23:15:04.661 TRCE incoming request, uri: /extent/4/files, method: GET, req_id: ab73936a-a5ba-49a4-99d4-012d27718f57, remote_addr: 127.0.0.1:36698, local_addr: 127.0.0.1:46213, task: repair
36413 Sep 22 23:15:04.661 INFO [1] 6e5c0f70-fd56-4280-9d20-71288e488216 looper connected, looper: 1
36414 Sep 22 23:15:04.661 INFO [1] Proc runs for 127.0.0.1:64149 in state Disconnected
36415 Sep 22 23:15:04.661 INFO [2] 6e5c0f70-fd56-4280-9d20-71288e488216 looper connected, looper: 2
36416 Sep 22 23:15:04.661 INFO [2] Proc runs for 127.0.0.1:58182 in state Disconnected
36417 Sep 22 23:15:04.661 INFO accepted connection from 127.0.0.1:34434, task: main
36418 Sep 22 23:15:04.661 INFO accepted connection from 127.0.0.1:37115, task: main
36419 Sep 22 23:15:04.661 INFO request completed, latency_us: 638, response_code: 200, uri: /extent/4/files, method: GET, req_id: ab73936a-a5ba-49a4-99d4-012d27718f57, remote_addr: 127.0.0.1:36698, local_addr: 127.0.0.1:46213, task: repair
36420 Sep 22 23:15:04.662 INFO Connection request from 6e5c0f70-fd56-4280-9d20-71288e488216 with version 4, task: proc
36421 Sep 22 23:15:04.662 INFO upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } connected, version 4, task: proc
36422 Sep 22 23:15:04.662 INFO Connection request from 6e5c0f70-fd56-4280-9d20-71288e488216 with version 4, task: proc
36423 Sep 22 23:15:04.662 INFO upstairs UpstairsConnection { upstairs_id: 6e5c0f70-fd56-4280-9d20-71288e488216, session_id: f334c95e-b851-4a8a-a731-3fb69e42e934, gen: 1 } connected, version 4, task: proc
36424 Sep 22 23:15:04.662 INFO eid:4 Found repair files: ["004", "004.db"]
36425 Sep 22 23:15:04.662 TRCE incoming request, uri: /newextent/4/data, method: GET, req_id: 6d92556d-1fa6-4d2e-aac8-16488048fcbe, remote_addr: 127.0.0.1:36698, local_addr: 127.0.0.1:46213, task: repair
36426 Sep 22 23:15:04.662 INFO [1] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) Replaced Disconnected Disconnected ds_transition to WaitActive
36427 Sep 22 23:15:04.662 INFO [1] Transition from Disconnected to WaitActive
36428 Sep 22 23:15:04.662 INFO [2] 6e5c0f70-fd56-4280-9d20-71288e488216 (f334c95e-b851-4a8a-a731-3fb69e42e934) Replaced WaitActive Disconnected ds_transition to WaitActive
36429 Sep 22 23:15:04.662 INFO [2] Transition from Disconnected to WaitActive
36430 Sep 22 23:15:04.663 INFO request completed, latency_us: 526, response_code: 200, uri: /newextent/4/data, method: GET, req_id: 6d92556d-1fa6-4d2e-aac8-16488048fcbe, remote_addr: 127.0.0.1:36698, local_addr: 127.0.0.1:46213, task: repair
36431 Sep 22 23:15:04.668 TRCE incoming request, uri: /newextent/4/db, method: GET, req_id: 023adc09-90c4-4da1-99b2-2b793557932e, remote_addr: 127.0.0.1:36698, local_addr: 127.0.0.1:46213, task: repair
36432 Sep 22 23:15:04.668 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/4/db, method: GET, req_id: 023adc09-90c4-4da1-99b2-2b793557932e, remote_addr: 127.0.0.1:36698, local_addr: 127.0.0.1:46213, task: repair
36433 Sep 22 23:15:04.669 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/004.copy" to "/tmp/downstairs-vrx8aK6L/00/000/004.replace"
36434 Sep 22 23:15:04.669 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36435 Sep 22 23:15:04.670 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/004.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
36436 Sep 22 23:15:04.671 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/004"
36437 Sep 22 23:15:04.671 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/004.db"
36438 Sep 22 23:15:04.671 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36439 Sep 22 23:15:04.671 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/004.replace" to "/tmp/downstairs-vrx8aK6L/00/000/004.completed"
36440 Sep 22 23:15:04.671 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36441 Sep 22 23:15:04.671 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36442 Sep 22 23:15:04.671 DEBG [0] It's time to notify for 50
36443 Sep 22 23:15:04.671 INFO Completion from [0] id:50 status:true
36444 Sep 22 23:15:04.671 INFO [51/752] Repair commands completed
36445 Sep 22 23:15:04.671 INFO Pop front: ReconcileIO { id: ReconciliationId(51), op: ExtentReopen { repair_id: ReconciliationId(51), extent_id: 4 }, state: ClientData([New, New, New]) }
36446 Sep 22 23:15:04.671 INFO Sent repair work, now wait for resp
36447 Sep 22 23:15:04.671 INFO [0] received reconcile message
36448 Sep 22 23:15:04.671 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(51), op: ExtentReopen { repair_id: ReconciliationId(51), extent_id: 4 }, state: ClientData([InProgress, New, New]) }, : downstairs
36449 Sep 22 23:15:04.671 INFO [0] client ExtentReopen { repair_id: ReconciliationId(51), extent_id: 4 }
36450 Sep 22 23:15:04.671 INFO [1] received reconcile message
36451 Sep 22 23:15:04.671 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(51), op: ExtentReopen { repair_id: ReconciliationId(51), extent_id: 4 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36452 Sep 22 23:15:04.671 INFO [1] client ExtentReopen { repair_id: ReconciliationId(51), extent_id: 4 }
36453 Sep 22 23:15:04.671 INFO [2] received reconcile message
36454 Sep 22 23:15:04.671 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(51), op: ExtentReopen { repair_id: ReconciliationId(51), extent_id: 4 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36455 Sep 22 23:15:04.671 INFO [2] client ExtentReopen { repair_id: ReconciliationId(51), extent_id: 4 }
36456 Sep 22 23:15:04.672 DEBG 51 Reopen extent 4
36457 Sep 22 23:15:04.672 DEBG 51 Reopen extent 4
36458 Sep 22 23:15:04.673 DEBG 51 Reopen extent 4
36459 Sep 22 23:15:04.674 DEBG [2] It's time to notify for 51
36460 Sep 22 23:15:04.674 INFO Completion from [2] id:51 status:true
36461 Sep 22 23:15:04.674 INFO [52/752] Repair commands completed
36462 Sep 22 23:15:04.674 INFO Pop front: ReconcileIO { id: ReconciliationId(52), op: ExtentFlush { repair_id: ReconciliationId(52), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36463 Sep 22 23:15:04.674 INFO Sent repair work, now wait for resp
36464 Sep 22 23:15:04.674 INFO [0] received reconcile message
36465 Sep 22 23:15:04.674 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(52), op: ExtentFlush { repair_id: ReconciliationId(52), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36466 Sep 22 23:15:04.674 INFO [0] client ExtentFlush { repair_id: ReconciliationId(52), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36467 Sep 22 23:15:04.674 INFO [1] received reconcile message
36468 Sep 22 23:15:04.674 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(52), op: ExtentFlush { repair_id: ReconciliationId(52), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36469 Sep 22 23:15:04.674 INFO [1] client ExtentFlush { repair_id: ReconciliationId(52), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36470 Sep 22 23:15:04.674 INFO [2] received reconcile message
36471 Sep 22 23:15:04.674 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(52), op: ExtentFlush { repair_id: ReconciliationId(52), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36472 Sep 22 23:15:04.674 INFO [2] client ExtentFlush { repair_id: ReconciliationId(52), extent_id: 70, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36473 Sep 22 23:15:04.674 DEBG 52 Flush extent 70 with f:2 g:2
36474 Sep 22 23:15:04.674 DEBG Flush just extent 70 with f:2 and g:2
36475 Sep 22 23:15:04.674 DEBG [1] It's time to notify for 52
36476 Sep 22 23:15:04.674 INFO Completion from [1] id:52 status:true
36477 Sep 22 23:15:04.674 INFO [53/752] Repair commands completed
36478 Sep 22 23:15:04.674 INFO Pop front: ReconcileIO { id: ReconciliationId(53), op: ExtentClose { repair_id: ReconciliationId(53), extent_id: 70 }, state: ClientData([New, New, New]) }
36479 Sep 22 23:15:04.674 INFO Sent repair work, now wait for resp
36480 Sep 22 23:15:04.674 INFO [0] received reconcile message
36481 Sep 22 23:15:04.674 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(53), op: ExtentClose { repair_id: ReconciliationId(53), extent_id: 70 }, state: ClientData([InProgress, New, New]) }, : downstairs
36482 Sep 22 23:15:04.674 INFO [0] client ExtentClose { repair_id: ReconciliationId(53), extent_id: 70 }
36483 Sep 22 23:15:04.674 INFO [1] received reconcile message
36484 Sep 22 23:15:04.674 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(53), op: ExtentClose { repair_id: ReconciliationId(53), extent_id: 70 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36485 Sep 22 23:15:04.674 INFO [1] client ExtentClose { repair_id: ReconciliationId(53), extent_id: 70 }
36486 Sep 22 23:15:04.674 INFO [2] received reconcile message
36487 Sep 22 23:15:04.674 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(53), op: ExtentClose { repair_id: ReconciliationId(53), extent_id: 70 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36488 Sep 22 23:15:04.674 INFO [2] client ExtentClose { repair_id: ReconciliationId(53), extent_id: 70 }
36489 Sep 22 23:15:04.675 DEBG 53 Close extent 70
36490 Sep 22 23:15:04.675 DEBG 53 Close extent 70
36491 Sep 22 23:15:04.675 DEBG 53 Close extent 70
36492 Sep 22 23:15:04.676 DEBG [2] It's time to notify for 53
36493 Sep 22 23:15:04.676 INFO Completion from [2] id:53 status:true
36494 Sep 22 23:15:04.676 INFO [54/752] Repair commands completed
36495 Sep 22 23:15:04.676 INFO Pop front: ReconcileIO { id: ReconciliationId(54), op: ExtentRepair { repair_id: ReconciliationId(54), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36496 Sep 22 23:15:04.676 INFO Sent repair work, now wait for resp
36497 Sep 22 23:15:04.676 INFO [0] received reconcile message
36498 Sep 22 23:15:04.676 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(54), op: ExtentRepair { repair_id: ReconciliationId(54), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36499 Sep 22 23:15:04.676 INFO [0] client ExtentRepair { repair_id: ReconciliationId(54), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36500 Sep 22 23:15:04.676 INFO [0] Sending repair request ReconciliationId(54)
36501 Sep 22 23:15:04.676 INFO [1] received reconcile message
36502 Sep 22 23:15:04.676 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(54), op: ExtentRepair { repair_id: ReconciliationId(54), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36503 Sep 22 23:15:04.676 INFO [1] client ExtentRepair { repair_id: ReconciliationId(54), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36504 Sep 22 23:15:04.676 INFO [1] No action required ReconciliationId(54)
36505 Sep 22 23:15:04.676 INFO [2] received reconcile message
36506 Sep 22 23:15:04.676 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(54), op: ExtentRepair { repair_id: ReconciliationId(54), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36507 Sep 22 23:15:04.676 INFO [2] client ExtentRepair { repair_id: ReconciliationId(54), extent_id: 70, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36508 Sep 22 23:15:04.676 INFO [2] No action required ReconciliationId(54)
36509 Sep 22 23:15:04.676 DEBG 54 Repair extent 70 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
36510 Sep 22 23:15:04.676 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/046.copy"
36511 Sep 22 23:15:04.738 INFO accepted connection, remote_addr: 127.0.0.1:53939, local_addr: 127.0.0.1:46213, task: repair
36512 Sep 22 23:15:04.738 TRCE incoming request, uri: /extent/70/files, method: GET, req_id: 868e61c7-efb7-4dfe-9d51-df237d187d2f, remote_addr: 127.0.0.1:53939, local_addr: 127.0.0.1:46213, task: repair
36513 Sep 22 23:15:04.738 INFO request completed, latency_us: 236, response_code: 200, uri: /extent/70/files, method: GET, req_id: 868e61c7-efb7-4dfe-9d51-df237d187d2f, remote_addr: 127.0.0.1:53939, local_addr: 127.0.0.1:46213, task: repair
36514 Sep 22 23:15:04.739 INFO eid:70 Found repair files: ["046", "046.db"]
36515 Sep 22 23:15:04.739 TRCE incoming request, uri: /newextent/70/data, method: GET, req_id: 3787010d-16e3-4685-883f-ec8c8ed44c31, remote_addr: 127.0.0.1:53939, local_addr: 127.0.0.1:46213, task: repair
36516 Sep 22 23:15:04.739 INFO request completed, latency_us: 332, response_code: 200, uri: /newextent/70/data, method: GET, req_id: 3787010d-16e3-4685-883f-ec8c8ed44c31, remote_addr: 127.0.0.1:53939, local_addr: 127.0.0.1:46213, task: repair
36517 Sep 22 23:15:04.744 TRCE incoming request, uri: /newextent/70/db, method: GET, req_id: e5987dd1-38bc-4583-97a9-83b0d1f57100, remote_addr: 127.0.0.1:53939, local_addr: 127.0.0.1:46213, task: repair
36518 Sep 22 23:15:04.745 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/70/db, method: GET, req_id: e5987dd1-38bc-4583-97a9-83b0d1f57100, remote_addr: 127.0.0.1:53939, local_addr: 127.0.0.1:46213, task: repair
36519 Sep 22 23:15:04.746 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/046.copy" to "/tmp/downstairs-vrx8aK6L/00/000/046.replace"
36520 Sep 22 23:15:04.746 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36521 Sep 22 23:15:04.747 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/046.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
36522 Sep 22 23:15:04.747 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/046"
36523 Sep 22 23:15:04.747 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/046.db"
36524 Sep 22 23:15:04.747 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36525 Sep 22 23:15:04.747 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/046.replace" to "/tmp/downstairs-vrx8aK6L/00/000/046.completed"
36526 Sep 22 23:15:04.747 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36527 Sep 22 23:15:04.747 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36528 Sep 22 23:15:04.747 DEBG [0] It's time to notify for 54
36529 Sep 22 23:15:04.748 INFO Completion from [0] id:54 status:true
36530 Sep 22 23:15:04.748 INFO [55/752] Repair commands completed
36531 Sep 22 23:15:04.748 INFO Pop front: ReconcileIO { id: ReconciliationId(55), op: ExtentReopen { repair_id: ReconciliationId(55), extent_id: 70 }, state: ClientData([New, New, New]) }
36532 Sep 22 23:15:04.748 INFO Sent repair work, now wait for resp
36533 Sep 22 23:15:04.748 INFO [0] received reconcile message
36534 Sep 22 23:15:04.748 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(55), op: ExtentReopen { repair_id: ReconciliationId(55), extent_id: 70 }, state: ClientData([InProgress, New, New]) }, : downstairs
36535 Sep 22 23:15:04.748 INFO [0] client ExtentReopen { repair_id: ReconciliationId(55), extent_id: 70 }
36536 Sep 22 23:15:04.748 INFO [1] received reconcile message
36537 Sep 22 23:15:04.748 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(55), op: ExtentReopen { repair_id: ReconciliationId(55), extent_id: 70 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36538 Sep 22 23:15:04.748 INFO [1] client ExtentReopen { repair_id: ReconciliationId(55), extent_id: 70 }
36539 Sep 22 23:15:04.748 INFO [2] received reconcile message
36540 Sep 22 23:15:04.748 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(55), op: ExtentReopen { repair_id: ReconciliationId(55), extent_id: 70 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36541 Sep 22 23:15:04.748 INFO [2] client ExtentReopen { repair_id: ReconciliationId(55), extent_id: 70 }
36542 Sep 22 23:15:04.748 DEBG 55 Reopen extent 70
36543 Sep 22 23:15:04.749 DEBG 55 Reopen extent 70
36544 Sep 22 23:15:04.749 DEBG 55 Reopen extent 70
36545 Sep 22 23:15:04.750 DEBG [2] It's time to notify for 55
36546 Sep 22 23:15:04.750 INFO Completion from [2] id:55 status:true
36547 Sep 22 23:15:04.750 INFO [56/752] Repair commands completed
36548 Sep 22 23:15:04.750 INFO Pop front: ReconcileIO { id: ReconciliationId(56), op: ExtentFlush { repair_id: ReconciliationId(56), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36549 Sep 22 23:15:04.750 INFO Sent repair work, now wait for resp
36550 Sep 22 23:15:04.750 INFO [0] received reconcile message
36551 Sep 22 23:15:04.750 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(56), op: ExtentFlush { repair_id: ReconciliationId(56), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36552 Sep 22 23:15:04.750 INFO [0] client ExtentFlush { repair_id: ReconciliationId(56), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36553 Sep 22 23:15:04.750 INFO [1] received reconcile message
36554 Sep 22 23:15:04.750 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(56), op: ExtentFlush { repair_id: ReconciliationId(56), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36555 Sep 22 23:15:04.750 INFO [1] client ExtentFlush { repair_id: ReconciliationId(56), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36556 Sep 22 23:15:04.750 INFO [2] received reconcile message
36557 Sep 22 23:15:04.750 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(56), op: ExtentFlush { repair_id: ReconciliationId(56), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36558 Sep 22 23:15:04.750 INFO [2] client ExtentFlush { repair_id: ReconciliationId(56), extent_id: 113, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36559 Sep 22 23:15:04.750 DEBG 56 Flush extent 113 with f:2 g:2
36560 Sep 22 23:15:04.750 DEBG Flush just extent 113 with f:2 and g:2
36561 Sep 22 23:15:04.750 DEBG [1] It's time to notify for 56
36562 Sep 22 23:15:04.750 INFO Completion from [1] id:56 status:true
36563 Sep 22 23:15:04.750 INFO [57/752] Repair commands completed
36564 Sep 22 23:15:04.750 INFO Pop front: ReconcileIO { id: ReconciliationId(57), op: ExtentClose { repair_id: ReconciliationId(57), extent_id: 113 }, state: ClientData([New, New, New]) }
36565 Sep 22 23:15:04.750 INFO Sent repair work, now wait for resp
36566 Sep 22 23:15:04.750 INFO [0] received reconcile message
36567 Sep 22 23:15:04.750 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(57), op: ExtentClose { repair_id: ReconciliationId(57), extent_id: 113 }, state: ClientData([InProgress, New, New]) }, : downstairs
36568 Sep 22 23:15:04.751 INFO [0] client ExtentClose { repair_id: ReconciliationId(57), extent_id: 113 }
36569 Sep 22 23:15:04.751 INFO [1] received reconcile message
36570 Sep 22 23:15:04.751 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(57), op: ExtentClose { repair_id: ReconciliationId(57), extent_id: 113 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36571 Sep 22 23:15:04.751 INFO [1] client ExtentClose { repair_id: ReconciliationId(57), extent_id: 113 }
36572 Sep 22 23:15:04.751 INFO [2] received reconcile message
36573 Sep 22 23:15:04.751 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(57), op: ExtentClose { repair_id: ReconciliationId(57), extent_id: 113 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36574 Sep 22 23:15:04.751 INFO [2] client ExtentClose { repair_id: ReconciliationId(57), extent_id: 113 }
36575 Sep 22 23:15:04.751 DEBG 57 Close extent 113
36576 Sep 22 23:15:04.751 DEBG 57 Close extent 113
36577 Sep 22 23:15:04.751 DEBG 57 Close extent 113
36578 Sep 22 23:15:04.752 DEBG [2] It's time to notify for 57
36579 Sep 22 23:15:04.752 INFO Completion from [2] id:57 status:true
36580 Sep 22 23:15:04.752 INFO [58/752] Repair commands completed
36581 Sep 22 23:15:04.752 INFO Pop front: ReconcileIO { id: ReconciliationId(58), op: ExtentRepair { repair_id: ReconciliationId(58), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36582 Sep 22 23:15:04.752 INFO Sent repair work, now wait for resp
36583 Sep 22 23:15:04.752 INFO [0] received reconcile message
36584 Sep 22 23:15:04.752 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(58), op: ExtentRepair { repair_id: ReconciliationId(58), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36585 Sep 22 23:15:04.752 INFO [0] client ExtentRepair { repair_id: ReconciliationId(58), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36586 Sep 22 23:15:04.752 INFO [0] Sending repair request ReconciliationId(58)
36587 Sep 22 23:15:04.752 INFO [1] received reconcile message
36588 Sep 22 23:15:04.752 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(58), op: ExtentRepair { repair_id: ReconciliationId(58), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36589 Sep 22 23:15:04.752 INFO [1] client ExtentRepair { repair_id: ReconciliationId(58), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36590 Sep 22 23:15:04.752 INFO [1] No action required ReconciliationId(58)
36591 Sep 22 23:15:04.752 INFO [2] received reconcile message
36592 Sep 22 23:15:04.752 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(58), op: ExtentRepair { repair_id: ReconciliationId(58), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36593 Sep 22 23:15:04.752 INFO [2] client ExtentRepair { repair_id: ReconciliationId(58), extent_id: 113, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36594 Sep 22 23:15:04.752 INFO [2] No action required ReconciliationId(58)
36595 Sep 22 23:15:04.752 DEBG 58 Repair extent 113 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
36596 Sep 22 23:15:04.752 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/071.copy"
36597 Sep 22 23:15:04.814 INFO accepted connection, remote_addr: 127.0.0.1:38597, local_addr: 127.0.0.1:46213, task: repair
36598 Sep 22 23:15:04.814 TRCE incoming request, uri: /extent/113/files, method: GET, req_id: ad804ade-f04e-4464-a5c3-c9ec73ce248e, remote_addr: 127.0.0.1:38597, local_addr: 127.0.0.1:46213, task: repair
36599 Sep 22 23:15:04.815 INFO request completed, latency_us: 242, response_code: 200, uri: /extent/113/files, method: GET, req_id: ad804ade-f04e-4464-a5c3-c9ec73ce248e, remote_addr: 127.0.0.1:38597, local_addr: 127.0.0.1:46213, task: repair
36600 Sep 22 23:15:04.815 INFO eid:113 Found repair files: ["071", "071.db"]
36601 Sep 22 23:15:04.815 TRCE incoming request, uri: /newextent/113/data, method: GET, req_id: 96e86de9-c029-4871-bffb-506effcb2b00, remote_addr: 127.0.0.1:38597, local_addr: 127.0.0.1:46213, task: repair
36602 Sep 22 23:15:04.816 INFO request completed, latency_us: 372, response_code: 200, uri: /newextent/113/data, method: GET, req_id: 96e86de9-c029-4871-bffb-506effcb2b00, remote_addr: 127.0.0.1:38597, local_addr: 127.0.0.1:46213, task: repair
36603 Sep 22 23:15:04.821 TRCE incoming request, uri: /newextent/113/db, method: GET, req_id: 1614d7e7-96eb-4743-816c-71c4aa2f6045, remote_addr: 127.0.0.1:38597, local_addr: 127.0.0.1:46213, task: repair
36604 Sep 22 23:15:04.821 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/113/db, method: GET, req_id: 1614d7e7-96eb-4743-816c-71c4aa2f6045, remote_addr: 127.0.0.1:38597, local_addr: 127.0.0.1:46213, task: repair
36605 Sep 22 23:15:04.823 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/071.copy" to "/tmp/downstairs-vrx8aK6L/00/000/071.replace"
36606 Sep 22 23:15:04.823 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36607 Sep 22 23:15:04.824 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/071.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
36608 Sep 22 23:15:04.824 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/071"
36609 Sep 22 23:15:04.824 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/071.db"
36610 Sep 22 23:15:04.824 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36611 Sep 22 23:15:04.824 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/071.replace" to "/tmp/downstairs-vrx8aK6L/00/000/071.completed"
36612 Sep 22 23:15:04.824 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36613 Sep 22 23:15:04.824 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36614 Sep 22 23:15:04.824 DEBG [0] It's time to notify for 58
36615 Sep 22 23:15:04.825 INFO Completion from [0] id:58 status:true
36616 Sep 22 23:15:04.825 INFO [59/752] Repair commands completed
36617 Sep 22 23:15:04.825 INFO Pop front: ReconcileIO { id: ReconciliationId(59), op: ExtentReopen { repair_id: ReconciliationId(59), extent_id: 113 }, state: ClientData([New, New, New]) }
36618 Sep 22 23:15:04.825 INFO Sent repair work, now wait for resp
36619 Sep 22 23:15:04.825 INFO [0] received reconcile message
36620 Sep 22 23:15:04.825 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(59), op: ExtentReopen { repair_id: ReconciliationId(59), extent_id: 113 }, state: ClientData([InProgress, New, New]) }, : downstairs
36621 Sep 22 23:15:04.825 INFO [0] client ExtentReopen { repair_id: ReconciliationId(59), extent_id: 113 }
36622 Sep 22 23:15:04.825 INFO [1] received reconcile message
36623 Sep 22 23:15:04.825 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(59), op: ExtentReopen { repair_id: ReconciliationId(59), extent_id: 113 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36624 Sep 22 23:15:04.825 INFO [1] client ExtentReopen { repair_id: ReconciliationId(59), extent_id: 113 }
36625 Sep 22 23:15:04.825 INFO [2] received reconcile message
36626 Sep 22 23:15:04.825 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(59), op: ExtentReopen { repair_id: ReconciliationId(59), extent_id: 113 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36627 Sep 22 23:15:04.825 INFO [2] client ExtentReopen { repair_id: ReconciliationId(59), extent_id: 113 }
36628 Sep 22 23:15:04.825 DEBG 59 Reopen extent 113
36629 Sep 22 23:15:04.826 DEBG 59 Reopen extent 113
36630 Sep 22 23:15:04.826 DEBG 59 Reopen extent 113
36631 Sep 22 23:15:04.827 DEBG [2] It's time to notify for 59
36632 Sep 22 23:15:04.827 INFO Completion from [2] id:59 status:true
36633 Sep 22 23:15:04.827 INFO [60/752] Repair commands completed
36634 Sep 22 23:15:04.827 INFO Pop front: ReconcileIO { id: ReconciliationId(60), op: ExtentFlush { repair_id: ReconciliationId(60), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36635 Sep 22 23:15:04.827 INFO Sent repair work, now wait for resp
36636 Sep 22 23:15:04.827 INFO [0] received reconcile message
36637 Sep 22 23:15:04.827 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(60), op: ExtentFlush { repair_id: ReconciliationId(60), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36638 Sep 22 23:15:04.827 INFO [0] client ExtentFlush { repair_id: ReconciliationId(60), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36639 Sep 22 23:15:04.827 INFO [1] received reconcile message
36640 Sep 22 23:15:04.827 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(60), op: ExtentFlush { repair_id: ReconciliationId(60), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36641 Sep 22 23:15:04.827 INFO [1] client ExtentFlush { repair_id: ReconciliationId(60), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36642 Sep 22 23:15:04.827 INFO [2] received reconcile message
36643 Sep 22 23:15:04.827 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(60), op: ExtentFlush { repair_id: ReconciliationId(60), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36644 Sep 22 23:15:04.827 INFO [2] client ExtentFlush { repair_id: ReconciliationId(60), extent_id: 85, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36645 Sep 22 23:15:04.827 DEBG 60 Flush extent 85 with f:2 g:2
36646 Sep 22 23:15:04.827 DEBG Flush just extent 85 with f:2 and g:2
36647 Sep 22 23:15:04.827 DEBG [1] It's time to notify for 60
36648 Sep 22 23:15:04.828 INFO Completion from [1] id:60 status:true
36649 Sep 22 23:15:04.828 INFO [61/752] Repair commands completed
36650 Sep 22 23:15:04.828 INFO Pop front: ReconcileIO { id: ReconciliationId(61), op: ExtentClose { repair_id: ReconciliationId(61), extent_id: 85 }, state: ClientData([New, New, New]) }
36651 Sep 22 23:15:04.828 INFO Sent repair work, now wait for resp
36652 Sep 22 23:15:04.828 INFO [0] received reconcile message
36653 Sep 22 23:15:04.828 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(61), op: ExtentClose { repair_id: ReconciliationId(61), extent_id: 85 }, state: ClientData([InProgress, New, New]) }, : downstairs
36654 Sep 22 23:15:04.828 INFO [0] client ExtentClose { repair_id: ReconciliationId(61), extent_id: 85 }
36655 Sep 22 23:15:04.828 INFO [1] received reconcile message
36656 Sep 22 23:15:04.828 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(61), op: ExtentClose { repair_id: ReconciliationId(61), extent_id: 85 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36657 Sep 22 23:15:04.828 INFO [1] client ExtentClose { repair_id: ReconciliationId(61), extent_id: 85 }
36658 Sep 22 23:15:04.828 INFO [2] received reconcile message
36659 Sep 22 23:15:04.828 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(61), op: ExtentClose { repair_id: ReconciliationId(61), extent_id: 85 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36660 Sep 22 23:15:04.828 INFO [2] client ExtentClose { repair_id: ReconciliationId(61), extent_id: 85 }
36661 Sep 22 23:15:04.828 DEBG 61 Close extent 85
36662 Sep 22 23:15:04.828 DEBG 61 Close extent 85
36663 Sep 22 23:15:04.829 DEBG 61 Close extent 85
36664 Sep 22 23:15:04.829 DEBG [2] It's time to notify for 61
36665 Sep 22 23:15:04.829 INFO Completion from [2] id:61 status:true
36666 Sep 22 23:15:04.829 INFO [62/752] Repair commands completed
36667 Sep 22 23:15:04.829 INFO Pop front: ReconcileIO { id: ReconciliationId(62), op: ExtentRepair { repair_id: ReconciliationId(62), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36668 Sep 22 23:15:04.829 INFO Sent repair work, now wait for resp
36669 Sep 22 23:15:04.829 INFO [0] received reconcile message
36670 Sep 22 23:15:04.829 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(62), op: ExtentRepair { repair_id: ReconciliationId(62), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36671 Sep 22 23:15:04.829 INFO [0] client ExtentRepair { repair_id: ReconciliationId(62), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36672 Sep 22 23:15:04.829 INFO [0] Sending repair request ReconciliationId(62)
36673 Sep 22 23:15:04.829 INFO [1] received reconcile message
36674 Sep 22 23:15:04.829 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(62), op: ExtentRepair { repair_id: ReconciliationId(62), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36675 Sep 22 23:15:04.829 INFO [1] client ExtentRepair { repair_id: ReconciliationId(62), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36676 Sep 22 23:15:04.829 INFO [1] No action required ReconciliationId(62)
36677 Sep 22 23:15:04.829 INFO [2] received reconcile message
36678 Sep 22 23:15:04.829 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(62), op: ExtentRepair { repair_id: ReconciliationId(62), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36679 Sep 22 23:15:04.829 INFO [2] client ExtentRepair { repair_id: ReconciliationId(62), extent_id: 85, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36680 Sep 22 23:15:04.829 INFO [2] No action required ReconciliationId(62)
36681 Sep 22 23:15:04.829 DEBG 62 Repair extent 85 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
36682 Sep 22 23:15:04.829 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/055.copy"
36683 Sep 22 23:15:04.880 DEBG [0] Read AckReady 1074, : downstairs
36684 Sep 22 23:15:04.881 DEBG up_ds_listen was notified
36685 Sep 22 23:15:04.881 DEBG up_ds_listen process 1074
36686 Sep 22 23:15:04.881 DEBG [A] ack job 1074:75, : downstairs
36687 Sep 22 23:15:04.892 INFO accepted connection, remote_addr: 127.0.0.1:50509, local_addr: 127.0.0.1:46213, task: repair
36688 Sep 22 23:15:04.892 TRCE incoming request, uri: /extent/85/files, method: GET, req_id: 6dbe19d5-6f02-4415-a9b8-7cf4e1d2d04e, remote_addr: 127.0.0.1:50509, local_addr: 127.0.0.1:46213, task: repair
36689 Sep 22 23:15:04.893 INFO request completed, latency_us: 231, response_code: 200, uri: /extent/85/files, method: GET, req_id: 6dbe19d5-6f02-4415-a9b8-7cf4e1d2d04e, remote_addr: 127.0.0.1:50509, local_addr: 127.0.0.1:46213, task: repair
36690 Sep 22 23:15:04.893 INFO eid:85 Found repair files: ["055", "055.db"]
36691 Sep 22 23:15:04.893 TRCE incoming request, uri: /newextent/85/data, method: GET, req_id: e77f3179-2814-4f74-9c6d-500ad3eec275, remote_addr: 127.0.0.1:50509, local_addr: 127.0.0.1:46213, task: repair
36692 Sep 22 23:15:04.894 INFO request completed, latency_us: 349, response_code: 200, uri: /newextent/85/data, method: GET, req_id: e77f3179-2814-4f74-9c6d-500ad3eec275, remote_addr: 127.0.0.1:50509, local_addr: 127.0.0.1:46213, task: repair
36693 Sep 22 23:15:04.899 TRCE incoming request, uri: /newextent/85/db, method: GET, req_id: 0b0333f2-4c84-4343-9b24-7b581563cf33, remote_addr: 127.0.0.1:50509, local_addr: 127.0.0.1:46213, task: repair
36694 Sep 22 23:15:04.899 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/85/db, method: GET, req_id: 0b0333f2-4c84-4343-9b24-7b581563cf33, remote_addr: 127.0.0.1:50509, local_addr: 127.0.0.1:46213, task: repair
36695 Sep 22 23:15:04.900 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/055.copy" to "/tmp/downstairs-vrx8aK6L/00/000/055.replace"
36696 Sep 22 23:15:04.900 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36697 Sep 22 23:15:04.901 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/055.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
36698 Sep 22 23:15:04.901 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/055"
36699 Sep 22 23:15:04.901 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/055.db"
36700 Sep 22 23:15:04.901 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36701 Sep 22 23:15:04.902 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/055.replace" to "/tmp/downstairs-vrx8aK6L/00/000/055.completed"
36702 Sep 22 23:15:04.902 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36703 Sep 22 23:15:04.902 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36704 Sep 22 23:15:04.902 DEBG [0] It's time to notify for 62
36705 Sep 22 23:15:04.902 INFO Completion from [0] id:62 status:true
36706 Sep 22 23:15:04.902 INFO [63/752] Repair commands completed
36707 Sep 22 23:15:04.902 INFO Pop front: ReconcileIO { id: ReconciliationId(63), op: ExtentReopen { repair_id: ReconciliationId(63), extent_id: 85 }, state: ClientData([New, New, New]) }
36708 Sep 22 23:15:04.902 INFO Sent repair work, now wait for resp
36709 Sep 22 23:15:04.902 INFO [0] received reconcile message
36710 Sep 22 23:15:04.902 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(63), op: ExtentReopen { repair_id: ReconciliationId(63), extent_id: 85 }, state: ClientData([InProgress, New, New]) }, : downstairs
36711 Sep 22 23:15:04.902 INFO [0] client ExtentReopen { repair_id: ReconciliationId(63), extent_id: 85 }
36712 Sep 22 23:15:04.902 INFO [1] received reconcile message
36713 Sep 22 23:15:04.902 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(63), op: ExtentReopen { repair_id: ReconciliationId(63), extent_id: 85 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36714 Sep 22 23:15:04.902 INFO [1] client ExtentReopen { repair_id: ReconciliationId(63), extent_id: 85 }
36715 Sep 22 23:15:04.902 INFO [2] received reconcile message
36716 Sep 22 23:15:04.902 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(63), op: ExtentReopen { repair_id: ReconciliationId(63), extent_id: 85 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36717 Sep 22 23:15:04.902 INFO [2] client ExtentReopen { repair_id: ReconciliationId(63), extent_id: 85 }
36718 Sep 22 23:15:04.902 DEBG 63 Reopen extent 85
36719 Sep 22 23:15:04.903 DEBG 63 Reopen extent 85
36720 Sep 22 23:15:04.904 DEBG 63 Reopen extent 85
36721 Sep 22 23:15:04.904 DEBG [2] It's time to notify for 63
36722 Sep 22 23:15:04.904 INFO Completion from [2] id:63 status:true
36723 Sep 22 23:15:04.904 INFO [64/752] Repair commands completed
36724 Sep 22 23:15:04.904 INFO Pop front: ReconcileIO { id: ReconciliationId(64), op: ExtentFlush { repair_id: ReconciliationId(64), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36725 Sep 22 23:15:04.904 INFO Sent repair work, now wait for resp
36726 Sep 22 23:15:04.904 INFO [0] received reconcile message
36727 Sep 22 23:15:04.904 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(64), op: ExtentFlush { repair_id: ReconciliationId(64), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36728 Sep 22 23:15:04.904 INFO [0] client ExtentFlush { repair_id: ReconciliationId(64), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36729 Sep 22 23:15:04.904 INFO [1] received reconcile message
36730 Sep 22 23:15:04.904 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(64), op: ExtentFlush { repair_id: ReconciliationId(64), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36731 Sep 22 23:15:04.905 INFO [1] client ExtentFlush { repair_id: ReconciliationId(64), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36732 Sep 22 23:15:04.905 INFO [2] received reconcile message
36733 Sep 22 23:15:04.905 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(64), op: ExtentFlush { repair_id: ReconciliationId(64), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36734 Sep 22 23:15:04.905 INFO [2] client ExtentFlush { repair_id: ReconciliationId(64), extent_id: 69, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36735 Sep 22 23:15:04.905 DEBG 64 Flush extent 69 with f:2 g:2
36736 Sep 22 23:15:04.905 DEBG Flush just extent 69 with f:2 and g:2
36737 Sep 22 23:15:04.905 DEBG [1] It's time to notify for 64
36738 Sep 22 23:15:04.905 INFO Completion from [1] id:64 status:true
36739 Sep 22 23:15:04.905 INFO [65/752] Repair commands completed
36740 Sep 22 23:15:04.905 INFO Pop front: ReconcileIO { id: ReconciliationId(65), op: ExtentClose { repair_id: ReconciliationId(65), extent_id: 69 }, state: ClientData([New, New, New]) }
36741 Sep 22 23:15:04.905 INFO Sent repair work, now wait for resp
36742 Sep 22 23:15:04.905 INFO [0] received reconcile message
36743 Sep 22 23:15:04.905 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(65), op: ExtentClose { repair_id: ReconciliationId(65), extent_id: 69 }, state: ClientData([InProgress, New, New]) }, : downstairs
36744 Sep 22 23:15:04.905 INFO [0] client ExtentClose { repair_id: ReconciliationId(65), extent_id: 69 }
36745 Sep 22 23:15:04.905 INFO [1] received reconcile message
36746 Sep 22 23:15:04.905 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(65), op: ExtentClose { repair_id: ReconciliationId(65), extent_id: 69 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36747 Sep 22 23:15:04.905 INFO [1] client ExtentClose { repair_id: ReconciliationId(65), extent_id: 69 }
36748 Sep 22 23:15:04.905 INFO [2] received reconcile message
36749 Sep 22 23:15:04.905 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(65), op: ExtentClose { repair_id: ReconciliationId(65), extent_id: 69 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36750 Sep 22 23:15:04.905 INFO [2] client ExtentClose { repair_id: ReconciliationId(65), extent_id: 69 }
36751 Sep 22 23:15:04.905 DEBG 65 Close extent 69
36752 Sep 22 23:15:04.906 DEBG 65 Close extent 69
36753 Sep 22 23:15:04.906 DEBG 65 Close extent 69
36754 Sep 22 23:15:04.906 DEBG [2] It's time to notify for 65
36755 Sep 22 23:15:04.906 INFO Completion from [2] id:65 status:true
36756 Sep 22 23:15:04.906 INFO [66/752] Repair commands completed
36757 Sep 22 23:15:04.906 INFO Pop front: ReconcileIO { id: ReconciliationId(66), op: ExtentRepair { repair_id: ReconciliationId(66), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36758 Sep 22 23:15:04.906 INFO Sent repair work, now wait for resp
36759 Sep 22 23:15:04.906 INFO [0] received reconcile message
36760 Sep 22 23:15:04.906 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(66), op: ExtentRepair { repair_id: ReconciliationId(66), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36761 Sep 22 23:15:04.906 INFO [0] client ExtentRepair { repair_id: ReconciliationId(66), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36762 Sep 22 23:15:04.906 INFO [0] Sending repair request ReconciliationId(66)
36763 Sep 22 23:15:04.907 INFO [1] received reconcile message
36764 Sep 22 23:15:04.907 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(66), op: ExtentRepair { repair_id: ReconciliationId(66), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36765 Sep 22 23:15:04.907 INFO [1] client ExtentRepair { repair_id: ReconciliationId(66), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36766 Sep 22 23:15:04.907 INFO [1] No action required ReconciliationId(66)
36767 Sep 22 23:15:04.907 INFO [2] received reconcile message
36768 Sep 22 23:15:04.907 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(66), op: ExtentRepair { repair_id: ReconciliationId(66), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36769 Sep 22 23:15:04.907 INFO [2] client ExtentRepair { repair_id: ReconciliationId(66), extent_id: 69, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36770 Sep 22 23:15:04.907 INFO [2] No action required ReconciliationId(66)
36771 Sep 22 23:15:04.907 DEBG 66 Repair extent 69 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
36772 Sep 22 23:15:04.907 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/045.copy"
36773 Sep 22 23:15:04.935 DEBG up_ds_listen checked 1 jobs, back to waiting
36774 Sep 22 23:15:04.937 DEBG Flush :1073 extent_limit None deps:[JobId(1072), JobId(1071)] res:true f:26 g:1
36775 Sep 22 23:15:04.944 DEBG Read :1074 deps:[JobId(1073)] res:true
36776 Sep 22 23:15:04.965 DEBG Flush :1073 extent_limit None deps:[JobId(1072), JobId(1071)] res:true f:26 g:1
36777 Sep 22 23:15:04.968 INFO accepted connection, remote_addr: 127.0.0.1:41468, local_addr: 127.0.0.1:46213, task: repair
36778 Sep 22 23:15:04.969 TRCE incoming request, uri: /extent/69/files, method: GET, req_id: 0c5a8386-d5ec-43a0-b378-2f0e2615f9ed, remote_addr: 127.0.0.1:41468, local_addr: 127.0.0.1:46213, task: repair
36779 Sep 22 23:15:04.969 INFO request completed, latency_us: 252, response_code: 200, uri: /extent/69/files, method: GET, req_id: 0c5a8386-d5ec-43a0-b378-2f0e2615f9ed, remote_addr: 127.0.0.1:41468, local_addr: 127.0.0.1:46213, task: repair
36780 Sep 22 23:15:04.969 INFO eid:69 Found repair files: ["045", "045.db"]
36781 Sep 22 23:15:04.970 TRCE incoming request, uri: /newextent/69/data, method: GET, req_id: 04162555-5f22-4431-929c-5701fd5f2b0e, remote_addr: 127.0.0.1:41468, local_addr: 127.0.0.1:46213, task: repair
36782 Sep 22 23:15:04.970 INFO request completed, latency_us: 338, response_code: 200, uri: /newextent/69/data, method: GET, req_id: 04162555-5f22-4431-929c-5701fd5f2b0e, remote_addr: 127.0.0.1:41468, local_addr: 127.0.0.1:46213, task: repair
36783 Sep 22 23:15:04.972 DEBG Read :1074 deps:[JobId(1073)] res:true
36784 Sep 22 23:15:04.975 TRCE incoming request, uri: /newextent/69/db, method: GET, req_id: 1b0f4554-077f-45b7-880b-d7f149841b9e, remote_addr: 127.0.0.1:41468, local_addr: 127.0.0.1:46213, task: repair
36785 Sep 22 23:15:04.975 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/69/db, method: GET, req_id: 1b0f4554-077f-45b7-880b-d7f149841b9e, remote_addr: 127.0.0.1:41468, local_addr: 127.0.0.1:46213, task: repair
36786 Sep 22 23:15:04.977 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/045.copy" to "/tmp/downstairs-vrx8aK6L/00/000/045.replace"
36787 Sep 22 23:15:04.977 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36788 Sep 22 23:15:04.978 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/045.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
36789 Sep 22 23:15:04.978 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/045"
36790 Sep 22 23:15:04.978 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/045.db"
36791 Sep 22 23:15:04.978 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36792 Sep 22 23:15:04.978 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/045.replace" to "/tmp/downstairs-vrx8aK6L/00/000/045.completed"
36793 Sep 22 23:15:04.978 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36794 Sep 22 23:15:04.978 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36795 Sep 22 23:15:04.978 DEBG [0] It's time to notify for 66
36796 Sep 22 23:15:04.978 INFO Completion from [0] id:66 status:true
36797 Sep 22 23:15:04.979 INFO [67/752] Repair commands completed
36798 Sep 22 23:15:04.979 INFO Pop front: ReconcileIO { id: ReconciliationId(67), op: ExtentReopen { repair_id: ReconciliationId(67), extent_id: 69 }, state: ClientData([New, New, New]) }
36799 Sep 22 23:15:04.979 INFO Sent repair work, now wait for resp
36800 Sep 22 23:15:04.979 INFO [0] received reconcile message
36801 Sep 22 23:15:04.979 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(67), op: ExtentReopen { repair_id: ReconciliationId(67), extent_id: 69 }, state: ClientData([InProgress, New, New]) }, : downstairs
36802 Sep 22 23:15:04.979 INFO [0] client ExtentReopen { repair_id: ReconciliationId(67), extent_id: 69 }
36803 Sep 22 23:15:04.979 INFO [1] received reconcile message
36804 Sep 22 23:15:04.979 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(67), op: ExtentReopen { repair_id: ReconciliationId(67), extent_id: 69 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36805 Sep 22 23:15:04.979 INFO [1] client ExtentReopen { repair_id: ReconciliationId(67), extent_id: 69 }
36806 Sep 22 23:15:04.979 INFO [2] received reconcile message
36807 Sep 22 23:15:04.979 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(67), op: ExtentReopen { repair_id: ReconciliationId(67), extent_id: 69 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36808 Sep 22 23:15:04.979 INFO [2] client ExtentReopen { repair_id: ReconciliationId(67), extent_id: 69 }
36809 Sep 22 23:15:04.979 DEBG 67 Reopen extent 69
36810 Sep 22 23:15:04.980 DEBG 67 Reopen extent 69
36811 Sep 22 23:15:04.980 DEBG 67 Reopen extent 69
36812 Sep 22 23:15:04.981 DEBG [2] It's time to notify for 67
36813 Sep 22 23:15:04.981 INFO Completion from [2] id:67 status:true
36814 Sep 22 23:15:04.981 INFO [68/752] Repair commands completed
36815 Sep 22 23:15:04.981 INFO Pop front: ReconcileIO { id: ReconciliationId(68), op: ExtentFlush { repair_id: ReconciliationId(68), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36816 Sep 22 23:15:04.981 INFO Sent repair work, now wait for resp
36817 Sep 22 23:15:04.981 INFO [0] received reconcile message
36818 Sep 22 23:15:04.981 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(68), op: ExtentFlush { repair_id: ReconciliationId(68), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36819 Sep 22 23:15:04.981 INFO [0] client ExtentFlush { repair_id: ReconciliationId(68), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36820 Sep 22 23:15:04.981 INFO [1] received reconcile message
36821 Sep 22 23:15:04.981 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(68), op: ExtentFlush { repair_id: ReconciliationId(68), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36822 Sep 22 23:15:04.981 INFO [1] client ExtentFlush { repair_id: ReconciliationId(68), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36823 Sep 22 23:15:04.981 INFO [2] received reconcile message
36824 Sep 22 23:15:04.981 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(68), op: ExtentFlush { repair_id: ReconciliationId(68), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36825 Sep 22 23:15:04.981 INFO [2] client ExtentFlush { repair_id: ReconciliationId(68), extent_id: 102, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36826 Sep 22 23:15:04.981 DEBG 68 Flush extent 102 with f:2 g:2
36827 Sep 22 23:15:04.981 DEBG Flush just extent 102 with f:2 and g:2
36828 Sep 22 23:15:04.981 DEBG [1] It's time to notify for 68
36829 Sep 22 23:15:04.981 INFO Completion from [1] id:68 status:true
36830 Sep 22 23:15:04.981 INFO [69/752] Repair commands completed
36831 Sep 22 23:15:04.981 INFO Pop front: ReconcileIO { id: ReconciliationId(69), op: ExtentClose { repair_id: ReconciliationId(69), extent_id: 102 }, state: ClientData([New, New, New]) }
36832 Sep 22 23:15:04.982 INFO Sent repair work, now wait for resp
36833 Sep 22 23:15:04.982 INFO [0] received reconcile message
36834 Sep 22 23:15:04.982 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(69), op: ExtentClose { repair_id: ReconciliationId(69), extent_id: 102 }, state: ClientData([InProgress, New, New]) }, : downstairs
36835 Sep 22 23:15:04.982 INFO [0] client ExtentClose { repair_id: ReconciliationId(69), extent_id: 102 }
36836 Sep 22 23:15:04.982 INFO [1] received reconcile message
36837 Sep 22 23:15:04.982 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(69), op: ExtentClose { repair_id: ReconciliationId(69), extent_id: 102 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36838 Sep 22 23:15:04.982 INFO [1] client ExtentClose { repair_id: ReconciliationId(69), extent_id: 102 }
36839 Sep 22 23:15:04.982 INFO [2] received reconcile message
36840 Sep 22 23:15:04.982 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(69), op: ExtentClose { repair_id: ReconciliationId(69), extent_id: 102 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36841 Sep 22 23:15:04.982 INFO [2] client ExtentClose { repair_id: ReconciliationId(69), extent_id: 102 }
36842 Sep 22 23:15:04.982 DEBG 69 Close extent 102
36843 Sep 22 23:15:04.982 DEBG 69 Close extent 102
36844 Sep 22 23:15:04.982 DEBG 69 Close extent 102
36845 Sep 22 23:15:04.983 DEBG [2] It's time to notify for 69
36846 Sep 22 23:15:04.983 INFO Completion from [2] id:69 status:true
36847 Sep 22 23:15:04.983 INFO [70/752] Repair commands completed
36848 Sep 22 23:15:04.983 INFO Pop front: ReconcileIO { id: ReconciliationId(70), op: ExtentRepair { repair_id: ReconciliationId(70), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36849 Sep 22 23:15:04.983 INFO Sent repair work, now wait for resp
36850 Sep 22 23:15:04.983 INFO [0] received reconcile message
36851 Sep 22 23:15:04.983 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(70), op: ExtentRepair { repair_id: ReconciliationId(70), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36852 Sep 22 23:15:04.983 INFO [0] client ExtentRepair { repair_id: ReconciliationId(70), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36853 Sep 22 23:15:04.983 INFO [0] Sending repair request ReconciliationId(70)
36854 Sep 22 23:15:04.983 INFO [1] received reconcile message
36855 Sep 22 23:15:04.983 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(70), op: ExtentRepair { repair_id: ReconciliationId(70), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36856 Sep 22 23:15:04.983 INFO [1] client ExtentRepair { repair_id: ReconciliationId(70), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36857 Sep 22 23:15:04.983 INFO [1] No action required ReconciliationId(70)
36858 Sep 22 23:15:04.983 INFO [2] received reconcile message
36859 Sep 22 23:15:04.983 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(70), op: ExtentRepair { repair_id: ReconciliationId(70), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36860 Sep 22 23:15:04.983 INFO [2] client ExtentRepair { repair_id: ReconciliationId(70), extent_id: 102, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36861 Sep 22 23:15:04.983 INFO [2] No action required ReconciliationId(70)
36862 Sep 22 23:15:04.983 DEBG 70 Repair extent 102 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
36863 Sep 22 23:15:04.983 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/066.copy"
36864 Sep 22 23:15:05.043 DEBG IO Read 1076 has deps [JobId(1075)]
36865 Sep 22 23:15:05.047 INFO accepted connection, remote_addr: 127.0.0.1:33826, local_addr: 127.0.0.1:46213, task: repair
36866 Sep 22 23:15:05.047 TRCE incoming request, uri: /extent/102/files, method: GET, req_id: 9e739f80-0aa4-4c8e-a439-bd1f1f37211f, remote_addr: 127.0.0.1:33826, local_addr: 127.0.0.1:46213, task: repair
36867 Sep 22 23:15:05.047 INFO request completed, latency_us: 264, response_code: 200, uri: /extent/102/files, method: GET, req_id: 9e739f80-0aa4-4c8e-a439-bd1f1f37211f, remote_addr: 127.0.0.1:33826, local_addr: 127.0.0.1:46213, task: repair
36868 Sep 22 23:15:05.048 INFO eid:102 Found repair files: ["066", "066.db"]
36869 Sep 22 23:15:05.048 TRCE incoming request, uri: /newextent/102/data, method: GET, req_id: 0350ac0c-f7e0-4c41-ac6e-3793c41bcbd6, remote_addr: 127.0.0.1:33826, local_addr: 127.0.0.1:46213, task: repair
36870 Sep 22 23:15:05.048 INFO request completed, latency_us: 362, response_code: 200, uri: /newextent/102/data, method: GET, req_id: 0350ac0c-f7e0-4c41-ac6e-3793c41bcbd6, remote_addr: 127.0.0.1:33826, local_addr: 127.0.0.1:46213, task: repair
36871 Sep 22 23:15:05.053 TRCE incoming request, uri: /newextent/102/db, method: GET, req_id: b149eef7-eab5-4476-8d1d-4f4682848eaf, remote_addr: 127.0.0.1:33826, local_addr: 127.0.0.1:46213, task: repair
36872 Sep 22 23:15:05.054 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/102/db, method: GET, req_id: b149eef7-eab5-4476-8d1d-4f4682848eaf, remote_addr: 127.0.0.1:33826, local_addr: 127.0.0.1:46213, task: repair
36873 Sep 22 23:15:05.055 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/066.copy" to "/tmp/downstairs-vrx8aK6L/00/000/066.replace"
36874 Sep 22 23:15:05.055 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36875 Sep 22 23:15:05.056 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/066.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
36876 Sep 22 23:15:05.056 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/066"
36877 Sep 22 23:15:05.056 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/066.db"
36878 Sep 22 23:15:05.056 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36879 Sep 22 23:15:05.056 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/066.replace" to "/tmp/downstairs-vrx8aK6L/00/000/066.completed"
36880 Sep 22 23:15:05.056 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36881 Sep 22 23:15:05.057 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36882 Sep 22 23:15:05.057 DEBG [0] It's time to notify for 70
36883 Sep 22 23:15:05.057 INFO Completion from [0] id:70 status:true
36884 Sep 22 23:15:05.057 INFO [71/752] Repair commands completed
36885 Sep 22 23:15:05.057 INFO Pop front: ReconcileIO { id: ReconciliationId(71), op: ExtentReopen { repair_id: ReconciliationId(71), extent_id: 102 }, state: ClientData([New, New, New]) }
36886 Sep 22 23:15:05.057 INFO Sent repair work, now wait for resp
36887 Sep 22 23:15:05.057 INFO [0] received reconcile message
36888 Sep 22 23:15:05.057 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(71), op: ExtentReopen { repair_id: ReconciliationId(71), extent_id: 102 }, state: ClientData([InProgress, New, New]) }, : downstairs
36889 Sep 22 23:15:05.057 INFO [0] client ExtentReopen { repair_id: ReconciliationId(71), extent_id: 102 }
36890 Sep 22 23:15:05.057 INFO [1] received reconcile message
36891 Sep 22 23:15:05.057 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(71), op: ExtentReopen { repair_id: ReconciliationId(71), extent_id: 102 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36892 Sep 22 23:15:05.057 INFO [1] client ExtentReopen { repair_id: ReconciliationId(71), extent_id: 102 }
36893 Sep 22 23:15:05.057 INFO [2] received reconcile message
36894 Sep 22 23:15:05.057 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(71), op: ExtentReopen { repair_id: ReconciliationId(71), extent_id: 102 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36895 Sep 22 23:15:05.057 INFO [2] client ExtentReopen { repair_id: ReconciliationId(71), extent_id: 102 }
36896 Sep 22 23:15:05.058 DEBG 71 Reopen extent 102
36897 Sep 22 23:15:05.059 DEBG 71 Reopen extent 102
36898 Sep 22 23:15:05.059 DEBG 71 Reopen extent 102
36899 Sep 22 23:15:05.060 DEBG [2] It's time to notify for 71
36900 Sep 22 23:15:05.060 INFO Completion from [2] id:71 status:true
36901 Sep 22 23:15:05.060 INFO [72/752] Repair commands completed
36902 Sep 22 23:15:05.060 INFO Pop front: ReconcileIO { id: ReconciliationId(72), op: ExtentFlush { repair_id: ReconciliationId(72), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36903 Sep 22 23:15:05.060 INFO Sent repair work, now wait for resp
36904 Sep 22 23:15:05.060 INFO [0] received reconcile message
36905 Sep 22 23:15:05.060 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(72), op: ExtentFlush { repair_id: ReconciliationId(72), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36906 Sep 22 23:15:05.060 INFO [0] client ExtentFlush { repair_id: ReconciliationId(72), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36907 Sep 22 23:15:05.060 INFO [1] received reconcile message
36908 Sep 22 23:15:05.060 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(72), op: ExtentFlush { repair_id: ReconciliationId(72), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36909 Sep 22 23:15:05.060 INFO [1] client ExtentFlush { repair_id: ReconciliationId(72), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36910 Sep 22 23:15:05.061 INFO [2] received reconcile message
36911 Sep 22 23:15:05.061 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(72), op: ExtentFlush { repair_id: ReconciliationId(72), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36912 Sep 22 23:15:05.061 INFO [2] client ExtentFlush { repair_id: ReconciliationId(72), extent_id: 73, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36913 Sep 22 23:15:05.061 DEBG 72 Flush extent 73 with f:2 g:2
36914 Sep 22 23:15:05.061 DEBG Flush just extent 73 with f:2 and g:2
36915 Sep 22 23:15:05.061 DEBG [1] It's time to notify for 72
36916 Sep 22 23:15:05.061 INFO Completion from [1] id:72 status:true
36917 Sep 22 23:15:05.061 INFO [73/752] Repair commands completed
36918 Sep 22 23:15:05.061 INFO Pop front: ReconcileIO { id: ReconciliationId(73), op: ExtentClose { repair_id: ReconciliationId(73), extent_id: 73 }, state: ClientData([New, New, New]) }
36919 Sep 22 23:15:05.061 INFO Sent repair work, now wait for resp
36920 Sep 22 23:15:05.061 INFO [0] received reconcile message
36921 Sep 22 23:15:05.061 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(73), op: ExtentClose { repair_id: ReconciliationId(73), extent_id: 73 }, state: ClientData([InProgress, New, New]) }, : downstairs
36922 Sep 22 23:15:05.061 INFO [0] client ExtentClose { repair_id: ReconciliationId(73), extent_id: 73 }
36923 Sep 22 23:15:05.061 INFO [1] received reconcile message
36924 Sep 22 23:15:05.061 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(73), op: ExtentClose { repair_id: ReconciliationId(73), extent_id: 73 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36925 Sep 22 23:15:05.061 INFO [1] client ExtentClose { repair_id: ReconciliationId(73), extent_id: 73 }
36926 Sep 22 23:15:05.061 INFO [2] received reconcile message
36927 Sep 22 23:15:05.061 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(73), op: ExtentClose { repair_id: ReconciliationId(73), extent_id: 73 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36928 Sep 22 23:15:05.061 INFO [2] client ExtentClose { repair_id: ReconciliationId(73), extent_id: 73 }
36929 Sep 22 23:15:05.062 DEBG 73 Close extent 73
36930 Sep 22 23:15:05.062 DEBG 73 Close extent 73
36931 Sep 22 23:15:05.062 DEBG 73 Close extent 73
36932 Sep 22 23:15:05.063 DEBG [2] It's time to notify for 73
36933 Sep 22 23:15:05.063 INFO Completion from [2] id:73 status:true
36934 Sep 22 23:15:05.063 INFO [74/752] Repair commands completed
36935 Sep 22 23:15:05.063 INFO Pop front: ReconcileIO { id: ReconciliationId(74), op: ExtentRepair { repair_id: ReconciliationId(74), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
36936 Sep 22 23:15:05.063 INFO Sent repair work, now wait for resp
36937 Sep 22 23:15:05.063 INFO [0] received reconcile message
36938 Sep 22 23:15:05.063 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(74), op: ExtentRepair { repair_id: ReconciliationId(74), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
36939 Sep 22 23:15:05.063 INFO [0] client ExtentRepair { repair_id: ReconciliationId(74), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36940 Sep 22 23:15:05.063 INFO [0] Sending repair request ReconciliationId(74)
36941 Sep 22 23:15:05.063 INFO [1] received reconcile message
36942 Sep 22 23:15:05.063 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(74), op: ExtentRepair { repair_id: ReconciliationId(74), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36943 Sep 22 23:15:05.063 INFO [1] client ExtentRepair { repair_id: ReconciliationId(74), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36944 Sep 22 23:15:05.063 INFO [1] No action required ReconciliationId(74)
36945 Sep 22 23:15:05.063 INFO [2] received reconcile message
36946 Sep 22 23:15:05.063 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(74), op: ExtentRepair { repair_id: ReconciliationId(74), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
36947 Sep 22 23:15:05.063 INFO [2] client ExtentRepair { repair_id: ReconciliationId(74), extent_id: 73, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
36948 Sep 22 23:15:05.063 INFO [2] No action required ReconciliationId(74)
36949 Sep 22 23:15:05.063 DEBG 74 Repair extent 73 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
36950 Sep 22 23:15:05.064 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/049.copy"
36951 Sep 22 23:15:05.129 INFO accepted connection, remote_addr: 127.0.0.1:36308, local_addr: 127.0.0.1:46213, task: repair
36952 Sep 22 23:15:05.129 TRCE incoming request, uri: /extent/73/files, method: GET, req_id: b4559a33-b140-4b58-9426-f7bc6a18adce, remote_addr: 127.0.0.1:36308, local_addr: 127.0.0.1:46213, task: repair
36953 Sep 22 23:15:05.130 INFO request completed, latency_us: 252, response_code: 200, uri: /extent/73/files, method: GET, req_id: b4559a33-b140-4b58-9426-f7bc6a18adce, remote_addr: 127.0.0.1:36308, local_addr: 127.0.0.1:46213, task: repair
36954 Sep 22 23:15:05.130 INFO eid:73 Found repair files: ["049", "049.db"]
36955 Sep 22 23:15:05.130 TRCE incoming request, uri: /newextent/73/data, method: GET, req_id: ce523846-f7da-47d9-9193-ae9526ee370c, remote_addr: 127.0.0.1:36308, local_addr: 127.0.0.1:46213, task: repair
36956 Sep 22 23:15:05.131 INFO request completed, latency_us: 360, response_code: 200, uri: /newextent/73/data, method: GET, req_id: ce523846-f7da-47d9-9193-ae9526ee370c, remote_addr: 127.0.0.1:36308, local_addr: 127.0.0.1:46213, task: repair
36957 Sep 22 23:15:05.136 TRCE incoming request, uri: /newextent/73/db, method: GET, req_id: dada99a2-3a20-419e-8109-443c2aaffcf2, remote_addr: 127.0.0.1:36308, local_addr: 127.0.0.1:46213, task: repair
36958 Sep 22 23:15:05.136 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/73/db, method: GET, req_id: dada99a2-3a20-419e-8109-443c2aaffcf2, remote_addr: 127.0.0.1:36308, local_addr: 127.0.0.1:46213, task: repair
36959 Sep 22 23:15:05.137 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/049.copy" to "/tmp/downstairs-vrx8aK6L/00/000/049.replace"
36960 Sep 22 23:15:05.137 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36961 Sep 22 23:15:05.138 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/049.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
36962 Sep 22 23:15:05.138 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/049"
36963 Sep 22 23:15:05.139 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/049.db"
36964 Sep 22 23:15:05.139 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36965 Sep 22 23:15:05.139 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/049.replace" to "/tmp/downstairs-vrx8aK6L/00/000/049.completed"
36966 Sep 22 23:15:05.139 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36967 Sep 22 23:15:05.139 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
36968 Sep 22 23:15:05.139 DEBG [0] It's time to notify for 74
36969 Sep 22 23:15:05.139 INFO Completion from [0] id:74 status:true
36970 Sep 22 23:15:05.139 INFO [75/752] Repair commands completed
36971 Sep 22 23:15:05.139 INFO Pop front: ReconcileIO { id: ReconciliationId(75), op: ExtentReopen { repair_id: ReconciliationId(75), extent_id: 73 }, state: ClientData([New, New, New]) }
36972 Sep 22 23:15:05.139 INFO Sent repair work, now wait for resp
36973 Sep 22 23:15:05.139 INFO [0] received reconcile message
36974 Sep 22 23:15:05.139 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(75), op: ExtentReopen { repair_id: ReconciliationId(75), extent_id: 73 }, state: ClientData([InProgress, New, New]) }, : downstairs
36975 Sep 22 23:15:05.139 INFO [0] client ExtentReopen { repair_id: ReconciliationId(75), extent_id: 73 }
36976 Sep 22 23:15:05.139 INFO [1] received reconcile message
36977 Sep 22 23:15:05.139 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(75), op: ExtentReopen { repair_id: ReconciliationId(75), extent_id: 73 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
36978 Sep 22 23:15:05.139 INFO [1] client ExtentReopen { repair_id: ReconciliationId(75), extent_id: 73 }
36979 Sep 22 23:15:05.139 INFO [2] received reconcile message
36980 Sep 22 23:15:05.139 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(75), op: ExtentReopen { repair_id: ReconciliationId(75), extent_id: 73 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
36981 Sep 22 23:15:05.139 INFO [2] client ExtentReopen { repair_id: ReconciliationId(75), extent_id: 73 }
36982 Sep 22 23:15:05.140 DEBG 75 Reopen extent 73
36983 Sep 22 23:15:05.140 DEBG 75 Reopen extent 73
36984 Sep 22 23:15:05.141 DEBG 75 Reopen extent 73
36985 Sep 22 23:15:05.141 DEBG [2] It's time to notify for 75
36986 Sep 22 23:15:05.141 INFO Completion from [2] id:75 status:true
36987 Sep 22 23:15:05.141 INFO [76/752] Repair commands completed
36988 Sep 22 23:15:05.141 INFO Pop front: ReconcileIO { id: ReconciliationId(76), op: ExtentFlush { repair_id: ReconciliationId(76), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
36989 Sep 22 23:15:05.141 INFO Sent repair work, now wait for resp
36990 Sep 22 23:15:05.141 INFO [0] received reconcile message
36991 Sep 22 23:15:05.142 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(76), op: ExtentFlush { repair_id: ReconciliationId(76), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
36992 Sep 22 23:15:05.142 INFO [0] client ExtentFlush { repair_id: ReconciliationId(76), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36993 Sep 22 23:15:05.142 INFO [1] received reconcile message
36994 Sep 22 23:15:05.142 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(76), op: ExtentFlush { repair_id: ReconciliationId(76), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
36995 Sep 22 23:15:05.142 INFO [1] client ExtentFlush { repair_id: ReconciliationId(76), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36996 Sep 22 23:15:05.142 INFO [2] received reconcile message
36997 Sep 22 23:15:05.142 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(76), op: ExtentFlush { repair_id: ReconciliationId(76), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
36998 Sep 22 23:15:05.142 INFO [2] client ExtentFlush { repair_id: ReconciliationId(76), extent_id: 76, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
36999 Sep 22 23:15:05.142 DEBG 76 Flush extent 76 with f:2 g:2
37000 Sep 22 23:15:05.142 DEBG Flush just extent 76 with f:2 and g:2
37001 Sep 22 23:15:05.142 DEBG [1] It's time to notify for 76
37002 Sep 22 23:15:05.142 INFO Completion from [1] id:76 status:true
37003 Sep 22 23:15:05.142 INFO [77/752] Repair commands completed
37004 Sep 22 23:15:05.142 INFO Pop front: ReconcileIO { id: ReconciliationId(77), op: ExtentClose { repair_id: ReconciliationId(77), extent_id: 76 }, state: ClientData([New, New, New]) }
37005 Sep 22 23:15:05.142 INFO Sent repair work, now wait for resp
37006 Sep 22 23:15:05.142 INFO [0] received reconcile message
37007 Sep 22 23:15:05.142 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(77), op: ExtentClose { repair_id: ReconciliationId(77), extent_id: 76 }, state: ClientData([InProgress, New, New]) }, : downstairs
37008 Sep 22 23:15:05.142 INFO [0] client ExtentClose { repair_id: ReconciliationId(77), extent_id: 76 }
37009 Sep 22 23:15:05.142 INFO [1] received reconcile message
37010 Sep 22 23:15:05.142 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(77), op: ExtentClose { repair_id: ReconciliationId(77), extent_id: 76 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37011 Sep 22 23:15:05.142 INFO [1] client ExtentClose { repair_id: ReconciliationId(77), extent_id: 76 }
37012 Sep 22 23:15:05.142 INFO [2] received reconcile message
37013 Sep 22 23:15:05.142 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(77), op: ExtentClose { repair_id: ReconciliationId(77), extent_id: 76 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37014 Sep 22 23:15:05.142 INFO [2] client ExtentClose { repair_id: ReconciliationId(77), extent_id: 76 }
37015 Sep 22 23:15:05.142 DEBG 77 Close extent 76
37016 Sep 22 23:15:05.143 DEBG 77 Close extent 76
37017 Sep 22 23:15:05.143 DEBG 77 Close extent 76
37018 Sep 22 23:15:05.143 DEBG [2] It's time to notify for 77
37019 Sep 22 23:15:05.143 INFO Completion from [2] id:77 status:true
37020 Sep 22 23:15:05.143 INFO [78/752] Repair commands completed
37021 Sep 22 23:15:05.143 INFO Pop front: ReconcileIO { id: ReconciliationId(78), op: ExtentRepair { repair_id: ReconciliationId(78), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37022 Sep 22 23:15:05.143 INFO Sent repair work, now wait for resp
37023 Sep 22 23:15:05.143 INFO [0] received reconcile message
37024 Sep 22 23:15:05.143 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(78), op: ExtentRepair { repair_id: ReconciliationId(78), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37025 Sep 22 23:15:05.143 INFO [0] client ExtentRepair { repair_id: ReconciliationId(78), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37026 Sep 22 23:15:05.144 INFO [0] Sending repair request ReconciliationId(78)
37027 Sep 22 23:15:05.144 INFO [1] received reconcile message
37028 Sep 22 23:15:05.144 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(78), op: ExtentRepair { repair_id: ReconciliationId(78), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37029 Sep 22 23:15:05.144 INFO [1] client ExtentRepair { repair_id: ReconciliationId(78), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37030 Sep 22 23:15:05.144 INFO [1] No action required ReconciliationId(78)
37031 Sep 22 23:15:05.144 INFO [2] received reconcile message
37032 Sep 22 23:15:05.144 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(78), op: ExtentRepair { repair_id: ReconciliationId(78), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37033 Sep 22 23:15:05.144 INFO [2] client ExtentRepair { repair_id: ReconciliationId(78), extent_id: 76, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37034 Sep 22 23:15:05.144 INFO [2] No action required ReconciliationId(78)
37035 Sep 22 23:15:05.144 DEBG 78 Repair extent 76 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
37036 Sep 22 23:15:05.144 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/04C.copy"
37037 Sep 22 23:15:05.206 INFO accepted connection, remote_addr: 127.0.0.1:49240, local_addr: 127.0.0.1:46213, task: repair
37038 Sep 22 23:15:05.206 TRCE incoming request, uri: /extent/76/files, method: GET, req_id: 6525aa45-7052-4397-8cc6-4c6d18f5d0a7, remote_addr: 127.0.0.1:49240, local_addr: 127.0.0.1:46213, task: repair
37039 Sep 22 23:15:05.206 INFO request completed, latency_us: 222, response_code: 200, uri: /extent/76/files, method: GET, req_id: 6525aa45-7052-4397-8cc6-4c6d18f5d0a7, remote_addr: 127.0.0.1:49240, local_addr: 127.0.0.1:46213, task: repair
37040 Sep 22 23:15:05.206 INFO eid:76 Found repair files: ["04C", "04C.db"]
37041 Sep 22 23:15:05.207 TRCE incoming request, uri: /newextent/76/data, method: GET, req_id: 18772ffb-896a-4f08-a622-d072dbb2e496, remote_addr: 127.0.0.1:49240, local_addr: 127.0.0.1:46213, task: repair
37042 Sep 22 23:15:05.207 INFO request completed, latency_us: 320, response_code: 200, uri: /newextent/76/data, method: GET, req_id: 18772ffb-896a-4f08-a622-d072dbb2e496, remote_addr: 127.0.0.1:49240, local_addr: 127.0.0.1:46213, task: repair
37043 Sep 22 23:15:05.212 TRCE incoming request, uri: /newextent/76/db, method: GET, req_id: 0172b873-710f-4c6c-8454-8d48192f48a5, remote_addr: 127.0.0.1:49240, local_addr: 127.0.0.1:46213, task: repair
37044 Sep 22 23:15:05.213 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/76/db, method: GET, req_id: 0172b873-710f-4c6c-8454-8d48192f48a5, remote_addr: 127.0.0.1:49240, local_addr: 127.0.0.1:46213, task: repair
37045 Sep 22 23:15:05.214 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/04C.copy" to "/tmp/downstairs-vrx8aK6L/00/000/04C.replace"
37046 Sep 22 23:15:05.214 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37047 Sep 22 23:15:05.215 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/04C.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
37048 Sep 22 23:15:05.215 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/04C"
37049 Sep 22 23:15:05.215 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/04C.db"
37050 Sep 22 23:15:05.215 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37051 Sep 22 23:15:05.215 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/04C.replace" to "/tmp/downstairs-vrx8aK6L/00/000/04C.completed"
37052 Sep 22 23:15:05.215 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37053 Sep 22 23:15:05.215 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37054 Sep 22 23:15:05.215 DEBG [0] It's time to notify for 78
37055 Sep 22 23:15:05.215 INFO Completion from [0] id:78 status:true
37056 Sep 22 23:15:05.216 INFO [79/752] Repair commands completed
37057 Sep 22 23:15:05.216 INFO Pop front: ReconcileIO { id: ReconciliationId(79), op: ExtentReopen { repair_id: ReconciliationId(79), extent_id: 76 }, state: ClientData([New, New, New]) }
37058 Sep 22 23:15:05.216 INFO Sent repair work, now wait for resp
37059 Sep 22 23:15:05.216 INFO [0] received reconcile message
37060 Sep 22 23:15:05.216 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(79), op: ExtentReopen { repair_id: ReconciliationId(79), extent_id: 76 }, state: ClientData([InProgress, New, New]) }, : downstairs
37061 Sep 22 23:15:05.216 INFO [0] client ExtentReopen { repair_id: ReconciliationId(79), extent_id: 76 }
37062 Sep 22 23:15:05.216 INFO [1] received reconcile message
37063 Sep 22 23:15:05.216 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(79), op: ExtentReopen { repair_id: ReconciliationId(79), extent_id: 76 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37064 Sep 22 23:15:05.216 INFO [1] client ExtentReopen { repair_id: ReconciliationId(79), extent_id: 76 }
37065 Sep 22 23:15:05.216 INFO [2] received reconcile message
37066 Sep 22 23:15:05.216 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(79), op: ExtentReopen { repair_id: ReconciliationId(79), extent_id: 76 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37067 Sep 22 23:15:05.216 INFO [2] client ExtentReopen { repair_id: ReconciliationId(79), extent_id: 76 }
37068 Sep 22 23:15:05.216 DEBG 79 Reopen extent 76
37069 Sep 22 23:15:05.217 DEBG 79 Reopen extent 76
37070 Sep 22 23:15:05.217 DEBG 79 Reopen extent 76
37071 Sep 22 23:15:05.218 DEBG [2] It's time to notify for 79
37072 Sep 22 23:15:05.218 INFO Completion from [2] id:79 status:true
37073 Sep 22 23:15:05.218 INFO [80/752] Repair commands completed
37074 Sep 22 23:15:05.218 INFO Pop front: ReconcileIO { id: ReconciliationId(80), op: ExtentFlush { repair_id: ReconciliationId(80), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37075 Sep 22 23:15:05.218 INFO Sent repair work, now wait for resp
37076 Sep 22 23:15:05.218 INFO [0] received reconcile message
37077 Sep 22 23:15:05.218 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(80), op: ExtentFlush { repair_id: ReconciliationId(80), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37078 Sep 22 23:15:05.218 INFO [0] client ExtentFlush { repair_id: ReconciliationId(80), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37079 Sep 22 23:15:05.218 INFO [1] received reconcile message
37080 Sep 22 23:15:05.218 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(80), op: ExtentFlush { repair_id: ReconciliationId(80), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37081 Sep 22 23:15:05.218 INFO [1] client ExtentFlush { repair_id: ReconciliationId(80), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37082 Sep 22 23:15:05.218 INFO [2] received reconcile message
37083 Sep 22 23:15:05.218 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(80), op: ExtentFlush { repair_id: ReconciliationId(80), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37084 Sep 22 23:15:05.218 INFO [2] client ExtentFlush { repair_id: ReconciliationId(80), extent_id: 98, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37085 Sep 22 23:15:05.218 DEBG 80 Flush extent 98 with f:2 g:2
37086 Sep 22 23:15:05.218 DEBG Flush just extent 98 with f:2 and g:2
37087 Sep 22 23:15:05.218 DEBG [1] It's time to notify for 80
37088 Sep 22 23:15:05.218 INFO Completion from [1] id:80 status:true
37089 Sep 22 23:15:05.218 INFO [81/752] Repair commands completed
37090 Sep 22 23:15:05.218 INFO Pop front: ReconcileIO { id: ReconciliationId(81), op: ExtentClose { repair_id: ReconciliationId(81), extent_id: 98 }, state: ClientData([New, New, New]) }
37091 Sep 22 23:15:05.218 INFO Sent repair work, now wait for resp
37092 Sep 22 23:15:05.218 INFO [0] received reconcile message
37093 Sep 22 23:15:05.218 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(81), op: ExtentClose { repair_id: ReconciliationId(81), extent_id: 98 }, state: ClientData([InProgress, New, New]) }, : downstairs
37094 Sep 22 23:15:05.218 INFO [0] client ExtentClose { repair_id: ReconciliationId(81), extent_id: 98 }
37095 Sep 22 23:15:05.218 INFO [1] received reconcile message
37096 Sep 22 23:15:05.219 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(81), op: ExtentClose { repair_id: ReconciliationId(81), extent_id: 98 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37097 Sep 22 23:15:05.219 INFO [1] client ExtentClose { repair_id: ReconciliationId(81), extent_id: 98 }
37098 Sep 22 23:15:05.219 INFO [2] received reconcile message
37099 Sep 22 23:15:05.219 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(81), op: ExtentClose { repair_id: ReconciliationId(81), extent_id: 98 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37100 Sep 22 23:15:05.219 INFO [2] client ExtentClose { repair_id: ReconciliationId(81), extent_id: 98 }
37101 Sep 22 23:15:05.219 DEBG 81 Close extent 98
37102 Sep 22 23:15:05.219 DEBG 81 Close extent 98
37103 Sep 22 23:15:05.219 DEBG 81 Close extent 98
37104 Sep 22 23:15:05.220 DEBG [2] It's time to notify for 81
37105 Sep 22 23:15:05.220 INFO Completion from [2] id:81 status:true
37106 Sep 22 23:15:05.220 INFO [82/752] Repair commands completed
37107 Sep 22 23:15:05.220 INFO Pop front: ReconcileIO { id: ReconciliationId(82), op: ExtentRepair { repair_id: ReconciliationId(82), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37108 Sep 22 23:15:05.220 INFO Sent repair work, now wait for resp
37109 Sep 22 23:15:05.220 INFO [0] received reconcile message
37110 Sep 22 23:15:05.220 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(82), op: ExtentRepair { repair_id: ReconciliationId(82), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37111 Sep 22 23:15:05.220 INFO [0] client ExtentRepair { repair_id: ReconciliationId(82), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37112 Sep 22 23:15:05.220 INFO [0] Sending repair request ReconciliationId(82)
37113 Sep 22 23:15:05.220 INFO [1] received reconcile message
37114 Sep 22 23:15:05.220 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(82), op: ExtentRepair { repair_id: ReconciliationId(82), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37115 Sep 22 23:15:05.220 INFO [1] client ExtentRepair { repair_id: ReconciliationId(82), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37116 Sep 22 23:15:05.220 INFO [1] No action required ReconciliationId(82)
37117 Sep 22 23:15:05.220 INFO [2] received reconcile message
37118 Sep 22 23:15:05.220 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(82), op: ExtentRepair { repair_id: ReconciliationId(82), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37119 Sep 22 23:15:05.220 INFO [2] client ExtentRepair { repair_id: ReconciliationId(82), extent_id: 98, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37120 Sep 22 23:15:05.220 INFO [2] No action required ReconciliationId(82)
37121 Sep 22 23:15:05.220 DEBG 82 Repair extent 98 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
37122 Sep 22 23:15:05.220 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/062.copy"
37123 Sep 22 23:15:05.283 INFO accepted connection, remote_addr: 127.0.0.1:35298, local_addr: 127.0.0.1:46213, task: repair
37124 Sep 22 23:15:05.283 TRCE incoming request, uri: /extent/98/files, method: GET, req_id: e709a5f2-eab0-444e-bfa2-0473c5efe0d2, remote_addr: 127.0.0.1:35298, local_addr: 127.0.0.1:46213, task: repair
37125 Sep 22 23:15:05.283 INFO request completed, latency_us: 219, response_code: 200, uri: /extent/98/files, method: GET, req_id: e709a5f2-eab0-444e-bfa2-0473c5efe0d2, remote_addr: 127.0.0.1:35298, local_addr: 127.0.0.1:46213, task: repair
37126 Sep 22 23:15:05.283 INFO eid:98 Found repair files: ["062", "062.db"]
37127 Sep 22 23:15:05.284 TRCE incoming request, uri: /newextent/98/data, method: GET, req_id: 8ed1c96b-b133-4d9a-bb38-104a681dc5ef, remote_addr: 127.0.0.1:35298, local_addr: 127.0.0.1:46213, task: repair
37128 Sep 22 23:15:05.284 INFO request completed, latency_us: 318, response_code: 200, uri: /newextent/98/data, method: GET, req_id: 8ed1c96b-b133-4d9a-bb38-104a681dc5ef, remote_addr: 127.0.0.1:35298, local_addr: 127.0.0.1:46213, task: repair
37129 Sep 22 23:15:05.289 TRCE incoming request, uri: /newextent/98/db, method: GET, req_id: 59d19b04-69b0-4ac4-ab4c-1439fc6749c9, remote_addr: 127.0.0.1:35298, local_addr: 127.0.0.1:46213, task: repair
37130 Sep 22 23:15:05.290 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/98/db, method: GET, req_id: 59d19b04-69b0-4ac4-ab4c-1439fc6749c9, remote_addr: 127.0.0.1:35298, local_addr: 127.0.0.1:46213, task: repair
37131 Sep 22 23:15:05.291 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/062.copy" to "/tmp/downstairs-vrx8aK6L/00/000/062.replace"
37132 Sep 22 23:15:05.291 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37133 Sep 22 23:15:05.292 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/062.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
37134 Sep 22 23:15:05.292 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/062"
37135 Sep 22 23:15:05.292 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/062.db"
37136 Sep 22 23:15:05.292 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37137 Sep 22 23:15:05.292 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/062.replace" to "/tmp/downstairs-vrx8aK6L/00/000/062.completed"
37138 Sep 22 23:15:05.292 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37139 Sep 22 23:15:05.292 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37140 Sep 22 23:15:05.292 DEBG [0] It's time to notify for 82
37141 Sep 22 23:15:05.292 INFO Completion from [0] id:82 status:true
37142 Sep 22 23:15:05.292 INFO [83/752] Repair commands completed
37143 Sep 22 23:15:05.293 INFO Pop front: ReconcileIO { id: ReconciliationId(83), op: ExtentReopen { repair_id: ReconciliationId(83), extent_id: 98 }, state: ClientData([New, New, New]) }
37144 Sep 22 23:15:05.293 INFO Sent repair work, now wait for resp
37145 Sep 22 23:15:05.293 INFO [0] received reconcile message
37146 Sep 22 23:15:05.293 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(83), op: ExtentReopen { repair_id: ReconciliationId(83), extent_id: 98 }, state: ClientData([InProgress, New, New]) }, : downstairs
37147 Sep 22 23:15:05.293 INFO [0] client ExtentReopen { repair_id: ReconciliationId(83), extent_id: 98 }
37148 Sep 22 23:15:05.293 INFO [1] received reconcile message
37149 Sep 22 23:15:05.293 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(83), op: ExtentReopen { repair_id: ReconciliationId(83), extent_id: 98 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37150 Sep 22 23:15:05.293 INFO [1] client ExtentReopen { repair_id: ReconciliationId(83), extent_id: 98 }
37151 Sep 22 23:15:05.293 INFO [2] received reconcile message
37152 Sep 22 23:15:05.293 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(83), op: ExtentReopen { repair_id: ReconciliationId(83), extent_id: 98 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37153 Sep 22 23:15:05.293 INFO [2] client ExtentReopen { repair_id: ReconciliationId(83), extent_id: 98 }
37154 Sep 22 23:15:05.293 DEBG 83 Reopen extent 98
37155 Sep 22 23:15:05.294 DEBG 83 Reopen extent 98
37156 Sep 22 23:15:05.294 DEBG 83 Reopen extent 98
37157 Sep 22 23:15:05.295 DEBG [2] It's time to notify for 83
37158 Sep 22 23:15:05.295 INFO Completion from [2] id:83 status:true
37159 Sep 22 23:15:05.295 INFO [84/752] Repair commands completed
37160 Sep 22 23:15:05.295 INFO Pop front: ReconcileIO { id: ReconciliationId(84), op: ExtentFlush { repair_id: ReconciliationId(84), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37161 Sep 22 23:15:05.295 INFO Sent repair work, now wait for resp
37162 Sep 22 23:15:05.295 INFO [0] received reconcile message
37163 Sep 22 23:15:05.295 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(84), op: ExtentFlush { repair_id: ReconciliationId(84), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37164 Sep 22 23:15:05.295 INFO [0] client ExtentFlush { repair_id: ReconciliationId(84), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37165 Sep 22 23:15:05.295 INFO [1] received reconcile message
37166 Sep 22 23:15:05.295 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(84), op: ExtentFlush { repair_id: ReconciliationId(84), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37167 Sep 22 23:15:05.295 INFO [1] client ExtentFlush { repair_id: ReconciliationId(84), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37168 Sep 22 23:15:05.295 INFO [2] received reconcile message
37169 Sep 22 23:15:05.295 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(84), op: ExtentFlush { repair_id: ReconciliationId(84), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37170 Sep 22 23:15:05.295 INFO [2] client ExtentFlush { repair_id: ReconciliationId(84), extent_id: 65, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37171 Sep 22 23:15:05.295 DEBG 84 Flush extent 65 with f:2 g:2
37172 Sep 22 23:15:05.295 DEBG Flush just extent 65 with f:2 and g:2
37173 Sep 22 23:15:05.295 DEBG [1] It's time to notify for 84
37174 Sep 22 23:15:05.295 INFO Completion from [1] id:84 status:true
37175 Sep 22 23:15:05.295 INFO [85/752] Repair commands completed
37176 Sep 22 23:15:05.295 INFO Pop front: ReconcileIO { id: ReconciliationId(85), op: ExtentClose { repair_id: ReconciliationId(85), extent_id: 65 }, state: ClientData([New, New, New]) }
37177 Sep 22 23:15:05.295 INFO Sent repair work, now wait for resp
37178 Sep 22 23:15:05.295 INFO [0] received reconcile message
37179 Sep 22 23:15:05.295 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(85), op: ExtentClose { repair_id: ReconciliationId(85), extent_id: 65 }, state: ClientData([InProgress, New, New]) }, : downstairs
37180 Sep 22 23:15:05.295 INFO [0] client ExtentClose { repair_id: ReconciliationId(85), extent_id: 65 }
37181 Sep 22 23:15:05.295 INFO [1] received reconcile message
37182 Sep 22 23:15:05.295 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(85), op: ExtentClose { repair_id: ReconciliationId(85), extent_id: 65 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37183 Sep 22 23:15:05.296 INFO [1] client ExtentClose { repair_id: ReconciliationId(85), extent_id: 65 }
37184 Sep 22 23:15:05.296 INFO [2] received reconcile message
37185 Sep 22 23:15:05.296 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(85), op: ExtentClose { repair_id: ReconciliationId(85), extent_id: 65 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37186 Sep 22 23:15:05.296 INFO [2] client ExtentClose { repair_id: ReconciliationId(85), extent_id: 65 }
37187 Sep 22 23:15:05.296 DEBG 85 Close extent 65
37188 Sep 22 23:15:05.296 DEBG 85 Close extent 65
37189 Sep 22 23:15:05.296 DEBG 85 Close extent 65
37190 Sep 22 23:15:05.297 DEBG [2] It's time to notify for 85
37191 Sep 22 23:15:05.297 INFO Completion from [2] id:85 status:true
37192 Sep 22 23:15:05.297 INFO [86/752] Repair commands completed
37193 Sep 22 23:15:05.297 INFO Pop front: ReconcileIO { id: ReconciliationId(86), op: ExtentRepair { repair_id: ReconciliationId(86), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37194 Sep 22 23:15:05.297 INFO Sent repair work, now wait for resp
37195 Sep 22 23:15:05.297 INFO [0] received reconcile message
37196 Sep 22 23:15:05.297 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(86), op: ExtentRepair { repair_id: ReconciliationId(86), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37197 Sep 22 23:15:05.297 INFO [0] client ExtentRepair { repair_id: ReconciliationId(86), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37198 Sep 22 23:15:05.297 INFO [0] Sending repair request ReconciliationId(86)
37199 Sep 22 23:15:05.297 INFO [1] received reconcile message
37200 Sep 22 23:15:05.297 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(86), op: ExtentRepair { repair_id: ReconciliationId(86), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37201 Sep 22 23:15:05.297 INFO [1] client ExtentRepair { repair_id: ReconciliationId(86), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37202 Sep 22 23:15:05.297 INFO [1] No action required ReconciliationId(86)
37203 Sep 22 23:15:05.297 INFO [2] received reconcile message
37204 Sep 22 23:15:05.297 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(86), op: ExtentRepair { repair_id: ReconciliationId(86), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37205 Sep 22 23:15:05.297 INFO [2] client ExtentRepair { repair_id: ReconciliationId(86), extent_id: 65, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37206 Sep 22 23:15:05.297 INFO [2] No action required ReconciliationId(86)
37207 Sep 22 23:15:05.297 DEBG 86 Repair extent 65 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
37208 Sep 22 23:15:05.297 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/041.copy"
37209 Sep 22 23:15:05.360 INFO accepted connection, remote_addr: 127.0.0.1:60292, local_addr: 127.0.0.1:46213, task: repair
37210 Sep 22 23:15:05.360 TRCE incoming request, uri: /extent/65/files, method: GET, req_id: 9b156b1c-7db5-41f5-9b30-015bbd1e6a61, remote_addr: 127.0.0.1:60292, local_addr: 127.0.0.1:46213, task: repair
37211 Sep 22 23:15:05.360 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/65/files, method: GET, req_id: 9b156b1c-7db5-41f5-9b30-015bbd1e6a61, remote_addr: 127.0.0.1:60292, local_addr: 127.0.0.1:46213, task: repair
37212 Sep 22 23:15:05.360 INFO eid:65 Found repair files: ["041", "041.db"]
37213 Sep 22 23:15:05.361 TRCE incoming request, uri: /newextent/65/data, method: GET, req_id: 40cab528-10b3-486c-9b74-258628ea0a15, remote_addr: 127.0.0.1:60292, local_addr: 127.0.0.1:46213, task: repair
37214 Sep 22 23:15:05.361 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/65/data, method: GET, req_id: 40cab528-10b3-486c-9b74-258628ea0a15, remote_addr: 127.0.0.1:60292, local_addr: 127.0.0.1:46213, task: repair
37215 Sep 22 23:15:05.366 TRCE incoming request, uri: /newextent/65/db, method: GET, req_id: f2b6d242-86da-4686-acb1-daadfdbe53ac, remote_addr: 127.0.0.1:60292, local_addr: 127.0.0.1:46213, task: repair
37216 Sep 22 23:15:05.367 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/65/db, method: GET, req_id: f2b6d242-86da-4686-acb1-daadfdbe53ac, remote_addr: 127.0.0.1:60292, local_addr: 127.0.0.1:46213, task: repair
37217 Sep 22 23:15:05.368 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/041.copy" to "/tmp/downstairs-vrx8aK6L/00/000/041.replace"
37218 Sep 22 23:15:05.368 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37219 Sep 22 23:15:05.369 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/041.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
37220 Sep 22 23:15:05.369 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/041"
37221 Sep 22 23:15:05.369 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/041.db"
37222 Sep 22 23:15:05.369 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37223 Sep 22 23:15:05.369 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/041.replace" to "/tmp/downstairs-vrx8aK6L/00/000/041.completed"
37224 Sep 22 23:15:05.369 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37225 Sep 22 23:15:05.369 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37226 Sep 22 23:15:05.369 DEBG [0] It's time to notify for 86
37227 Sep 22 23:15:05.369 INFO Completion from [0] id:86 status:true
37228 Sep 22 23:15:05.369 INFO [87/752] Repair commands completed
37229 Sep 22 23:15:05.369 INFO Pop front: ReconcileIO { id: ReconciliationId(87), op: ExtentReopen { repair_id: ReconciliationId(87), extent_id: 65 }, state: ClientData([New, New, New]) }
37230 Sep 22 23:15:05.369 INFO Sent repair work, now wait for resp
37231 Sep 22 23:15:05.369 INFO [0] received reconcile message
37232 Sep 22 23:15:05.369 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(87), op: ExtentReopen { repair_id: ReconciliationId(87), extent_id: 65 }, state: ClientData([InProgress, New, New]) }, : downstairs
37233 Sep 22 23:15:05.369 INFO [0] client ExtentReopen { repair_id: ReconciliationId(87), extent_id: 65 }
37234 Sep 22 23:15:05.369 INFO [1] received reconcile message
37235 Sep 22 23:15:05.370 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(87), op: ExtentReopen { repair_id: ReconciliationId(87), extent_id: 65 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37236 Sep 22 23:15:05.370 INFO [1] client ExtentReopen { repair_id: ReconciliationId(87), extent_id: 65 }
37237 Sep 22 23:15:05.370 INFO [2] received reconcile message
37238 Sep 22 23:15:05.370 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(87), op: ExtentReopen { repair_id: ReconciliationId(87), extent_id: 65 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37239 Sep 22 23:15:05.370 INFO [2] client ExtentReopen { repair_id: ReconciliationId(87), extent_id: 65 }
37240 Sep 22 23:15:05.370 DEBG 87 Reopen extent 65
37241 Sep 22 23:15:05.370 DEBG 87 Reopen extent 65
37242 Sep 22 23:15:05.371 DEBG 87 Reopen extent 65
37243 Sep 22 23:15:05.371 DEBG [2] It's time to notify for 87
37244 Sep 22 23:15:05.371 INFO Completion from [2] id:87 status:true
37245 Sep 22 23:15:05.371 INFO [88/752] Repair commands completed
37246 Sep 22 23:15:05.371 INFO Pop front: ReconcileIO { id: ReconciliationId(88), op: ExtentFlush { repair_id: ReconciliationId(88), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37247 Sep 22 23:15:05.371 INFO Sent repair work, now wait for resp
37248 Sep 22 23:15:05.372 INFO [0] received reconcile message
37249 Sep 22 23:15:05.372 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(88), op: ExtentFlush { repair_id: ReconciliationId(88), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37250 Sep 22 23:15:05.372 INFO [0] client ExtentFlush { repair_id: ReconciliationId(88), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37251 Sep 22 23:15:05.372 INFO [1] received reconcile message
37252 Sep 22 23:15:05.372 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(88), op: ExtentFlush { repair_id: ReconciliationId(88), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37253 Sep 22 23:15:05.372 INFO [1] client ExtentFlush { repair_id: ReconciliationId(88), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37254 Sep 22 23:15:05.372 INFO [2] received reconcile message
37255 Sep 22 23:15:05.372 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(88), op: ExtentFlush { repair_id: ReconciliationId(88), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37256 Sep 22 23:15:05.372 INFO [2] client ExtentFlush { repair_id: ReconciliationId(88), extent_id: 45, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37257 Sep 22 23:15:05.372 DEBG 88 Flush extent 45 with f:2 g:2
37258 Sep 22 23:15:05.372 DEBG Flush just extent 45 with f:2 and g:2
37259 Sep 22 23:15:05.372 DEBG [1] It's time to notify for 88
37260 Sep 22 23:15:05.372 INFO Completion from [1] id:88 status:true
37261 Sep 22 23:15:05.372 INFO [89/752] Repair commands completed
37262 Sep 22 23:15:05.372 INFO Pop front: ReconcileIO { id: ReconciliationId(89), op: ExtentClose { repair_id: ReconciliationId(89), extent_id: 45 }, state: ClientData([New, New, New]) }
37263 Sep 22 23:15:05.372 INFO Sent repair work, now wait for resp
37264 Sep 22 23:15:05.372 INFO [0] received reconcile message
37265 Sep 22 23:15:05.372 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(89), op: ExtentClose { repair_id: ReconciliationId(89), extent_id: 45 }, state: ClientData([InProgress, New, New]) }, : downstairs
37266 Sep 22 23:15:05.372 INFO [0] client ExtentClose { repair_id: ReconciliationId(89), extent_id: 45 }
37267 Sep 22 23:15:05.372 INFO [1] received reconcile message
37268 Sep 22 23:15:05.372 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(89), op: ExtentClose { repair_id: ReconciliationId(89), extent_id: 45 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37269 Sep 22 23:15:05.372 INFO [1] client ExtentClose { repair_id: ReconciliationId(89), extent_id: 45 }
37270 Sep 22 23:15:05.372 INFO [2] received reconcile message
37271 Sep 22 23:15:05.372 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(89), op: ExtentClose { repair_id: ReconciliationId(89), extent_id: 45 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37272 Sep 22 23:15:05.372 INFO [2] client ExtentClose { repair_id: ReconciliationId(89), extent_id: 45 }
37273 Sep 22 23:15:05.372 DEBG 89 Close extent 45
37274 Sep 22 23:15:05.373 DEBG 89 Close extent 45
37275 Sep 22 23:15:05.373 DEBG 89 Close extent 45
37276 Sep 22 23:15:05.373 DEBG [2] It's time to notify for 89
37277 Sep 22 23:15:05.373 INFO Completion from [2] id:89 status:true
37278 Sep 22 23:15:05.373 INFO [90/752] Repair commands completed
37279 Sep 22 23:15:05.373 INFO Pop front: ReconcileIO { id: ReconciliationId(90), op: ExtentRepair { repair_id: ReconciliationId(90), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37280 Sep 22 23:15:05.373 INFO Sent repair work, now wait for resp
37281 Sep 22 23:15:05.373 INFO [0] received reconcile message
37282 Sep 22 23:15:05.374 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(90), op: ExtentRepair { repair_id: ReconciliationId(90), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37283 Sep 22 23:15:05.374 INFO [0] client ExtentRepair { repair_id: ReconciliationId(90), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37284 Sep 22 23:15:05.374 INFO [0] Sending repair request ReconciliationId(90)
37285 Sep 22 23:15:05.374 INFO [1] received reconcile message
37286 Sep 22 23:15:05.374 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(90), op: ExtentRepair { repair_id: ReconciliationId(90), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37287 Sep 22 23:15:05.374 INFO [1] client ExtentRepair { repair_id: ReconciliationId(90), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37288 Sep 22 23:15:05.374 INFO [1] No action required ReconciliationId(90)
37289 Sep 22 23:15:05.374 INFO [2] received reconcile message
37290 Sep 22 23:15:05.374 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(90), op: ExtentRepair { repair_id: ReconciliationId(90), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37291 Sep 22 23:15:05.374 INFO [2] client ExtentRepair { repair_id: ReconciliationId(90), extent_id: 45, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37292 Sep 22 23:15:05.374 INFO [2] No action required ReconciliationId(90)
37293 Sep 22 23:15:05.374 DEBG 90 Repair extent 45 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
37294 Sep 22 23:15:05.374 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/02D.copy"
37295 Sep 22 23:15:05.389 DEBG up_ds_listen was notified
37296 Sep 22 23:15:05.389 DEBG up_ds_listen process 1073
37297 Sep 22 23:15:05.389 DEBG [A] ack job 1073:74, : downstairs
37298 Sep 22 23:15:05.389 DEBG [rc] retire 1073 clears [JobId(1072), JobId(1073)], : downstairs
37299 Sep 22 23:15:05.389 DEBG up_ds_listen checked 1 jobs, back to waiting
37300 Sep 22 23:15:05.399 INFO [lossy] skipping 1075
37301 Sep 22 23:15:05.399 DEBG Flush :1075 extent_limit None deps:[JobId(1074), JobId(1073)] res:true f:27 g:1
37302 Sep 22 23:15:05.405 DEBG Read :1076 deps:[JobId(1075)] res:true
37303 Sep 22 23:15:05.427 DEBG Flush :1075 extent_limit None deps:[JobId(1074), JobId(1073)] res:true f:27 g:1
37304 Sep 22 23:15:05.427 WARN returning error on read!
37305 Sep 22 23:15:05.427 DEBG Read :1076 deps:[JobId(1075)] res:false
37306 Sep 22 23:15:05.427 INFO [lossy] skipping 1076
37307 Sep 22 23:15:05.433 DEBG Read :1076 deps:[JobId(1075)] res:true
37308 Sep 22 23:15:05.437 INFO accepted connection, remote_addr: 127.0.0.1:64899, local_addr: 127.0.0.1:46213, task: repair
37309 Sep 22 23:15:05.437 TRCE incoming request, uri: /extent/45/files, method: GET, req_id: 7665d8c0-3237-4ecd-90ae-e279e9b97fb5, remote_addr: 127.0.0.1:64899, local_addr: 127.0.0.1:46213, task: repair
37310 Sep 22 23:15:05.437 INFO request completed, latency_us: 219, response_code: 200, uri: /extent/45/files, method: GET, req_id: 7665d8c0-3237-4ecd-90ae-e279e9b97fb5, remote_addr: 127.0.0.1:64899, local_addr: 127.0.0.1:46213, task: repair
37311 Sep 22 23:15:05.438 INFO eid:45 Found repair files: ["02D", "02D.db"]
37312 Sep 22 23:15:05.438 TRCE incoming request, uri: /newextent/45/data, method: GET, req_id: d0224391-7635-4a4b-bb92-77afd82e7c7f, remote_addr: 127.0.0.1:64899, local_addr: 127.0.0.1:46213, task: repair
37313 Sep 22 23:15:05.438 INFO request completed, latency_us: 326, response_code: 200, uri: /newextent/45/data, method: GET, req_id: d0224391-7635-4a4b-bb92-77afd82e7c7f, remote_addr: 127.0.0.1:64899, local_addr: 127.0.0.1:46213, task: repair
37314 Sep 22 23:15:05.444 TRCE incoming request, uri: /newextent/45/db, method: GET, req_id: 6efbea0d-463f-4565-a920-2c6829832ccb, remote_addr: 127.0.0.1:64899, local_addr: 127.0.0.1:46213, task: repair
37315 Sep 22 23:15:05.444 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/45/db, method: GET, req_id: 6efbea0d-463f-4565-a920-2c6829832ccb, remote_addr: 127.0.0.1:64899, local_addr: 127.0.0.1:46213, task: repair
37316 Sep 22 23:15:05.445 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/02D.copy" to "/tmp/downstairs-vrx8aK6L/00/000/02D.replace"
37317 Sep 22 23:15:05.445 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37318 Sep 22 23:15:05.446 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/02D.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
37319 Sep 22 23:15:05.446 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/02D"
37320 Sep 22 23:15:05.446 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/02D.db"
37321 Sep 22 23:15:05.446 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37322 Sep 22 23:15:05.446 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/02D.replace" to "/tmp/downstairs-vrx8aK6L/00/000/02D.completed"
37323 Sep 22 23:15:05.446 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37324 Sep 22 23:15:05.446 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37325 Sep 22 23:15:05.447 DEBG [0] It's time to notify for 90
37326 Sep 22 23:15:05.447 INFO Completion from [0] id:90 status:true
37327 Sep 22 23:15:05.447 INFO [91/752] Repair commands completed
37328 Sep 22 23:15:05.447 INFO Pop front: ReconcileIO { id: ReconciliationId(91), op: ExtentReopen { repair_id: ReconciliationId(91), extent_id: 45 }, state: ClientData([New, New, New]) }
37329 Sep 22 23:15:05.447 INFO Sent repair work, now wait for resp
37330 Sep 22 23:15:05.447 INFO [0] received reconcile message
37331 Sep 22 23:15:05.447 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(91), op: ExtentReopen { repair_id: ReconciliationId(91), extent_id: 45 }, state: ClientData([InProgress, New, New]) }, : downstairs
37332 Sep 22 23:15:05.447 INFO [0] client ExtentReopen { repair_id: ReconciliationId(91), extent_id: 45 }
37333 Sep 22 23:15:05.447 INFO [1] received reconcile message
37334 Sep 22 23:15:05.447 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(91), op: ExtentReopen { repair_id: ReconciliationId(91), extent_id: 45 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37335 Sep 22 23:15:05.447 INFO [1] client ExtentReopen { repair_id: ReconciliationId(91), extent_id: 45 }
37336 Sep 22 23:15:05.447 INFO [2] received reconcile message
37337 Sep 22 23:15:05.447 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(91), op: ExtentReopen { repair_id: ReconciliationId(91), extent_id: 45 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37338 Sep 22 23:15:05.447 INFO [2] client ExtentReopen { repair_id: ReconciliationId(91), extent_id: 45 }
37339 Sep 22 23:15:05.447 DEBG 91 Reopen extent 45
37340 Sep 22 23:15:05.448 DEBG 91 Reopen extent 45
37341 Sep 22 23:15:05.448 DEBG 91 Reopen extent 45
37342 Sep 22 23:15:05.449 DEBG [2] It's time to notify for 91
37343 Sep 22 23:15:05.449 INFO Completion from [2] id:91 status:true
37344 Sep 22 23:15:05.449 INFO [92/752] Repair commands completed
37345 Sep 22 23:15:05.449 INFO Pop front: ReconcileIO { id: ReconciliationId(92), op: ExtentFlush { repair_id: ReconciliationId(92), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37346 Sep 22 23:15:05.449 INFO Sent repair work, now wait for resp
37347 Sep 22 23:15:05.449 INFO [0] received reconcile message
37348 Sep 22 23:15:05.449 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(92), op: ExtentFlush { repair_id: ReconciliationId(92), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37349 Sep 22 23:15:05.449 INFO [0] client ExtentFlush { repair_id: ReconciliationId(92), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37350 Sep 22 23:15:05.449 INFO [1] received reconcile message
37351 Sep 22 23:15:05.449 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(92), op: ExtentFlush { repair_id: ReconciliationId(92), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37352 Sep 22 23:15:05.449 INFO [1] client ExtentFlush { repair_id: ReconciliationId(92), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37353 Sep 22 23:15:05.449 INFO [2] received reconcile message
37354 Sep 22 23:15:05.449 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(92), op: ExtentFlush { repair_id: ReconciliationId(92), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37355 Sep 22 23:15:05.449 INFO [2] client ExtentFlush { repair_id: ReconciliationId(92), extent_id: 75, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37356 Sep 22 23:15:05.449 DEBG 92 Flush extent 75 with f:2 g:2
37357 Sep 22 23:15:05.449 DEBG Flush just extent 75 with f:2 and g:2
37358 Sep 22 23:15:05.450 DEBG [1] It's time to notify for 92
37359 Sep 22 23:15:05.450 INFO Completion from [1] id:92 status:true
37360 Sep 22 23:15:05.450 INFO [93/752] Repair commands completed
37361 Sep 22 23:15:05.450 INFO Pop front: ReconcileIO { id: ReconciliationId(93), op: ExtentClose { repair_id: ReconciliationId(93), extent_id: 75 }, state: ClientData([New, New, New]) }
37362 Sep 22 23:15:05.450 INFO Sent repair work, now wait for resp
37363 Sep 22 23:15:05.450 INFO [0] received reconcile message
37364 Sep 22 23:15:05.450 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(93), op: ExtentClose { repair_id: ReconciliationId(93), extent_id: 75 }, state: ClientData([InProgress, New, New]) }, : downstairs
37365 Sep 22 23:15:05.450 INFO [0] client ExtentClose { repair_id: ReconciliationId(93), extent_id: 75 }
37366 Sep 22 23:15:05.450 INFO [1] received reconcile message
37367 Sep 22 23:15:05.450 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(93), op: ExtentClose { repair_id: ReconciliationId(93), extent_id: 75 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37368 Sep 22 23:15:05.450 INFO [1] client ExtentClose { repair_id: ReconciliationId(93), extent_id: 75 }
37369 Sep 22 23:15:05.450 INFO [2] received reconcile message
37370 Sep 22 23:15:05.450 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(93), op: ExtentClose { repair_id: ReconciliationId(93), extent_id: 75 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37371 Sep 22 23:15:05.450 INFO [2] client ExtentClose { repair_id: ReconciliationId(93), extent_id: 75 }
37372 Sep 22 23:15:05.450 DEBG 93 Close extent 75
37373 Sep 22 23:15:05.450 DEBG 93 Close extent 75
37374 Sep 22 23:15:05.451 DEBG 93 Close extent 75
37375 Sep 22 23:15:05.451 DEBG [2] It's time to notify for 93
37376 Sep 22 23:15:05.451 INFO Completion from [2] id:93 status:true
37377 Sep 22 23:15:05.451 INFO [94/752] Repair commands completed
37378 Sep 22 23:15:05.451 INFO Pop front: ReconcileIO { id: ReconciliationId(94), op: ExtentRepair { repair_id: ReconciliationId(94), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37379 Sep 22 23:15:05.451 INFO Sent repair work, now wait for resp
37380 Sep 22 23:15:05.451 INFO [0] received reconcile message
37381 Sep 22 23:15:05.451 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(94), op: ExtentRepair { repair_id: ReconciliationId(94), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37382 Sep 22 23:15:05.451 INFO [0] client ExtentRepair { repair_id: ReconciliationId(94), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37383 Sep 22 23:15:05.451 INFO [0] Sending repair request ReconciliationId(94)
37384 Sep 22 23:15:05.451 INFO [1] received reconcile message
37385 Sep 22 23:15:05.451 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(94), op: ExtentRepair { repair_id: ReconciliationId(94), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37386 Sep 22 23:15:05.451 INFO [1] client ExtentRepair { repair_id: ReconciliationId(94), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37387 Sep 22 23:15:05.451 INFO [1] No action required ReconciliationId(94)
37388 Sep 22 23:15:05.451 INFO [2] received reconcile message
37389 Sep 22 23:15:05.451 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(94), op: ExtentRepair { repair_id: ReconciliationId(94), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37390 Sep 22 23:15:05.451 INFO [2] client ExtentRepair { repair_id: ReconciliationId(94), extent_id: 75, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37391 Sep 22 23:15:05.451 INFO [2] No action required ReconciliationId(94)
37392 Sep 22 23:15:05.451 DEBG 94 Repair extent 75 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
37393 Sep 22 23:15:05.451 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/04B.copy"
37394 Sep 22 23:15:05.515 INFO accepted connection, remote_addr: 127.0.0.1:50873, local_addr: 127.0.0.1:46213, task: repair
37395 Sep 22 23:15:05.515 TRCE incoming request, uri: /extent/75/files, method: GET, req_id: c8a73894-aa38-4869-8bcf-d5050c96838e, remote_addr: 127.0.0.1:50873, local_addr: 127.0.0.1:46213, task: repair
37396 Sep 22 23:15:05.515 INFO request completed, latency_us: 195, response_code: 200, uri: /extent/75/files, method: GET, req_id: c8a73894-aa38-4869-8bcf-d5050c96838e, remote_addr: 127.0.0.1:50873, local_addr: 127.0.0.1:46213, task: repair
37397 Sep 22 23:15:05.515 INFO eid:75 Found repair files: ["04B", "04B.db"]
37398 Sep 22 23:15:05.516 TRCE incoming request, uri: /newextent/75/data, method: GET, req_id: 69ae22b0-2cb2-4ddc-ac03-774a417fdc84, remote_addr: 127.0.0.1:50873, local_addr: 127.0.0.1:46213, task: repair
37399 Sep 22 23:15:05.516 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/75/data, method: GET, req_id: 69ae22b0-2cb2-4ddc-ac03-774a417fdc84, remote_addr: 127.0.0.1:50873, local_addr: 127.0.0.1:46213, task: repair
37400 Sep 22 23:15:05.521 TRCE incoming request, uri: /newextent/75/db, method: GET, req_id: 017650c0-2c81-4539-b4a3-3feebbaed40c, remote_addr: 127.0.0.1:50873, local_addr: 127.0.0.1:46213, task: repair
37401 Sep 22 23:15:05.521 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/75/db, method: GET, req_id: 017650c0-2c81-4539-b4a3-3feebbaed40c, remote_addr: 127.0.0.1:50873, local_addr: 127.0.0.1:46213, task: repair
37402 Sep 22 23:15:05.523 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/04B.copy" to "/tmp/downstairs-vrx8aK6L/00/000/04B.replace"
37403 Sep 22 23:15:05.523 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37404 Sep 22 23:15:05.523 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/04B.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
37405 Sep 22 23:15:05.524 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/04B"
37406 Sep 22 23:15:05.524 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/04B.db"
37407 Sep 22 23:15:05.524 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37408 Sep 22 23:15:05.524 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/04B.replace" to "/tmp/downstairs-vrx8aK6L/00/000/04B.completed"
37409 Sep 22 23:15:05.524 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37410 Sep 22 23:15:05.524 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37411 Sep 22 23:15:05.524 DEBG [0] It's time to notify for 94
37412 Sep 22 23:15:05.524 INFO Completion from [0] id:94 status:true
37413 Sep 22 23:15:05.524 INFO [95/752] Repair commands completed
37414 Sep 22 23:15:05.524 INFO Pop front: ReconcileIO { id: ReconciliationId(95), op: ExtentReopen { repair_id: ReconciliationId(95), extent_id: 75 }, state: ClientData([New, New, New]) }
37415 Sep 22 23:15:05.524 INFO Sent repair work, now wait for resp
37416 Sep 22 23:15:05.524 INFO [0] received reconcile message
37417 Sep 22 23:15:05.524 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(95), op: ExtentReopen { repair_id: ReconciliationId(95), extent_id: 75 }, state: ClientData([InProgress, New, New]) }, : downstairs
37418 Sep 22 23:15:05.524 INFO [0] client ExtentReopen { repair_id: ReconciliationId(95), extent_id: 75 }
37419 Sep 22 23:15:05.524 INFO [1] received reconcile message
37420 Sep 22 23:15:05.524 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(95), op: ExtentReopen { repair_id: ReconciliationId(95), extent_id: 75 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37421 Sep 22 23:15:05.524 INFO [1] client ExtentReopen { repair_id: ReconciliationId(95), extent_id: 75 }
37422 Sep 22 23:15:05.524 INFO [2] received reconcile message
37423 Sep 22 23:15:05.524 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(95), op: ExtentReopen { repair_id: ReconciliationId(95), extent_id: 75 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37424 Sep 22 23:15:05.525 INFO [2] client ExtentReopen { repair_id: ReconciliationId(95), extent_id: 75 }
37425 Sep 22 23:15:05.525 DEBG 95 Reopen extent 75
37426 Sep 22 23:15:05.525 DEBG 95 Reopen extent 75
37427 Sep 22 23:15:05.526 DEBG 95 Reopen extent 75
37428 Sep 22 23:15:05.526 DEBG [2] It's time to notify for 95
37429 Sep 22 23:15:05.526 INFO Completion from [2] id:95 status:true
37430 Sep 22 23:15:05.526 INFO [96/752] Repair commands completed
37431 Sep 22 23:15:05.526 INFO Pop front: ReconcileIO { id: ReconciliationId(96), op: ExtentFlush { repair_id: ReconciliationId(96), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37432 Sep 22 23:15:05.526 INFO Sent repair work, now wait for resp
37433 Sep 22 23:15:05.526 INFO [0] received reconcile message
37434 Sep 22 23:15:05.526 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(96), op: ExtentFlush { repair_id: ReconciliationId(96), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37435 Sep 22 23:15:05.526 INFO [0] client ExtentFlush { repair_id: ReconciliationId(96), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37436 Sep 22 23:15:05.527 INFO [1] received reconcile message
37437 Sep 22 23:15:05.527 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(96), op: ExtentFlush { repair_id: ReconciliationId(96), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37438 Sep 22 23:15:05.527 INFO [1] client ExtentFlush { repair_id: ReconciliationId(96), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37439 Sep 22 23:15:05.527 INFO [2] received reconcile message
37440 Sep 22 23:15:05.527 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(96), op: ExtentFlush { repair_id: ReconciliationId(96), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37441 Sep 22 23:15:05.527 INFO [2] client ExtentFlush { repair_id: ReconciliationId(96), extent_id: 68, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37442 Sep 22 23:15:05.527 DEBG 96 Flush extent 68 with f:2 g:2
37443 Sep 22 23:15:05.527 DEBG Flush just extent 68 with f:2 and g:2
37444 Sep 22 23:15:05.527 DEBG [1] It's time to notify for 96
37445 Sep 22 23:15:05.527 INFO Completion from [1] id:96 status:true
37446 Sep 22 23:15:05.527 INFO [97/752] Repair commands completed
37447 Sep 22 23:15:05.527 INFO Pop front: ReconcileIO { id: ReconciliationId(97), op: ExtentClose { repair_id: ReconciliationId(97), extent_id: 68 }, state: ClientData([New, New, New]) }
37448 Sep 22 23:15:05.527 INFO Sent repair work, now wait for resp
37449 Sep 22 23:15:05.527 INFO [0] received reconcile message
37450 Sep 22 23:15:05.527 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(97), op: ExtentClose { repair_id: ReconciliationId(97), extent_id: 68 }, state: ClientData([InProgress, New, New]) }, : downstairs
37451 Sep 22 23:15:05.527 INFO [0] client ExtentClose { repair_id: ReconciliationId(97), extent_id: 68 }
37452 Sep 22 23:15:05.527 INFO [1] received reconcile message
37453 Sep 22 23:15:05.527 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(97), op: ExtentClose { repair_id: ReconciliationId(97), extent_id: 68 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37454 Sep 22 23:15:05.527 INFO [1] client ExtentClose { repair_id: ReconciliationId(97), extent_id: 68 }
37455 Sep 22 23:15:05.527 INFO [2] received reconcile message
37456 Sep 22 23:15:05.527 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(97), op: ExtentClose { repair_id: ReconciliationId(97), extent_id: 68 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37457 Sep 22 23:15:05.527 INFO [2] client ExtentClose { repair_id: ReconciliationId(97), extent_id: 68 }
37458 Sep 22 23:15:05.527 DEBG 97 Close extent 68
37459 Sep 22 23:15:05.528 DEBG 97 Close extent 68
37460 Sep 22 23:15:05.528 DEBG 97 Close extent 68
37461 Sep 22 23:15:05.528 DEBG [2] It's time to notify for 97
37462 Sep 22 23:15:05.528 INFO Completion from [2] id:97 status:true
37463 Sep 22 23:15:05.528 INFO [98/752] Repair commands completed
37464 Sep 22 23:15:05.528 INFO Pop front: ReconcileIO { id: ReconciliationId(98), op: ExtentRepair { repair_id: ReconciliationId(98), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37465 Sep 22 23:15:05.528 INFO Sent repair work, now wait for resp
37466 Sep 22 23:15:05.528 INFO [0] received reconcile message
37467 Sep 22 23:15:05.528 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(98), op: ExtentRepair { repair_id: ReconciliationId(98), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37468 Sep 22 23:15:05.528 INFO [0] client ExtentRepair { repair_id: ReconciliationId(98), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37469 Sep 22 23:15:05.528 INFO [0] Sending repair request ReconciliationId(98)
37470 Sep 22 23:15:05.529 INFO [1] received reconcile message
37471 Sep 22 23:15:05.529 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(98), op: ExtentRepair { repair_id: ReconciliationId(98), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37472 Sep 22 23:15:05.529 INFO [1] client ExtentRepair { repair_id: ReconciliationId(98), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37473 Sep 22 23:15:05.529 INFO [1] No action required ReconciliationId(98)
37474 Sep 22 23:15:05.529 INFO [2] received reconcile message
37475 Sep 22 23:15:05.529 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(98), op: ExtentRepair { repair_id: ReconciliationId(98), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37476 Sep 22 23:15:05.529 INFO [2] client ExtentRepair { repair_id: ReconciliationId(98), extent_id: 68, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37477 Sep 22 23:15:05.529 INFO [2] No action required ReconciliationId(98)
37478 Sep 22 23:15:05.529 DEBG 98 Repair extent 68 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
37479 Sep 22 23:15:05.529 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/044.copy"
37480 Sep 22 23:15:05.590 INFO accepted connection, remote_addr: 127.0.0.1:41162, local_addr: 127.0.0.1:46213, task: repair
37481 Sep 22 23:15:05.591 TRCE incoming request, uri: /extent/68/files, method: GET, req_id: 41302677-ca33-48a0-9837-6aedfabdb11c, remote_addr: 127.0.0.1:41162, local_addr: 127.0.0.1:46213, task: repair
37482 Sep 22 23:15:05.591 INFO request completed, latency_us: 212, response_code: 200, uri: /extent/68/files, method: GET, req_id: 41302677-ca33-48a0-9837-6aedfabdb11c, remote_addr: 127.0.0.1:41162, local_addr: 127.0.0.1:46213, task: repair
37483 Sep 22 23:15:05.591 INFO eid:68 Found repair files: ["044", "044.db"]
37484 Sep 22 23:15:05.591 TRCE incoming request, uri: /newextent/68/data, method: GET, req_id: 9b4ffae6-9ad6-4917-9440-507085965135, remote_addr: 127.0.0.1:41162, local_addr: 127.0.0.1:46213, task: repair
37485 Sep 22 23:15:05.592 INFO request completed, latency_us: 329, response_code: 200, uri: /newextent/68/data, method: GET, req_id: 9b4ffae6-9ad6-4917-9440-507085965135, remote_addr: 127.0.0.1:41162, local_addr: 127.0.0.1:46213, task: repair
37486 Sep 22 23:15:05.597 TRCE incoming request, uri: /newextent/68/db, method: GET, req_id: 8c9346ee-33e6-4938-b5e9-c56b76b88056, remote_addr: 127.0.0.1:41162, local_addr: 127.0.0.1:46213, task: repair
37487 Sep 22 23:15:05.597 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/68/db, method: GET, req_id: 8c9346ee-33e6-4938-b5e9-c56b76b88056, remote_addr: 127.0.0.1:41162, local_addr: 127.0.0.1:46213, task: repair
37488 Sep 22 23:15:05.598 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/044.copy" to "/tmp/downstairs-vrx8aK6L/00/000/044.replace"
37489 Sep 22 23:15:05.598 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37490 Sep 22 23:15:05.599 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/044.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
37491 Sep 22 23:15:05.599 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/044"
37492 Sep 22 23:15:05.599 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/044.db"
37493 Sep 22 23:15:05.599 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37494 Sep 22 23:15:05.599 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/044.replace" to "/tmp/downstairs-vrx8aK6L/00/000/044.completed"
37495 Sep 22 23:15:05.600 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37496 Sep 22 23:15:05.600 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37497 Sep 22 23:15:05.600 DEBG [0] It's time to notify for 98
37498 Sep 22 23:15:05.600 INFO Completion from [0] id:98 status:true
37499 Sep 22 23:15:05.600 INFO [99/752] Repair commands completed
37500 Sep 22 23:15:05.600 INFO Pop front: ReconcileIO { id: ReconciliationId(99), op: ExtentReopen { repair_id: ReconciliationId(99), extent_id: 68 }, state: ClientData([New, New, New]) }
37501 Sep 22 23:15:05.600 INFO Sent repair work, now wait for resp
37502 Sep 22 23:15:05.600 INFO [0] received reconcile message
37503 Sep 22 23:15:05.600 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(99), op: ExtentReopen { repair_id: ReconciliationId(99), extent_id: 68 }, state: ClientData([InProgress, New, New]) }, : downstairs
37504 Sep 22 23:15:05.600 INFO [0] client ExtentReopen { repair_id: ReconciliationId(99), extent_id: 68 }
37505 Sep 22 23:15:05.600 INFO [1] received reconcile message
37506 Sep 22 23:15:05.600 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(99), op: ExtentReopen { repair_id: ReconciliationId(99), extent_id: 68 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37507 Sep 22 23:15:05.600 INFO [1] client ExtentReopen { repair_id: ReconciliationId(99), extent_id: 68 }
37508 Sep 22 23:15:05.600 INFO [2] received reconcile message
37509 Sep 22 23:15:05.600 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(99), op: ExtentReopen { repair_id: ReconciliationId(99), extent_id: 68 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37510 Sep 22 23:15:05.600 INFO [2] client ExtentReopen { repair_id: ReconciliationId(99), extent_id: 68 }
37511 Sep 22 23:15:05.600 DEBG 99 Reopen extent 68
37512 Sep 22 23:15:05.601 DEBG 99 Reopen extent 68
37513 Sep 22 23:15:05.602 DEBG 99 Reopen extent 68
37514 Sep 22 23:15:05.602 DEBG [2] It's time to notify for 99
37515 Sep 22 23:15:05.602 INFO Completion from [2] id:99 status:true
37516 Sep 22 23:15:05.602 INFO [100/752] Repair commands completed
37517 Sep 22 23:15:05.602 INFO Pop front: ReconcileIO { id: ReconciliationId(100), op: ExtentFlush { repair_id: ReconciliationId(100), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37518 Sep 22 23:15:05.602 INFO Sent repair work, now wait for resp
37519 Sep 22 23:15:05.602 INFO [0] received reconcile message
37520 Sep 22 23:15:05.602 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(100), op: ExtentFlush { repair_id: ReconciliationId(100), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37521 Sep 22 23:15:05.602 INFO [0] client ExtentFlush { repair_id: ReconciliationId(100), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37522 Sep 22 23:15:05.602 INFO [1] received reconcile message
37523 Sep 22 23:15:05.602 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(100), op: ExtentFlush { repair_id: ReconciliationId(100), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37524 Sep 22 23:15:05.602 INFO [1] client ExtentFlush { repair_id: ReconciliationId(100), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37525 Sep 22 23:15:05.602 INFO [2] received reconcile message
37526 Sep 22 23:15:05.602 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(100), op: ExtentFlush { repair_id: ReconciliationId(100), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37527 Sep 22 23:15:05.602 INFO [2] client ExtentFlush { repair_id: ReconciliationId(100), extent_id: 35, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37528 Sep 22 23:15:05.603 DEBG 100 Flush extent 35 with f:2 g:2
37529 Sep 22 23:15:05.603 DEBG Flush just extent 35 with f:2 and g:2
37530 Sep 22 23:15:05.603 DEBG [1] It's time to notify for 100
37531 Sep 22 23:15:05.603 INFO Completion from [1] id:100 status:true
37532 Sep 22 23:15:05.603 INFO [101/752] Repair commands completed
37533 Sep 22 23:15:05.603 INFO Pop front: ReconcileIO { id: ReconciliationId(101), op: ExtentClose { repair_id: ReconciliationId(101), extent_id: 35 }, state: ClientData([New, New, New]) }
37534 Sep 22 23:15:05.603 INFO Sent repair work, now wait for resp
37535 Sep 22 23:15:05.603 INFO [0] received reconcile message
37536 Sep 22 23:15:05.603 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(101), op: ExtentClose { repair_id: ReconciliationId(101), extent_id: 35 }, state: ClientData([InProgress, New, New]) }, : downstairs
37537 Sep 22 23:15:05.603 INFO [0] client ExtentClose { repair_id: ReconciliationId(101), extent_id: 35 }
37538 Sep 22 23:15:05.603 INFO [1] received reconcile message
37539 Sep 22 23:15:05.603 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(101), op: ExtentClose { repair_id: ReconciliationId(101), extent_id: 35 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37540 Sep 22 23:15:05.603 INFO [1] client ExtentClose { repair_id: ReconciliationId(101), extent_id: 35 }
37541 Sep 22 23:15:05.603 INFO [2] received reconcile message
37542 Sep 22 23:15:05.603 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(101), op: ExtentClose { repair_id: ReconciliationId(101), extent_id: 35 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37543 Sep 22 23:15:05.603 INFO [2] client ExtentClose { repair_id: ReconciliationId(101), extent_id: 35 }
37544 Sep 22 23:15:05.603 DEBG 101 Close extent 35
37545 Sep 22 23:15:05.603 DEBG 101 Close extent 35
37546 Sep 22 23:15:05.604 DEBG 101 Close extent 35
37547 Sep 22 23:15:05.604 DEBG [2] It's time to notify for 101
37548 Sep 22 23:15:05.604 INFO Completion from [2] id:101 status:true
37549 Sep 22 23:15:05.604 INFO [102/752] Repair commands completed
37550 Sep 22 23:15:05.604 INFO Pop front: ReconcileIO { id: ReconciliationId(102), op: ExtentRepair { repair_id: ReconciliationId(102), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37551 Sep 22 23:15:05.604 INFO Sent repair work, now wait for resp
37552 Sep 22 23:15:05.604 INFO [0] received reconcile message
37553 Sep 22 23:15:05.604 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(102), op: ExtentRepair { repair_id: ReconciliationId(102), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37554 Sep 22 23:15:05.604 INFO [0] client ExtentRepair { repair_id: ReconciliationId(102), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37555 Sep 22 23:15:05.604 INFO [0] Sending repair request ReconciliationId(102)
37556 Sep 22 23:15:05.604 INFO [1] received reconcile message
37557 Sep 22 23:15:05.604 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(102), op: ExtentRepair { repair_id: ReconciliationId(102), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37558 Sep 22 23:15:05.604 INFO [1] client ExtentRepair { repair_id: ReconciliationId(102), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37559 Sep 22 23:15:05.604 INFO [1] No action required ReconciliationId(102)
37560 Sep 22 23:15:05.604 INFO [2] received reconcile message
37561 Sep 22 23:15:05.604 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(102), op: ExtentRepair { repair_id: ReconciliationId(102), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37562 Sep 22 23:15:05.604 INFO [2] client ExtentRepair { repair_id: ReconciliationId(102), extent_id: 35, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37563 Sep 22 23:15:05.604 INFO [2] No action required ReconciliationId(102)
37564 Sep 22 23:15:05.605 DEBG 102 Repair extent 35 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
37565 Sep 22 23:15:05.605 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/023.copy"
37566 Sep 22 23:15:05.668 INFO accepted connection, remote_addr: 127.0.0.1:48531, local_addr: 127.0.0.1:46213, task: repair
37567 Sep 22 23:15:05.668 TRCE incoming request, uri: /extent/35/files, method: GET, req_id: a6e931b8-27fe-4d9d-a6e7-bd37f9dc7efa, remote_addr: 127.0.0.1:48531, local_addr: 127.0.0.1:46213, task: repair
37568 Sep 22 23:15:05.668 INFO request completed, latency_us: 205, response_code: 200, uri: /extent/35/files, method: GET, req_id: a6e931b8-27fe-4d9d-a6e7-bd37f9dc7efa, remote_addr: 127.0.0.1:48531, local_addr: 127.0.0.1:46213, task: repair
37569 Sep 22 23:15:05.668 INFO eid:35 Found repair files: ["023", "023.db"]
37570 Sep 22 23:15:05.669 TRCE incoming request, uri: /newextent/35/data, method: GET, req_id: aa1818d1-bf02-4504-bbf9-09f20a161f3e, remote_addr: 127.0.0.1:48531, local_addr: 127.0.0.1:46213, task: repair
37571 Sep 22 23:15:05.669 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/35/data, method: GET, req_id: aa1818d1-bf02-4504-bbf9-09f20a161f3e, remote_addr: 127.0.0.1:48531, local_addr: 127.0.0.1:46213, task: repair
37572 Sep 22 23:15:05.674 TRCE incoming request, uri: /newextent/35/db, method: GET, req_id: 2840f5de-888b-4726-889c-afdf8551fe85, remote_addr: 127.0.0.1:48531, local_addr: 127.0.0.1:46213, task: repair
37573 Sep 22 23:15:05.674 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/35/db, method: GET, req_id: 2840f5de-888b-4726-889c-afdf8551fe85, remote_addr: 127.0.0.1:48531, local_addr: 127.0.0.1:46213, task: repair
37574 Sep 22 23:15:05.675 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/023.copy" to "/tmp/downstairs-vrx8aK6L/00/000/023.replace"
37575 Sep 22 23:15:05.675 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37576 Sep 22 23:15:05.676 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/023.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
37577 Sep 22 23:15:05.676 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/023"
37578 Sep 22 23:15:05.676 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/023.db"
37579 Sep 22 23:15:05.677 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37580 Sep 22 23:15:05.677 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/023.replace" to "/tmp/downstairs-vrx8aK6L/00/000/023.completed"
37581 Sep 22 23:15:05.677 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37582 Sep 22 23:15:05.677 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37583 Sep 22 23:15:05.677 DEBG [0] It's time to notify for 102
37584 Sep 22 23:15:05.677 INFO Completion from [0] id:102 status:true
37585 Sep 22 23:15:05.677 INFO [103/752] Repair commands completed
37586 Sep 22 23:15:05.677 INFO Pop front: ReconcileIO { id: ReconciliationId(103), op: ExtentReopen { repair_id: ReconciliationId(103), extent_id: 35 }, state: ClientData([New, New, New]) }
37587 Sep 22 23:15:05.677 INFO Sent repair work, now wait for resp
37588 Sep 22 23:15:05.677 INFO [0] received reconcile message
37589 Sep 22 23:15:05.677 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(103), op: ExtentReopen { repair_id: ReconciliationId(103), extent_id: 35 }, state: ClientData([InProgress, New, New]) }, : downstairs
37590 Sep 22 23:15:05.677 INFO [0] client ExtentReopen { repair_id: ReconciliationId(103), extent_id: 35 }
37591 Sep 22 23:15:05.677 INFO [1] received reconcile message
37592 Sep 22 23:15:05.677 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(103), op: ExtentReopen { repair_id: ReconciliationId(103), extent_id: 35 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37593 Sep 22 23:15:05.677 INFO [1] client ExtentReopen { repair_id: ReconciliationId(103), extent_id: 35 }
37594 Sep 22 23:15:05.677 INFO [2] received reconcile message
37595 Sep 22 23:15:05.677 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(103), op: ExtentReopen { repair_id: ReconciliationId(103), extent_id: 35 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37596 Sep 22 23:15:05.677 INFO [2] client ExtentReopen { repair_id: ReconciliationId(103), extent_id: 35 }
37597 Sep 22 23:15:05.677 DEBG 103 Reopen extent 35
37598 Sep 22 23:15:05.678 DEBG 103 Reopen extent 35
37599 Sep 22 23:15:05.678 DEBG 103 Reopen extent 35
37600 Sep 22 23:15:05.679 DEBG [2] It's time to notify for 103
37601 Sep 22 23:15:05.679 INFO Completion from [2] id:103 status:true
37602 Sep 22 23:15:05.679 INFO [104/752] Repair commands completed
37603 Sep 22 23:15:05.679 INFO Pop front: ReconcileIO { id: ReconciliationId(104), op: ExtentFlush { repair_id: ReconciliationId(104), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37604 Sep 22 23:15:05.679 INFO Sent repair work, now wait for resp
37605 Sep 22 23:15:05.679 INFO [0] received reconcile message
37606 Sep 22 23:15:05.679 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(104), op: ExtentFlush { repair_id: ReconciliationId(104), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37607 Sep 22 23:15:05.679 INFO [0] client ExtentFlush { repair_id: ReconciliationId(104), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37608 Sep 22 23:15:05.679 INFO [1] received reconcile message
37609 Sep 22 23:15:05.679 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(104), op: ExtentFlush { repair_id: ReconciliationId(104), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37610 Sep 22 23:15:05.679 INFO [1] client ExtentFlush { repair_id: ReconciliationId(104), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37611 Sep 22 23:15:05.679 INFO [2] received reconcile message
37612 Sep 22 23:15:05.679 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(104), op: ExtentFlush { repair_id: ReconciliationId(104), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37613 Sep 22 23:15:05.679 INFO [2] client ExtentFlush { repair_id: ReconciliationId(104), extent_id: 152, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37614 Sep 22 23:15:05.680 DEBG 104 Flush extent 152 with f:2 g:2
37615 Sep 22 23:15:05.680 DEBG Flush just extent 152 with f:2 and g:2
37616 Sep 22 23:15:05.680 DEBG [1] It's time to notify for 104
37617 Sep 22 23:15:05.680 INFO Completion from [1] id:104 status:true
37618 Sep 22 23:15:05.680 INFO [105/752] Repair commands completed
37619 Sep 22 23:15:05.680 INFO Pop front: ReconcileIO { id: ReconciliationId(105), op: ExtentClose { repair_id: ReconciliationId(105), extent_id: 152 }, state: ClientData([New, New, New]) }
37620 Sep 22 23:15:05.680 INFO Sent repair work, now wait for resp
37621 Sep 22 23:15:05.680 INFO [0] received reconcile message
37622 Sep 22 23:15:05.680 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(105), op: ExtentClose { repair_id: ReconciliationId(105), extent_id: 152 }, state: ClientData([InProgress, New, New]) }, : downstairs
37623 Sep 22 23:15:05.680 INFO [0] client ExtentClose { repair_id: ReconciliationId(105), extent_id: 152 }
37624 Sep 22 23:15:05.680 INFO [1] received reconcile message
37625 Sep 22 23:15:05.680 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(105), op: ExtentClose { repair_id: ReconciliationId(105), extent_id: 152 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37626 Sep 22 23:15:05.680 INFO [1] client ExtentClose { repair_id: ReconciliationId(105), extent_id: 152 }
37627 Sep 22 23:15:05.680 INFO [2] received reconcile message
37628 Sep 22 23:15:05.680 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(105), op: ExtentClose { repair_id: ReconciliationId(105), extent_id: 152 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37629 Sep 22 23:15:05.680 INFO [2] client ExtentClose { repair_id: ReconciliationId(105), extent_id: 152 }
37630 Sep 22 23:15:05.680 DEBG 105 Close extent 152
37631 Sep 22 23:15:05.680 DEBG 105 Close extent 152
37632 Sep 22 23:15:05.681 DEBG 105 Close extent 152
37633 Sep 22 23:15:05.681 DEBG [2] It's time to notify for 105
37634 Sep 22 23:15:05.681 INFO Completion from [2] id:105 status:true
37635 Sep 22 23:15:05.681 INFO [106/752] Repair commands completed
37636 Sep 22 23:15:05.681 INFO Pop front: ReconcileIO { id: ReconciliationId(106), op: ExtentRepair { repair_id: ReconciliationId(106), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37637 Sep 22 23:15:05.681 INFO Sent repair work, now wait for resp
37638 Sep 22 23:15:05.681 INFO [0] received reconcile message
37639 Sep 22 23:15:05.681 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(106), op: ExtentRepair { repair_id: ReconciliationId(106), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37640 Sep 22 23:15:05.681 INFO [0] client ExtentRepair { repair_id: ReconciliationId(106), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37641 Sep 22 23:15:05.681 INFO [0] Sending repair request ReconciliationId(106)
37642 Sep 22 23:15:05.681 INFO [1] received reconcile message
37643 Sep 22 23:15:05.681 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(106), op: ExtentRepair { repair_id: ReconciliationId(106), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37644 Sep 22 23:15:05.681 INFO [1] client ExtentRepair { repair_id: ReconciliationId(106), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37645 Sep 22 23:15:05.681 INFO [1] No action required ReconciliationId(106)
37646 Sep 22 23:15:05.681 INFO [2] received reconcile message
37647 Sep 22 23:15:05.681 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(106), op: ExtentRepair { repair_id: ReconciliationId(106), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37648 Sep 22 23:15:05.681 INFO [2] client ExtentRepair { repair_id: ReconciliationId(106), extent_id: 152, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37649 Sep 22 23:15:05.681 INFO [2] No action required ReconciliationId(106)
37650 Sep 22 23:15:05.682 DEBG 106 Repair extent 152 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
37651 Sep 22 23:15:05.682 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/098.copy"
37652 Sep 22 23:15:05.746 INFO accepted connection, remote_addr: 127.0.0.1:51141, local_addr: 127.0.0.1:46213, task: repair
37653 Sep 22 23:15:05.746 TRCE incoming request, uri: /extent/152/files, method: GET, req_id: 08a45306-4a04-415d-ad14-5524b5866519, remote_addr: 127.0.0.1:51141, local_addr: 127.0.0.1:46213, task: repair
37654 Sep 22 23:15:05.746 INFO request completed, latency_us: 189, response_code: 200, uri: /extent/152/files, method: GET, req_id: 08a45306-4a04-415d-ad14-5524b5866519, remote_addr: 127.0.0.1:51141, local_addr: 127.0.0.1:46213, task: repair
37655 Sep 22 23:15:05.746 INFO eid:152 Found repair files: ["098", "098.db"]
37656 Sep 22 23:15:05.747 TRCE incoming request, uri: /newextent/152/data, method: GET, req_id: 29bc57a3-1e91-40d4-882a-ce664aa53141, remote_addr: 127.0.0.1:51141, local_addr: 127.0.0.1:46213, task: repair
37657 Sep 22 23:15:05.747 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/152/data, method: GET, req_id: 29bc57a3-1e91-40d4-882a-ce664aa53141, remote_addr: 127.0.0.1:51141, local_addr: 127.0.0.1:46213, task: repair
37658 Sep 22 23:15:05.752 TRCE incoming request, uri: /newextent/152/db, method: GET, req_id: 77c8cb2b-a51d-4554-93e7-15e83b6c7a57, remote_addr: 127.0.0.1:51141, local_addr: 127.0.0.1:46213, task: repair
37659 Sep 22 23:15:05.752 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/152/db, method: GET, req_id: 77c8cb2b-a51d-4554-93e7-15e83b6c7a57, remote_addr: 127.0.0.1:51141, local_addr: 127.0.0.1:46213, task: repair
37660 Sep 22 23:15:05.753 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/098.copy" to "/tmp/downstairs-vrx8aK6L/00/000/098.replace"
37661 Sep 22 23:15:05.753 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37662 Sep 22 23:15:05.754 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/098.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
37663 Sep 22 23:15:05.754 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/098"
37664 Sep 22 23:15:05.754 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/098.db"
37665 Sep 22 23:15:05.754 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37666 Sep 22 23:15:05.755 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/098.replace" to "/tmp/downstairs-vrx8aK6L/00/000/098.completed"
37667 Sep 22 23:15:05.755 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37668 Sep 22 23:15:05.755 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37669 Sep 22 23:15:05.755 DEBG [0] It's time to notify for 106
37670 Sep 22 23:15:05.755 INFO Completion from [0] id:106 status:true
37671 Sep 22 23:15:05.755 INFO [107/752] Repair commands completed
37672 Sep 22 23:15:05.755 INFO Pop front: ReconcileIO { id: ReconciliationId(107), op: ExtentReopen { repair_id: ReconciliationId(107), extent_id: 152 }, state: ClientData([New, New, New]) }
37673 Sep 22 23:15:05.755 INFO Sent repair work, now wait for resp
37674 Sep 22 23:15:05.755 INFO [0] received reconcile message
37675 Sep 22 23:15:05.755 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(107), op: ExtentReopen { repair_id: ReconciliationId(107), extent_id: 152 }, state: ClientData([InProgress, New, New]) }, : downstairs
37676 Sep 22 23:15:05.755 INFO [0] client ExtentReopen { repair_id: ReconciliationId(107), extent_id: 152 }
37677 Sep 22 23:15:05.755 INFO [1] received reconcile message
37678 Sep 22 23:15:05.755 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(107), op: ExtentReopen { repair_id: ReconciliationId(107), extent_id: 152 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37679 Sep 22 23:15:05.755 INFO [1] client ExtentReopen { repair_id: ReconciliationId(107), extent_id: 152 }
37680 Sep 22 23:15:05.755 INFO [2] received reconcile message
37681 Sep 22 23:15:05.755 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(107), op: ExtentReopen { repair_id: ReconciliationId(107), extent_id: 152 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37682 Sep 22 23:15:05.755 INFO [2] client ExtentReopen { repair_id: ReconciliationId(107), extent_id: 152 }
37683 Sep 22 23:15:05.755 DEBG 107 Reopen extent 152
37684 Sep 22 23:15:05.756 DEBG 107 Reopen extent 152
37685 Sep 22 23:15:05.756 DEBG 107 Reopen extent 152
37686 Sep 22 23:15:05.757 DEBG [2] It's time to notify for 107
37687 Sep 22 23:15:05.757 INFO Completion from [2] id:107 status:true
37688 Sep 22 23:15:05.757 INFO [108/752] Repair commands completed
37689 Sep 22 23:15:05.757 INFO Pop front: ReconcileIO { id: ReconciliationId(108), op: ExtentFlush { repair_id: ReconciliationId(108), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37690 Sep 22 23:15:05.757 INFO Sent repair work, now wait for resp
37691 Sep 22 23:15:05.757 INFO [0] received reconcile message
37692 Sep 22 23:15:05.757 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(108), op: ExtentFlush { repair_id: ReconciliationId(108), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37693 Sep 22 23:15:05.757 INFO [0] client ExtentFlush { repair_id: ReconciliationId(108), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37694 Sep 22 23:15:05.757 INFO [1] received reconcile message
37695 Sep 22 23:15:05.757 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(108), op: ExtentFlush { repair_id: ReconciliationId(108), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37696 Sep 22 23:15:05.757 INFO [1] client ExtentFlush { repair_id: ReconciliationId(108), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37697 Sep 22 23:15:05.757 INFO [2] received reconcile message
37698 Sep 22 23:15:05.757 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(108), op: ExtentFlush { repair_id: ReconciliationId(108), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37699 Sep 22 23:15:05.757 INFO [2] client ExtentFlush { repair_id: ReconciliationId(108), extent_id: 30, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37700 Sep 22 23:15:05.757 DEBG 108 Flush extent 30 with f:2 g:2
37701 Sep 22 23:15:05.757 DEBG Flush just extent 30 with f:2 and g:2
37702 Sep 22 23:15:05.758 DEBG [1] It's time to notify for 108
37703 Sep 22 23:15:05.758 INFO Completion from [1] id:108 status:true
37704 Sep 22 23:15:05.758 INFO [109/752] Repair commands completed
37705 Sep 22 23:15:05.758 INFO Pop front: ReconcileIO { id: ReconciliationId(109), op: ExtentClose { repair_id: ReconciliationId(109), extent_id: 30 }, state: ClientData([New, New, New]) }
37706 Sep 22 23:15:05.758 INFO Sent repair work, now wait for resp
37707 Sep 22 23:15:05.758 INFO [0] received reconcile message
37708 Sep 22 23:15:05.758 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(109), op: ExtentClose { repair_id: ReconciliationId(109), extent_id: 30 }, state: ClientData([InProgress, New, New]) }, : downstairs
37709 Sep 22 23:15:05.758 INFO [0] client ExtentClose { repair_id: ReconciliationId(109), extent_id: 30 }
37710 Sep 22 23:15:05.758 INFO [1] received reconcile message
37711 Sep 22 23:15:05.758 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(109), op: ExtentClose { repair_id: ReconciliationId(109), extent_id: 30 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37712 Sep 22 23:15:05.758 INFO [1] client ExtentClose { repair_id: ReconciliationId(109), extent_id: 30 }
37713 Sep 22 23:15:05.758 INFO [2] received reconcile message
37714 Sep 22 23:15:05.758 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(109), op: ExtentClose { repair_id: ReconciliationId(109), extent_id: 30 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37715 Sep 22 23:15:05.758 INFO [2] client ExtentClose { repair_id: ReconciliationId(109), extent_id: 30 }
37716 Sep 22 23:15:05.758 DEBG 109 Close extent 30
37717 Sep 22 23:15:05.758 DEBG 109 Close extent 30
37718 Sep 22 23:15:05.759 DEBG 109 Close extent 30
37719 Sep 22 23:15:05.759 DEBG [2] It's time to notify for 109
37720 Sep 22 23:15:05.759 INFO Completion from [2] id:109 status:true
37721 Sep 22 23:15:05.759 INFO [110/752] Repair commands completed
37722 Sep 22 23:15:05.759 INFO Pop front: ReconcileIO { id: ReconciliationId(110), op: ExtentRepair { repair_id: ReconciliationId(110), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37723 Sep 22 23:15:05.759 INFO Sent repair work, now wait for resp
37724 Sep 22 23:15:05.759 INFO [0] received reconcile message
37725 Sep 22 23:15:05.759 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(110), op: ExtentRepair { repair_id: ReconciliationId(110), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37726 Sep 22 23:15:05.759 INFO [0] client ExtentRepair { repair_id: ReconciliationId(110), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37727 Sep 22 23:15:05.759 INFO [0] Sending repair request ReconciliationId(110)
37728 Sep 22 23:15:05.759 INFO [1] received reconcile message
37729 Sep 22 23:15:05.759 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(110), op: ExtentRepair { repair_id: ReconciliationId(110), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37730 Sep 22 23:15:05.759 INFO [1] client ExtentRepair { repair_id: ReconciliationId(110), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37731 Sep 22 23:15:05.759 INFO [1] No action required ReconciliationId(110)
37732 Sep 22 23:15:05.759 INFO [2] received reconcile message
37733 Sep 22 23:15:05.759 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(110), op: ExtentRepair { repair_id: ReconciliationId(110), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37734 Sep 22 23:15:05.759 INFO [2] client ExtentRepair { repair_id: ReconciliationId(110), extent_id: 30, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37735 Sep 22 23:15:05.759 INFO [2] No action required ReconciliationId(110)
37736 Sep 22 23:15:05.759 DEBG 110 Repair extent 30 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
37737 Sep 22 23:15:05.759 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/01E.copy"
37738 Sep 22 23:15:05.822 INFO accepted connection, remote_addr: 127.0.0.1:58609, local_addr: 127.0.0.1:46213, task: repair
37739 Sep 22 23:15:05.822 TRCE incoming request, uri: /extent/30/files, method: GET, req_id: ab773c06-00a1-4b82-8bb5-85f3f75d287b, remote_addr: 127.0.0.1:58609, local_addr: 127.0.0.1:46213, task: repair
37740 Sep 22 23:15:05.822 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/30/files, method: GET, req_id: ab773c06-00a1-4b82-8bb5-85f3f75d287b, remote_addr: 127.0.0.1:58609, local_addr: 127.0.0.1:46213, task: repair
37741 Sep 22 23:15:05.822 INFO eid:30 Found repair files: ["01E", "01E.db"]
37742 Sep 22 23:15:05.823 TRCE incoming request, uri: /newextent/30/data, method: GET, req_id: b70813dd-2a37-4774-b2cc-29667788bef5, remote_addr: 127.0.0.1:58609, local_addr: 127.0.0.1:46213, task: repair
37743 Sep 22 23:15:05.823 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/30/data, method: GET, req_id: b70813dd-2a37-4774-b2cc-29667788bef5, remote_addr: 127.0.0.1:58609, local_addr: 127.0.0.1:46213, task: repair
37744 Sep 22 23:15:05.828 TRCE incoming request, uri: /newextent/30/db, method: GET, req_id: 89d2e979-157c-46b8-bc4e-9de7c0777eea, remote_addr: 127.0.0.1:58609, local_addr: 127.0.0.1:46213, task: repair
37745 Sep 22 23:15:05.828 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/30/db, method: GET, req_id: 89d2e979-157c-46b8-bc4e-9de7c0777eea, remote_addr: 127.0.0.1:58609, local_addr: 127.0.0.1:46213, task: repair
37746 Sep 22 23:15:05.829 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/01E.copy" to "/tmp/downstairs-vrx8aK6L/00/000/01E.replace"
37747 Sep 22 23:15:05.830 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37748 Sep 22 23:15:05.830 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/01E.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
37749 Sep 22 23:15:05.831 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/01E"
37750 Sep 22 23:15:05.831 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/01E.db"
37751 Sep 22 23:15:05.831 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37752 Sep 22 23:15:05.831 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/01E.replace" to "/tmp/downstairs-vrx8aK6L/00/000/01E.completed"
37753 Sep 22 23:15:05.831 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37754 Sep 22 23:15:05.831 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37755 Sep 22 23:15:05.831 DEBG [0] It's time to notify for 110
37756 Sep 22 23:15:05.831 INFO Completion from [0] id:110 status:true
37757 Sep 22 23:15:05.831 INFO [111/752] Repair commands completed
37758 Sep 22 23:15:05.831 INFO Pop front: ReconcileIO { id: ReconciliationId(111), op: ExtentReopen { repair_id: ReconciliationId(111), extent_id: 30 }, state: ClientData([New, New, New]) }
37759 Sep 22 23:15:05.831 INFO Sent repair work, now wait for resp
37760 Sep 22 23:15:05.831 INFO [0] received reconcile message
37761 Sep 22 23:15:05.831 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(111), op: ExtentReopen { repair_id: ReconciliationId(111), extent_id: 30 }, state: ClientData([InProgress, New, New]) }, : downstairs
37762 Sep 22 23:15:05.831 INFO [0] client ExtentReopen { repair_id: ReconciliationId(111), extent_id: 30 }
37763 Sep 22 23:15:05.831 INFO [1] received reconcile message
37764 Sep 22 23:15:05.831 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(111), op: ExtentReopen { repair_id: ReconciliationId(111), extent_id: 30 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37765 Sep 22 23:15:05.831 INFO [1] client ExtentReopen { repair_id: ReconciliationId(111), extent_id: 30 }
37766 Sep 22 23:15:05.831 INFO [2] received reconcile message
37767 Sep 22 23:15:05.831 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(111), op: ExtentReopen { repair_id: ReconciliationId(111), extent_id: 30 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37768 Sep 22 23:15:05.831 INFO [2] client ExtentReopen { repair_id: ReconciliationId(111), extent_id: 30 }
37769 Sep 22 23:15:05.832 DEBG 111 Reopen extent 30
37770 Sep 22 23:15:05.832 DEBG 111 Reopen extent 30
37771 Sep 22 23:15:05.833 DEBG 111 Reopen extent 30
37772 Sep 22 23:15:05.833 DEBG [2] It's time to notify for 111
37773 Sep 22 23:15:05.833 INFO Completion from [2] id:111 status:true
37774 Sep 22 23:15:05.833 INFO [112/752] Repair commands completed
37775 Sep 22 23:15:05.833 INFO Pop front: ReconcileIO { id: ReconciliationId(112), op: ExtentFlush { repair_id: ReconciliationId(112), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37776 Sep 22 23:15:05.833 INFO Sent repair work, now wait for resp
37777 Sep 22 23:15:05.833 INFO [0] received reconcile message
37778 Sep 22 23:15:05.833 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(112), op: ExtentFlush { repair_id: ReconciliationId(112), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37779 Sep 22 23:15:05.833 INFO [0] client ExtentFlush { repair_id: ReconciliationId(112), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37780 Sep 22 23:15:05.833 INFO [1] received reconcile message
37781 Sep 22 23:15:05.833 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(112), op: ExtentFlush { repair_id: ReconciliationId(112), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37782 Sep 22 23:15:05.833 INFO [1] client ExtentFlush { repair_id: ReconciliationId(112), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37783 Sep 22 23:15:05.834 INFO [2] received reconcile message
37784 Sep 22 23:15:05.834 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(112), op: ExtentFlush { repair_id: ReconciliationId(112), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37785 Sep 22 23:15:05.834 INFO [2] client ExtentFlush { repair_id: ReconciliationId(112), extent_id: 95, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37786 Sep 22 23:15:05.834 DEBG 112 Flush extent 95 with f:2 g:2
37787 Sep 22 23:15:05.834 DEBG Flush just extent 95 with f:2 and g:2
37788 Sep 22 23:15:05.834 DEBG [1] It's time to notify for 112
37789 Sep 22 23:15:05.834 INFO Completion from [1] id:112 status:true
37790 Sep 22 23:15:05.834 INFO [113/752] Repair commands completed
37791 Sep 22 23:15:05.834 INFO Pop front: ReconcileIO { id: ReconciliationId(113), op: ExtentClose { repair_id: ReconciliationId(113), extent_id: 95 }, state: ClientData([New, New, New]) }
37792 Sep 22 23:15:05.834 INFO Sent repair work, now wait for resp
37793 Sep 22 23:15:05.834 INFO [0] received reconcile message
37794 Sep 22 23:15:05.834 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(113), op: ExtentClose { repair_id: ReconciliationId(113), extent_id: 95 }, state: ClientData([InProgress, New, New]) }, : downstairs
37795 Sep 22 23:15:05.834 INFO [0] client ExtentClose { repair_id: ReconciliationId(113), extent_id: 95 }
37796 Sep 22 23:15:05.834 INFO [1] received reconcile message
37797 Sep 22 23:15:05.834 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(113), op: ExtentClose { repair_id: ReconciliationId(113), extent_id: 95 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37798 Sep 22 23:15:05.834 INFO [1] client ExtentClose { repair_id: ReconciliationId(113), extent_id: 95 }
37799 Sep 22 23:15:05.834 INFO [2] received reconcile message
37800 Sep 22 23:15:05.834 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(113), op: ExtentClose { repair_id: ReconciliationId(113), extent_id: 95 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37801 Sep 22 23:15:05.834 INFO [2] client ExtentClose { repair_id: ReconciliationId(113), extent_id: 95 }
37802 Sep 22 23:15:05.834 DEBG 113 Close extent 95
37803 Sep 22 23:15:05.834 DEBG 113 Close extent 95
37804 Sep 22 23:15:05.835 DEBG 113 Close extent 95
37805 Sep 22 23:15:05.835 DEBG [2] It's time to notify for 113
37806 Sep 22 23:15:05.835 INFO Completion from [2] id:113 status:true
37807 Sep 22 23:15:05.835 INFO [114/752] Repair commands completed
37808 Sep 22 23:15:05.835 INFO Pop front: ReconcileIO { id: ReconciliationId(114), op: ExtentRepair { repair_id: ReconciliationId(114), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37809 Sep 22 23:15:05.835 INFO Sent repair work, now wait for resp
37810 Sep 22 23:15:05.835 INFO [0] received reconcile message
37811 Sep 22 23:15:05.835 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(114), op: ExtentRepair { repair_id: ReconciliationId(114), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37812 Sep 22 23:15:05.835 INFO [0] client ExtentRepair { repair_id: ReconciliationId(114), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37813 Sep 22 23:15:05.835 INFO [0] Sending repair request ReconciliationId(114)
37814 Sep 22 23:15:05.835 INFO [1] received reconcile message
37815 Sep 22 23:15:05.835 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(114), op: ExtentRepair { repair_id: ReconciliationId(114), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37816 Sep 22 23:15:05.835 INFO [1] client ExtentRepair { repair_id: ReconciliationId(114), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37817 Sep 22 23:15:05.835 INFO [1] No action required ReconciliationId(114)
37818 Sep 22 23:15:05.836 INFO [2] received reconcile message
37819 Sep 22 23:15:05.836 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(114), op: ExtentRepair { repair_id: ReconciliationId(114), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37820 Sep 22 23:15:05.836 INFO [2] client ExtentRepair { repair_id: ReconciliationId(114), extent_id: 95, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37821 Sep 22 23:15:05.836 INFO [2] No action required ReconciliationId(114)
37822 Sep 22 23:15:05.836 DEBG 114 Repair extent 95 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
37823 Sep 22 23:15:05.836 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/05F.copy"
37824 Sep 22 23:15:05.882 ERRO [2] job id 1076 saw error GenericError("test error")
37825 Sep 22 23:15:05.897 INFO accepted connection, remote_addr: 127.0.0.1:60926, local_addr: 127.0.0.1:46213, task: repair
37826 Sep 22 23:15:05.897 TRCE incoming request, uri: /extent/95/files, method: GET, req_id: 43f0faaf-fb49-47c9-9e68-feb992573ed0, remote_addr: 127.0.0.1:60926, local_addr: 127.0.0.1:46213, task: repair
37827 Sep 22 23:15:05.898 INFO request completed, latency_us: 191, response_code: 200, uri: /extent/95/files, method: GET, req_id: 43f0faaf-fb49-47c9-9e68-feb992573ed0, remote_addr: 127.0.0.1:60926, local_addr: 127.0.0.1:46213, task: repair
37828 Sep 22 23:15:05.898 INFO eid:95 Found repair files: ["05F", "05F.db"]
37829 Sep 22 23:15:05.898 TRCE incoming request, uri: /newextent/95/data, method: GET, req_id: b73a1ad7-2e44-4f57-865b-7422ba56062c, remote_addr: 127.0.0.1:60926, local_addr: 127.0.0.1:46213, task: repair
37830 Sep 22 23:15:05.898 INFO request completed, latency_us: 250, response_code: 200, uri: /newextent/95/data, method: GET, req_id: b73a1ad7-2e44-4f57-865b-7422ba56062c, remote_addr: 127.0.0.1:60926, local_addr: 127.0.0.1:46213, task: repair
37831 Sep 22 23:15:05.903 TRCE incoming request, uri: /newextent/95/db, method: GET, req_id: 5e8503d4-e230-4e25-a949-0586fdc821a0, remote_addr: 127.0.0.1:60926, local_addr: 127.0.0.1:46213, task: repair
37832 Sep 22 23:15:05.904 INFO request completed, latency_us: 314, response_code: 200, uri: /newextent/95/db, method: GET, req_id: 5e8503d4-e230-4e25-a949-0586fdc821a0, remote_addr: 127.0.0.1:60926, local_addr: 127.0.0.1:46213, task: repair
37833 Sep 22 23:15:05.905 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/05F.copy" to "/tmp/downstairs-vrx8aK6L/00/000/05F.replace"
37834 Sep 22 23:15:05.905 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37835 Sep 22 23:15:05.906 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/05F.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
37836 Sep 22 23:15:05.906 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/05F"
37837 Sep 22 23:15:05.906 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/05F.db"
37838 Sep 22 23:15:05.906 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37839 Sep 22 23:15:05.906 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/05F.replace" to "/tmp/downstairs-vrx8aK6L/00/000/05F.completed"
37840 Sep 22 23:15:05.906 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37841 Sep 22 23:15:05.906 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37842 Sep 22 23:15:05.906 DEBG [0] It's time to notify for 114
37843 Sep 22 23:15:05.906 INFO Completion from [0] id:114 status:true
37844 Sep 22 23:15:05.906 INFO [115/752] Repair commands completed
37845 Sep 22 23:15:05.906 INFO Pop front: ReconcileIO { id: ReconciliationId(115), op: ExtentReopen { repair_id: ReconciliationId(115), extent_id: 95 }, state: ClientData([New, New, New]) }
37846 Sep 22 23:15:05.906 INFO Sent repair work, now wait for resp
37847 Sep 22 23:15:05.906 INFO [0] received reconcile message
37848 Sep 22 23:15:05.906 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(115), op: ExtentReopen { repair_id: ReconciliationId(115), extent_id: 95 }, state: ClientData([InProgress, New, New]) }, : downstairs
37849 Sep 22 23:15:05.906 INFO [0] client ExtentReopen { repair_id: ReconciliationId(115), extent_id: 95 }
37850 Sep 22 23:15:05.907 INFO [1] received reconcile message
37851 Sep 22 23:15:05.907 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(115), op: ExtentReopen { repair_id: ReconciliationId(115), extent_id: 95 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37852 Sep 22 23:15:05.907 INFO [1] client ExtentReopen { repair_id: ReconciliationId(115), extent_id: 95 }
37853 Sep 22 23:15:05.907 INFO [2] received reconcile message
37854 Sep 22 23:15:05.907 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(115), op: ExtentReopen { repair_id: ReconciliationId(115), extent_id: 95 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37855 Sep 22 23:15:05.907 INFO [2] client ExtentReopen { repair_id: ReconciliationId(115), extent_id: 95 }
37856 Sep 22 23:15:05.907 DEBG 115 Reopen extent 95
37857 Sep 22 23:15:05.907 DEBG 115 Reopen extent 95
37858 Sep 22 23:15:05.908 DEBG 115 Reopen extent 95
37859 Sep 22 23:15:05.908 DEBG [2] It's time to notify for 115
37860 Sep 22 23:15:05.908 INFO Completion from [2] id:115 status:true
37861 Sep 22 23:15:05.908 INFO [116/752] Repair commands completed
37862 Sep 22 23:15:05.908 INFO Pop front: ReconcileIO { id: ReconciliationId(116), op: ExtentFlush { repair_id: ReconciliationId(116), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37863 Sep 22 23:15:05.909 INFO Sent repair work, now wait for resp
37864 Sep 22 23:15:05.909 INFO [0] received reconcile message
37865 Sep 22 23:15:05.909 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(116), op: ExtentFlush { repair_id: ReconciliationId(116), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37866 Sep 22 23:15:05.909 INFO [0] client ExtentFlush { repair_id: ReconciliationId(116), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37867 Sep 22 23:15:05.909 INFO [1] received reconcile message
37868 Sep 22 23:15:05.909 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(116), op: ExtentFlush { repair_id: ReconciliationId(116), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37869 Sep 22 23:15:05.909 INFO [1] client ExtentFlush { repair_id: ReconciliationId(116), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37870 Sep 22 23:15:05.909 INFO [2] received reconcile message
37871 Sep 22 23:15:05.909 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(116), op: ExtentFlush { repair_id: ReconciliationId(116), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37872 Sep 22 23:15:05.909 INFO [2] client ExtentFlush { repair_id: ReconciliationId(116), extent_id: 133, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37873 Sep 22 23:15:05.909 DEBG 116 Flush extent 133 with f:2 g:2
37874 Sep 22 23:15:05.909 DEBG Flush just extent 133 with f:2 and g:2
37875 Sep 22 23:15:05.909 DEBG [1] It's time to notify for 116
37876 Sep 22 23:15:05.909 INFO Completion from [1] id:116 status:true
37877 Sep 22 23:15:05.909 INFO [117/752] Repair commands completed
37878 Sep 22 23:15:05.909 INFO Pop front: ReconcileIO { id: ReconciliationId(117), op: ExtentClose { repair_id: ReconciliationId(117), extent_id: 133 }, state: ClientData([New, New, New]) }
37879 Sep 22 23:15:05.909 INFO Sent repair work, now wait for resp
37880 Sep 22 23:15:05.909 INFO [0] received reconcile message
37881 Sep 22 23:15:05.909 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(117), op: ExtentClose { repair_id: ReconciliationId(117), extent_id: 133 }, state: ClientData([InProgress, New, New]) }, : downstairs
37882 Sep 22 23:15:05.909 INFO [0] client ExtentClose { repair_id: ReconciliationId(117), extent_id: 133 }
37883 Sep 22 23:15:05.909 INFO [1] received reconcile message
37884 Sep 22 23:15:05.909 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(117), op: ExtentClose { repair_id: ReconciliationId(117), extent_id: 133 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37885 Sep 22 23:15:05.909 INFO [1] client ExtentClose { repair_id: ReconciliationId(117), extent_id: 133 }
37886 Sep 22 23:15:05.909 INFO [2] received reconcile message
37887 Sep 22 23:15:05.909 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(117), op: ExtentClose { repair_id: ReconciliationId(117), extent_id: 133 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37888 Sep 22 23:15:05.909 INFO [2] client ExtentClose { repair_id: ReconciliationId(117), extent_id: 133 }
37889 Sep 22 23:15:05.909 DEBG 117 Close extent 133
37890 Sep 22 23:15:05.910 DEBG 117 Close extent 133
37891 Sep 22 23:15:05.910 DEBG 117 Close extent 133
37892 Sep 22 23:15:05.910 DEBG [2] It's time to notify for 117
37893 Sep 22 23:15:05.910 INFO Completion from [2] id:117 status:true
37894 Sep 22 23:15:05.910 INFO [118/752] Repair commands completed
37895 Sep 22 23:15:05.910 INFO Pop front: ReconcileIO { id: ReconciliationId(118), op: ExtentRepair { repair_id: ReconciliationId(118), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37896 Sep 22 23:15:05.910 INFO Sent repair work, now wait for resp
37897 Sep 22 23:15:05.910 INFO [0] received reconcile message
37898 Sep 22 23:15:05.911 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(118), op: ExtentRepair { repair_id: ReconciliationId(118), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37899 Sep 22 23:15:05.911 INFO [0] client ExtentRepair { repair_id: ReconciliationId(118), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37900 Sep 22 23:15:05.911 INFO [0] Sending repair request ReconciliationId(118)
37901 Sep 22 23:15:05.911 INFO [1] received reconcile message
37902 Sep 22 23:15:05.911 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(118), op: ExtentRepair { repair_id: ReconciliationId(118), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37903 Sep 22 23:15:05.911 INFO [1] client ExtentRepair { repair_id: ReconciliationId(118), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37904 Sep 22 23:15:05.911 INFO [1] No action required ReconciliationId(118)
37905 Sep 22 23:15:05.911 INFO [2] received reconcile message
37906 Sep 22 23:15:05.911 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(118), op: ExtentRepair { repair_id: ReconciliationId(118), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37907 Sep 22 23:15:05.911 INFO [2] client ExtentRepair { repair_id: ReconciliationId(118), extent_id: 133, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37908 Sep 22 23:15:05.911 INFO [2] No action required ReconciliationId(118)
37909 Sep 22 23:15:05.911 DEBG 118 Repair extent 133 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
37910 Sep 22 23:15:05.911 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/085.copy"
37911 Sep 22 23:15:05.974 INFO accepted connection, remote_addr: 127.0.0.1:58418, local_addr: 127.0.0.1:46213, task: repair
37912 Sep 22 23:15:05.974 TRCE incoming request, uri: /extent/133/files, method: GET, req_id: 0a5eb246-43d6-4bc7-bed1-d611e74eb8b4, remote_addr: 127.0.0.1:58418, local_addr: 127.0.0.1:46213, task: repair
37913 Sep 22 23:15:05.974 INFO request completed, latency_us: 190, response_code: 200, uri: /extent/133/files, method: GET, req_id: 0a5eb246-43d6-4bc7-bed1-d611e74eb8b4, remote_addr: 127.0.0.1:58418, local_addr: 127.0.0.1:46213, task: repair
37914 Sep 22 23:15:05.974 INFO eid:133 Found repair files: ["085", "085.db"]
37915 Sep 22 23:15:05.975 TRCE incoming request, uri: /newextent/133/data, method: GET, req_id: f9397405-79ae-42b0-8746-19d45fa1caf1, remote_addr: 127.0.0.1:58418, local_addr: 127.0.0.1:46213, task: repair
37916 Sep 22 23:15:05.975 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/133/data, method: GET, req_id: f9397405-79ae-42b0-8746-19d45fa1caf1, remote_addr: 127.0.0.1:58418, local_addr: 127.0.0.1:46213, task: repair
37917 Sep 22 23:15:05.980 TRCE incoming request, uri: /newextent/133/db, method: GET, req_id: 1b96b039-b3b0-470b-999d-f1a5f633f865, remote_addr: 127.0.0.1:58418, local_addr: 127.0.0.1:46213, task: repair
37918 Sep 22 23:15:05.980 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/133/db, method: GET, req_id: 1b96b039-b3b0-470b-999d-f1a5f633f865, remote_addr: 127.0.0.1:58418, local_addr: 127.0.0.1:46213, task: repair
37919 Sep 22 23:15:05.981 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/085.copy" to "/tmp/downstairs-vrx8aK6L/00/000/085.replace"
37920 Sep 22 23:15:05.981 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37921 Sep 22 23:15:05.982 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/085.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
37922 Sep 22 23:15:05.982 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/085"
37923 Sep 22 23:15:05.982 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/085.db"
37924 Sep 22 23:15:05.982 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37925 Sep 22 23:15:05.982 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/085.replace" to "/tmp/downstairs-vrx8aK6L/00/000/085.completed"
37926 Sep 22 23:15:05.982 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37927 Sep 22 23:15:05.983 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
37928 Sep 22 23:15:05.983 DEBG [0] It's time to notify for 118
37929 Sep 22 23:15:05.983 INFO Completion from [0] id:118 status:true
37930 Sep 22 23:15:05.983 INFO [119/752] Repair commands completed
37931 Sep 22 23:15:05.983 INFO Pop front: ReconcileIO { id: ReconciliationId(119), op: ExtentReopen { repair_id: ReconciliationId(119), extent_id: 133 }, state: ClientData([New, New, New]) }
37932 Sep 22 23:15:05.983 INFO Sent repair work, now wait for resp
37933 Sep 22 23:15:05.983 INFO [0] received reconcile message
37934 Sep 22 23:15:05.983 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(119), op: ExtentReopen { repair_id: ReconciliationId(119), extent_id: 133 }, state: ClientData([InProgress, New, New]) }, : downstairs
37935 Sep 22 23:15:05.983 INFO [0] client ExtentReopen { repair_id: ReconciliationId(119), extent_id: 133 }
37936 Sep 22 23:15:05.983 INFO [1] received reconcile message
37937 Sep 22 23:15:05.983 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(119), op: ExtentReopen { repair_id: ReconciliationId(119), extent_id: 133 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37938 Sep 22 23:15:05.983 INFO [1] client ExtentReopen { repair_id: ReconciliationId(119), extent_id: 133 }
37939 Sep 22 23:15:05.983 INFO [2] received reconcile message
37940 Sep 22 23:15:05.983 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(119), op: ExtentReopen { repair_id: ReconciliationId(119), extent_id: 133 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37941 Sep 22 23:15:05.983 INFO [2] client ExtentReopen { repair_id: ReconciliationId(119), extent_id: 133 }
37942 Sep 22 23:15:05.983 DEBG 119 Reopen extent 133
37943 Sep 22 23:15:05.984 DEBG 119 Reopen extent 133
37944 Sep 22 23:15:05.984 DEBG 119 Reopen extent 133
37945 Sep 22 23:15:05.985 DEBG [2] It's time to notify for 119
37946 Sep 22 23:15:05.985 INFO Completion from [2] id:119 status:true
37947 Sep 22 23:15:05.985 INFO [120/752] Repair commands completed
37948 Sep 22 23:15:05.985 INFO Pop front: ReconcileIO { id: ReconciliationId(120), op: ExtentFlush { repair_id: ReconciliationId(120), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
37949 Sep 22 23:15:05.985 INFO Sent repair work, now wait for resp
37950 Sep 22 23:15:05.985 INFO [0] received reconcile message
37951 Sep 22 23:15:05.985 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(120), op: ExtentFlush { repair_id: ReconciliationId(120), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
37952 Sep 22 23:15:05.985 INFO [0] client ExtentFlush { repair_id: ReconciliationId(120), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37953 Sep 22 23:15:05.985 INFO [1] received reconcile message
37954 Sep 22 23:15:05.985 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(120), op: ExtentFlush { repair_id: ReconciliationId(120), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
37955 Sep 22 23:15:05.985 INFO [1] client ExtentFlush { repair_id: ReconciliationId(120), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37956 Sep 22 23:15:05.985 INFO [2] received reconcile message
37957 Sep 22 23:15:05.985 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(120), op: ExtentFlush { repair_id: ReconciliationId(120), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
37958 Sep 22 23:15:05.985 INFO [2] client ExtentFlush { repair_id: ReconciliationId(120), extent_id: 174, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
37959 Sep 22 23:15:05.985 DEBG 120 Flush extent 174 with f:2 g:2
37960 Sep 22 23:15:05.985 DEBG Flush just extent 174 with f:2 and g:2
37961 Sep 22 23:15:05.985 DEBG [1] It's time to notify for 120
37962 Sep 22 23:15:05.985 INFO Completion from [1] id:120 status:true
37963 Sep 22 23:15:05.985 INFO [121/752] Repair commands completed
37964 Sep 22 23:15:05.986 INFO Pop front: ReconcileIO { id: ReconciliationId(121), op: ExtentClose { repair_id: ReconciliationId(121), extent_id: 174 }, state: ClientData([New, New, New]) }
37965 Sep 22 23:15:05.986 INFO Sent repair work, now wait for resp
37966 Sep 22 23:15:05.986 INFO [0] received reconcile message
37967 Sep 22 23:15:05.986 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(121), op: ExtentClose { repair_id: ReconciliationId(121), extent_id: 174 }, state: ClientData([InProgress, New, New]) }, : downstairs
37968 Sep 22 23:15:05.986 INFO [0] client ExtentClose { repair_id: ReconciliationId(121), extent_id: 174 }
37969 Sep 22 23:15:05.986 INFO [1] received reconcile message
37970 Sep 22 23:15:05.986 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(121), op: ExtentClose { repair_id: ReconciliationId(121), extent_id: 174 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37971 Sep 22 23:15:05.986 INFO [1] client ExtentClose { repair_id: ReconciliationId(121), extent_id: 174 }
37972 Sep 22 23:15:05.986 INFO [2] received reconcile message
37973 Sep 22 23:15:05.986 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(121), op: ExtentClose { repair_id: ReconciliationId(121), extent_id: 174 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
37974 Sep 22 23:15:05.986 INFO [2] client ExtentClose { repair_id: ReconciliationId(121), extent_id: 174 }
37975 Sep 22 23:15:05.986 DEBG 121 Close extent 174
37976 Sep 22 23:15:05.986 DEBG 121 Close extent 174
37977 Sep 22 23:15:05.986 DEBG 121 Close extent 174
37978 Sep 22 23:15:05.987 DEBG [2] It's time to notify for 121
37979 Sep 22 23:15:05.987 INFO Completion from [2] id:121 status:true
37980 Sep 22 23:15:05.987 INFO [122/752] Repair commands completed
37981 Sep 22 23:15:05.987 INFO Pop front: ReconcileIO { id: ReconciliationId(122), op: ExtentRepair { repair_id: ReconciliationId(122), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
37982 Sep 22 23:15:05.987 INFO Sent repair work, now wait for resp
37983 Sep 22 23:15:05.987 INFO [0] received reconcile message
37984 Sep 22 23:15:05.987 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(122), op: ExtentRepair { repair_id: ReconciliationId(122), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
37985 Sep 22 23:15:05.987 INFO [0] client ExtentRepair { repair_id: ReconciliationId(122), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37986 Sep 22 23:15:05.987 INFO [0] Sending repair request ReconciliationId(122)
37987 Sep 22 23:15:05.987 INFO [1] received reconcile message
37988 Sep 22 23:15:05.987 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(122), op: ExtentRepair { repair_id: ReconciliationId(122), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
37989 Sep 22 23:15:05.987 INFO [1] client ExtentRepair { repair_id: ReconciliationId(122), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37990 Sep 22 23:15:05.987 INFO [1] No action required ReconciliationId(122)
37991 Sep 22 23:15:05.987 INFO [2] received reconcile message
37992 Sep 22 23:15:05.987 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(122), op: ExtentRepair { repair_id: ReconciliationId(122), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
37993 Sep 22 23:15:05.987 INFO [2] client ExtentRepair { repair_id: ReconciliationId(122), extent_id: 174, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
37994 Sep 22 23:15:05.987 INFO [2] No action required ReconciliationId(122)
37995 Sep 22 23:15:05.987 DEBG 122 Repair extent 174 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
37996 Sep 22 23:15:05.987 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0AE.copy"
37997 Sep 22 23:15:06.052 INFO accepted connection, remote_addr: 127.0.0.1:54826, local_addr: 127.0.0.1:46213, task: repair
37998 Sep 22 23:15:06.053 TRCE incoming request, uri: /extent/174/files, method: GET, req_id: 6e6a229c-fab3-40f0-91f7-1ab317ea06e7, remote_addr: 127.0.0.1:54826, local_addr: 127.0.0.1:46213, task: repair
37999 Sep 22 23:15:06.053 INFO request completed, latency_us: 242, response_code: 200, uri: /extent/174/files, method: GET, req_id: 6e6a229c-fab3-40f0-91f7-1ab317ea06e7, remote_addr: 127.0.0.1:54826, local_addr: 127.0.0.1:46213, task: repair
38000 Sep 22 23:15:06.053 INFO eid:174 Found repair files: ["0AE", "0AE.db"]
38001 Sep 22 23:15:06.054 TRCE incoming request, uri: /newextent/174/data, method: GET, req_id: 580c52a5-d683-4da7-bb69-9e5ea0287288, remote_addr: 127.0.0.1:54826, local_addr: 127.0.0.1:46213, task: repair
38002 Sep 22 23:15:06.054 INFO request completed, latency_us: 328, response_code: 200, uri: /newextent/174/data, method: GET, req_id: 580c52a5-d683-4da7-bb69-9e5ea0287288, remote_addr: 127.0.0.1:54826, local_addr: 127.0.0.1:46213, task: repair
38003 Sep 22 23:15:06.059 TRCE incoming request, uri: /newextent/174/db, method: GET, req_id: 558bc2b1-2afa-4a95-8f69-556a9d2929c4, remote_addr: 127.0.0.1:54826, local_addr: 127.0.0.1:46213, task: repair
38004 Sep 22 23:15:06.059 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/174/db, method: GET, req_id: 558bc2b1-2afa-4a95-8f69-556a9d2929c4, remote_addr: 127.0.0.1:54826, local_addr: 127.0.0.1:46213, task: repair
38005 Sep 22 23:15:06.061 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0AE.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0AE.replace"
38006 Sep 22 23:15:06.061 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38007 Sep 22 23:15:06.061 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0AE.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
38008 Sep 22 23:15:06.062 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0AE"
38009 Sep 22 23:15:06.062 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0AE.db"
38010 Sep 22 23:15:06.062 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38011 Sep 22 23:15:06.062 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0AE.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0AE.completed"
38012 Sep 22 23:15:06.062 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38013 Sep 22 23:15:06.062 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38014 Sep 22 23:15:06.062 DEBG [0] It's time to notify for 122
38015 Sep 22 23:15:06.062 INFO Completion from [0] id:122 status:true
38016 Sep 22 23:15:06.062 INFO [123/752] Repair commands completed
38017 Sep 22 23:15:06.062 INFO Pop front: ReconcileIO { id: ReconciliationId(123), op: ExtentReopen { repair_id: ReconciliationId(123), extent_id: 174 }, state: ClientData([New, New, New]) }
38018 Sep 22 23:15:06.062 INFO Sent repair work, now wait for resp
38019 Sep 22 23:15:06.062 INFO [0] received reconcile message
38020 Sep 22 23:15:06.062 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(123), op: ExtentReopen { repair_id: ReconciliationId(123), extent_id: 174 }, state: ClientData([InProgress, New, New]) }, : downstairs
38021 Sep 22 23:15:06.062 INFO [0] client ExtentReopen { repair_id: ReconciliationId(123), extent_id: 174 }
38022 Sep 22 23:15:06.063 INFO [1] received reconcile message
38023 Sep 22 23:15:06.063 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(123), op: ExtentReopen { repair_id: ReconciliationId(123), extent_id: 174 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38024 Sep 22 23:15:06.063 INFO [1] client ExtentReopen { repair_id: ReconciliationId(123), extent_id: 174 }
38025 Sep 22 23:15:06.063 INFO [2] received reconcile message
38026 Sep 22 23:15:06.063 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(123), op: ExtentReopen { repair_id: ReconciliationId(123), extent_id: 174 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38027 Sep 22 23:15:06.063 INFO [2] client ExtentReopen { repair_id: ReconciliationId(123), extent_id: 174 }
38028 Sep 22 23:15:06.063 DEBG 123 Reopen extent 174
38029 Sep 22 23:15:06.064 DEBG 123 Reopen extent 174
38030 Sep 22 23:15:06.064 DEBG 123 Reopen extent 174
38031 Sep 22 23:15:06.065 DEBG [2] It's time to notify for 123
38032 Sep 22 23:15:06.065 INFO Completion from [2] id:123 status:true
38033 Sep 22 23:15:06.065 INFO [124/752] Repair commands completed
38034 Sep 22 23:15:06.065 INFO Pop front: ReconcileIO { id: ReconciliationId(124), op: ExtentFlush { repair_id: ReconciliationId(124), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38035 Sep 22 23:15:06.065 INFO Sent repair work, now wait for resp
38036 Sep 22 23:15:06.065 INFO [0] received reconcile message
38037 Sep 22 23:15:06.065 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(124), op: ExtentFlush { repair_id: ReconciliationId(124), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38038 Sep 22 23:15:06.065 INFO [0] client ExtentFlush { repair_id: ReconciliationId(124), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38039 Sep 22 23:15:06.065 INFO [1] received reconcile message
38040 Sep 22 23:15:06.065 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(124), op: ExtentFlush { repair_id: ReconciliationId(124), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38041 Sep 22 23:15:06.065 INFO [1] client ExtentFlush { repair_id: ReconciliationId(124), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38042 Sep 22 23:15:06.065 INFO [2] received reconcile message
38043 Sep 22 23:15:06.065 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(124), op: ExtentFlush { repair_id: ReconciliationId(124), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38044 Sep 22 23:15:06.065 INFO [2] client ExtentFlush { repair_id: ReconciliationId(124), extent_id: 158, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38045 Sep 22 23:15:06.065 DEBG 124 Flush extent 158 with f:2 g:2
38046 Sep 22 23:15:06.065 DEBG Flush just extent 158 with f:2 and g:2
38047 Sep 22 23:15:06.065 DEBG [1] It's time to notify for 124
38048 Sep 22 23:15:06.065 INFO Completion from [1] id:124 status:true
38049 Sep 22 23:15:06.065 INFO [125/752] Repair commands completed
38050 Sep 22 23:15:06.065 INFO Pop front: ReconcileIO { id: ReconciliationId(125), op: ExtentClose { repair_id: ReconciliationId(125), extent_id: 158 }, state: ClientData([New, New, New]) }
38051 Sep 22 23:15:06.065 INFO Sent repair work, now wait for resp
38052 Sep 22 23:15:06.065 INFO [0] received reconcile message
38053 Sep 22 23:15:06.065 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(125), op: ExtentClose { repair_id: ReconciliationId(125), extent_id: 158 }, state: ClientData([InProgress, New, New]) }, : downstairs
38054 Sep 22 23:15:06.065 INFO [0] client ExtentClose { repair_id: ReconciliationId(125), extent_id: 158 }
38055 Sep 22 23:15:06.065 INFO [1] received reconcile message
38056 Sep 22 23:15:06.065 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(125), op: ExtentClose { repair_id: ReconciliationId(125), extent_id: 158 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38057 Sep 22 23:15:06.065 INFO [1] client ExtentClose { repair_id: ReconciliationId(125), extent_id: 158 }
38058 Sep 22 23:15:06.065 INFO [2] received reconcile message
38059 Sep 22 23:15:06.065 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(125), op: ExtentClose { repair_id: ReconciliationId(125), extent_id: 158 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38060 Sep 22 23:15:06.065 INFO [2] client ExtentClose { repair_id: ReconciliationId(125), extent_id: 158 }
38061 Sep 22 23:15:06.066 DEBG 125 Close extent 158
38062 Sep 22 23:15:06.066 DEBG 125 Close extent 158
38063 Sep 22 23:15:06.066 DEBG 125 Close extent 158
38064 Sep 22 23:15:06.067 DEBG [2] It's time to notify for 125
38065 Sep 22 23:15:06.067 INFO Completion from [2] id:125 status:true
38066 Sep 22 23:15:06.067 INFO [126/752] Repair commands completed
38067 Sep 22 23:15:06.067 INFO Pop front: ReconcileIO { id: ReconciliationId(126), op: ExtentRepair { repair_id: ReconciliationId(126), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38068 Sep 22 23:15:06.067 INFO Sent repair work, now wait for resp
38069 Sep 22 23:15:06.067 INFO [0] received reconcile message
38070 Sep 22 23:15:06.067 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(126), op: ExtentRepair { repair_id: ReconciliationId(126), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38071 Sep 22 23:15:06.067 INFO [0] client ExtentRepair { repair_id: ReconciliationId(126), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38072 Sep 22 23:15:06.067 INFO [0] Sending repair request ReconciliationId(126)
38073 Sep 22 23:15:06.067 INFO [1] received reconcile message
38074 Sep 22 23:15:06.067 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(126), op: ExtentRepair { repair_id: ReconciliationId(126), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38075 Sep 22 23:15:06.067 INFO [1] client ExtentRepair { repair_id: ReconciliationId(126), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38076 Sep 22 23:15:06.067 INFO [1] No action required ReconciliationId(126)
38077 Sep 22 23:15:06.067 INFO [2] received reconcile message
38078 Sep 22 23:15:06.067 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(126), op: ExtentRepair { repair_id: ReconciliationId(126), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38079 Sep 22 23:15:06.067 INFO [2] client ExtentRepair { repair_id: ReconciliationId(126), extent_id: 158, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38080 Sep 22 23:15:06.067 INFO [2] No action required ReconciliationId(126)
38081 Sep 22 23:15:06.067 DEBG 126 Repair extent 158 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
38082 Sep 22 23:15:06.067 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/09E.copy"
38083 Sep 22 23:15:06.130 INFO accepted connection, remote_addr: 127.0.0.1:43707, local_addr: 127.0.0.1:46213, task: repair
38084 Sep 22 23:15:06.131 TRCE incoming request, uri: /extent/158/files, method: GET, req_id: 07473e78-377a-4306-8408-2a80e40ed040, remote_addr: 127.0.0.1:43707, local_addr: 127.0.0.1:46213, task: repair
38085 Sep 22 23:15:06.131 INFO request completed, latency_us: 196, response_code: 200, uri: /extent/158/files, method: GET, req_id: 07473e78-377a-4306-8408-2a80e40ed040, remote_addr: 127.0.0.1:43707, local_addr: 127.0.0.1:46213, task: repair
38086 Sep 22 23:15:06.131 INFO eid:158 Found repair files: ["09E", "09E.db"]
38087 Sep 22 23:15:06.131 TRCE incoming request, uri: /newextent/158/data, method: GET, req_id: 8083baac-42c2-4a1a-8ac5-b5458e5d21d4, remote_addr: 127.0.0.1:43707, local_addr: 127.0.0.1:46213, task: repair
38088 Sep 22 23:15:06.132 INFO request completed, latency_us: 318, response_code: 200, uri: /newextent/158/data, method: GET, req_id: 8083baac-42c2-4a1a-8ac5-b5458e5d21d4, remote_addr: 127.0.0.1:43707, local_addr: 127.0.0.1:46213, task: repair
38089 Sep 22 23:15:06.137 TRCE incoming request, uri: /newextent/158/db, method: GET, req_id: 387bcfc2-7c61-4172-86b9-b2f06a5801ab, remote_addr: 127.0.0.1:43707, local_addr: 127.0.0.1:46213, task: repair
38090 Sep 22 23:15:06.137 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/158/db, method: GET, req_id: 387bcfc2-7c61-4172-86b9-b2f06a5801ab, remote_addr: 127.0.0.1:43707, local_addr: 127.0.0.1:46213, task: repair
38091 Sep 22 23:15:06.138 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/09E.copy" to "/tmp/downstairs-vrx8aK6L/00/000/09E.replace"
38092 Sep 22 23:15:06.138 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38093 Sep 22 23:15:06.139 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/09E.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
38094 Sep 22 23:15:06.139 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/09E"
38095 Sep 22 23:15:06.139 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/09E.db"
38096 Sep 22 23:15:06.139 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38097 Sep 22 23:15:06.139 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/09E.replace" to "/tmp/downstairs-vrx8aK6L/00/000/09E.completed"
38098 Sep 22 23:15:06.139 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38099 Sep 22 23:15:06.140 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38100 Sep 22 23:15:06.140 DEBG [0] It's time to notify for 126
38101 Sep 22 23:15:06.140 INFO Completion from [0] id:126 status:true
38102 Sep 22 23:15:06.140 INFO [127/752] Repair commands completed
38103 Sep 22 23:15:06.140 INFO Pop front: ReconcileIO { id: ReconciliationId(127), op: ExtentReopen { repair_id: ReconciliationId(127), extent_id: 158 }, state: ClientData([New, New, New]) }
38104 Sep 22 23:15:06.140 INFO Sent repair work, now wait for resp
38105 Sep 22 23:15:06.140 INFO [0] received reconcile message
38106 Sep 22 23:15:06.140 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(127), op: ExtentReopen { repair_id: ReconciliationId(127), extent_id: 158 }, state: ClientData([InProgress, New, New]) }, : downstairs
38107 Sep 22 23:15:06.140 INFO [0] client ExtentReopen { repair_id: ReconciliationId(127), extent_id: 158 }
38108 Sep 22 23:15:06.140 INFO [1] received reconcile message
38109 Sep 22 23:15:06.140 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(127), op: ExtentReopen { repair_id: ReconciliationId(127), extent_id: 158 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38110 Sep 22 23:15:06.140 INFO [1] client ExtentReopen { repair_id: ReconciliationId(127), extent_id: 158 }
38111 Sep 22 23:15:06.140 INFO [2] received reconcile message
38112 Sep 22 23:15:06.140 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(127), op: ExtentReopen { repair_id: ReconciliationId(127), extent_id: 158 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38113 Sep 22 23:15:06.140 INFO [2] client ExtentReopen { repair_id: ReconciliationId(127), extent_id: 158 }
38114 Sep 22 23:15:06.140 DEBG 127 Reopen extent 158
38115 Sep 22 23:15:06.141 DEBG 127 Reopen extent 158
38116 Sep 22 23:15:06.141 DEBG 127 Reopen extent 158
38117 Sep 22 23:15:06.142 DEBG [2] It's time to notify for 127
38118 Sep 22 23:15:06.142 INFO Completion from [2] id:127 status:true
38119 Sep 22 23:15:06.142 INFO [128/752] Repair commands completed
38120 Sep 22 23:15:06.142 INFO Pop front: ReconcileIO { id: ReconciliationId(128), op: ExtentFlush { repair_id: ReconciliationId(128), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38121 Sep 22 23:15:06.142 INFO Sent repair work, now wait for resp
38122 Sep 22 23:15:06.142 INFO [0] received reconcile message
38123 Sep 22 23:15:06.142 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(128), op: ExtentFlush { repair_id: ReconciliationId(128), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38124 Sep 22 23:15:06.142 INFO [0] client ExtentFlush { repair_id: ReconciliationId(128), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38125 Sep 22 23:15:06.142 INFO [1] received reconcile message
38126 Sep 22 23:15:06.142 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(128), op: ExtentFlush { repair_id: ReconciliationId(128), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38127 Sep 22 23:15:06.142 INFO [1] client ExtentFlush { repair_id: ReconciliationId(128), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38128 Sep 22 23:15:06.142 INFO [2] received reconcile message
38129 Sep 22 23:15:06.142 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(128), op: ExtentFlush { repair_id: ReconciliationId(128), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38130 Sep 22 23:15:06.142 INFO [2] client ExtentFlush { repair_id: ReconciliationId(128), extent_id: 106, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38131 Sep 22 23:15:06.142 DEBG 128 Flush extent 106 with f:2 g:2
38132 Sep 22 23:15:06.142 DEBG Flush just extent 106 with f:2 and g:2
38133 Sep 22 23:15:06.143 DEBG [1] It's time to notify for 128
38134 Sep 22 23:15:06.143 INFO Completion from [1] id:128 status:true
38135 Sep 22 23:15:06.143 INFO [129/752] Repair commands completed
38136 Sep 22 23:15:06.143 INFO Pop front: ReconcileIO { id: ReconciliationId(129), op: ExtentClose { repair_id: ReconciliationId(129), extent_id: 106 }, state: ClientData([New, New, New]) }
38137 Sep 22 23:15:06.143 INFO Sent repair work, now wait for resp
38138 Sep 22 23:15:06.143 INFO [0] received reconcile message
38139 Sep 22 23:15:06.143 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(129), op: ExtentClose { repair_id: ReconciliationId(129), extent_id: 106 }, state: ClientData([InProgress, New, New]) }, : downstairs
38140 Sep 22 23:15:06.143 INFO [0] client ExtentClose { repair_id: ReconciliationId(129), extent_id: 106 }
38141 Sep 22 23:15:06.143 INFO [1] received reconcile message
38142 Sep 22 23:15:06.143 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(129), op: ExtentClose { repair_id: ReconciliationId(129), extent_id: 106 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38143 Sep 22 23:15:06.143 INFO [1] client ExtentClose { repair_id: ReconciliationId(129), extent_id: 106 }
38144 Sep 22 23:15:06.143 INFO [2] received reconcile message
38145 Sep 22 23:15:06.143 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(129), op: ExtentClose { repair_id: ReconciliationId(129), extent_id: 106 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38146 Sep 22 23:15:06.143 INFO [2] client ExtentClose { repair_id: ReconciliationId(129), extent_id: 106 }
38147 Sep 22 23:15:06.143 DEBG 129 Close extent 106
38148 Sep 22 23:15:06.143 DEBG 129 Close extent 106
38149 Sep 22 23:15:06.144 DEBG 129 Close extent 106
38150 Sep 22 23:15:06.144 DEBG [2] It's time to notify for 129
38151 Sep 22 23:15:06.144 INFO Completion from [2] id:129 status:true
38152 Sep 22 23:15:06.144 INFO [130/752] Repair commands completed
38153 Sep 22 23:15:06.144 INFO Pop front: ReconcileIO { id: ReconciliationId(130), op: ExtentRepair { repair_id: ReconciliationId(130), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38154 Sep 22 23:15:06.144 INFO Sent repair work, now wait for resp
38155 Sep 22 23:15:06.144 INFO [0] received reconcile message
38156 Sep 22 23:15:06.144 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(130), op: ExtentRepair { repair_id: ReconciliationId(130), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38157 Sep 22 23:15:06.144 INFO [0] client ExtentRepair { repair_id: ReconciliationId(130), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38158 Sep 22 23:15:06.144 INFO [0] Sending repair request ReconciliationId(130)
38159 Sep 22 23:15:06.144 INFO [1] received reconcile message
38160 Sep 22 23:15:06.144 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(130), op: ExtentRepair { repair_id: ReconciliationId(130), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38161 Sep 22 23:15:06.144 INFO [1] client ExtentRepair { repair_id: ReconciliationId(130), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38162 Sep 22 23:15:06.144 INFO [1] No action required ReconciliationId(130)
38163 Sep 22 23:15:06.144 INFO [2] received reconcile message
38164 Sep 22 23:15:06.144 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(130), op: ExtentRepair { repair_id: ReconciliationId(130), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38165 Sep 22 23:15:06.144 INFO [2] client ExtentRepair { repair_id: ReconciliationId(130), extent_id: 106, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38166 Sep 22 23:15:06.144 INFO [2] No action required ReconciliationId(130)
38167 Sep 22 23:15:06.144 DEBG 130 Repair extent 106 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
38168 Sep 22 23:15:06.144 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/06A.copy"
38169 Sep 22 23:15:06.207 INFO accepted connection, remote_addr: 127.0.0.1:57444, local_addr: 127.0.0.1:46213, task: repair
38170 Sep 22 23:15:06.207 TRCE incoming request, uri: /extent/106/files, method: GET, req_id: bae3674c-eeb2-425c-b529-928a3b001ebe, remote_addr: 127.0.0.1:57444, local_addr: 127.0.0.1:46213, task: repair
38171 Sep 22 23:15:06.207 INFO request completed, latency_us: 192, response_code: 200, uri: /extent/106/files, method: GET, req_id: bae3674c-eeb2-425c-b529-928a3b001ebe, remote_addr: 127.0.0.1:57444, local_addr: 127.0.0.1:46213, task: repair
38172 Sep 22 23:15:06.207 INFO eid:106 Found repair files: ["06A", "06A.db"]
38173 Sep 22 23:15:06.208 TRCE incoming request, uri: /newextent/106/data, method: GET, req_id: 677b09ac-53df-4224-bb3c-e4fc52115168, remote_addr: 127.0.0.1:57444, local_addr: 127.0.0.1:46213, task: repair
38174 Sep 22 23:15:06.208 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/106/data, method: GET, req_id: 677b09ac-53df-4224-bb3c-e4fc52115168, remote_addr: 127.0.0.1:57444, local_addr: 127.0.0.1:46213, task: repair
38175 Sep 22 23:15:06.213 TRCE incoming request, uri: /newextent/106/db, method: GET, req_id: ed513e4f-50d4-4754-9851-690fc21c323f, remote_addr: 127.0.0.1:57444, local_addr: 127.0.0.1:46213, task: repair
38176 Sep 22 23:15:06.213 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/106/db, method: GET, req_id: ed513e4f-50d4-4754-9851-690fc21c323f, remote_addr: 127.0.0.1:57444, local_addr: 127.0.0.1:46213, task: repair
38177 Sep 22 23:15:06.214 DEBG up_ds_listen was notified
38178 Sep 22 23:15:06.214 DEBG up_ds_listen process 1075
38179 Sep 22 23:15:06.214 DEBG [A] ack job 1075:76, : downstairs
38180 Sep 22 23:15:06.214 DEBG up_ds_listen checked 1 jobs, back to waiting
38181 Sep 22 23:15:06.214 WARN returning error on flush!
38182 Sep 22 23:15:06.214 DEBG Flush :1075 extent_limit None deps:[JobId(1074), JobId(1073)] res:false f:27 g:1
38183 Sep 22 23:15:06.214 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/06A.copy" to "/tmp/downstairs-vrx8aK6L/00/000/06A.replace"
38184 Sep 22 23:15:06.214 INFO [lossy] skipping 1076
38185 Sep 22 23:15:06.214 INFO [lossy] skipping 1075
38186 Sep 22 23:15:06.214 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38187 Sep 22 23:15:06.214 DEBG Flush :1075 extent_limit None deps:[JobId(1074), JobId(1073)] res:true f:27 g:1
38188 Sep 22 23:15:06.215 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/06A.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
38189 Sep 22 23:15:06.215 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/06A"
38190 Sep 22 23:15:06.215 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/06A.db"
38191 Sep 22 23:15:06.215 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38192 Sep 22 23:15:06.215 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/06A.replace" to "/tmp/downstairs-vrx8aK6L/00/000/06A.completed"
38193 Sep 22 23:15:06.215 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38194 Sep 22 23:15:06.216 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38195 Sep 22 23:15:06.216 DEBG [0] It's time to notify for 130
38196 Sep 22 23:15:06.216 INFO Completion from [0] id:130 status:true
38197 Sep 22 23:15:06.216 INFO [131/752] Repair commands completed
38198 Sep 22 23:15:06.216 INFO Pop front: ReconcileIO { id: ReconciliationId(131), op: ExtentReopen { repair_id: ReconciliationId(131), extent_id: 106 }, state: ClientData([New, New, New]) }
38199 Sep 22 23:15:06.216 INFO Sent repair work, now wait for resp
38200 Sep 22 23:15:06.216 INFO [0] received reconcile message
38201 Sep 22 23:15:06.216 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(131), op: ExtentReopen { repair_id: ReconciliationId(131), extent_id: 106 }, state: ClientData([InProgress, New, New]) }, : downstairs
38202 Sep 22 23:15:06.216 INFO [0] client ExtentReopen { repair_id: ReconciliationId(131), extent_id: 106 }
38203 Sep 22 23:15:06.216 INFO [1] received reconcile message
38204 Sep 22 23:15:06.216 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(131), op: ExtentReopen { repair_id: ReconciliationId(131), extent_id: 106 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38205 Sep 22 23:15:06.216 INFO [1] client ExtentReopen { repair_id: ReconciliationId(131), extent_id: 106 }
38206 Sep 22 23:15:06.216 INFO [2] received reconcile message
38207 Sep 22 23:15:06.216 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(131), op: ExtentReopen { repair_id: ReconciliationId(131), extent_id: 106 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38208 Sep 22 23:15:06.216 INFO [2] client ExtentReopen { repair_id: ReconciliationId(131), extent_id: 106 }
38209 Sep 22 23:15:06.216 DEBG 131 Reopen extent 106
38210 Sep 22 23:15:06.217 DEBG 131 Reopen extent 106
38211 Sep 22 23:15:06.217 DEBG 131 Reopen extent 106
38212 Sep 22 23:15:06.218 DEBG [2] It's time to notify for 131
38213 Sep 22 23:15:06.218 INFO Completion from [2] id:131 status:true
38214 Sep 22 23:15:06.218 INFO [132/752] Repair commands completed
38215 Sep 22 23:15:06.218 INFO Pop front: ReconcileIO { id: ReconciliationId(132), op: ExtentFlush { repair_id: ReconciliationId(132), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38216 Sep 22 23:15:06.218 INFO Sent repair work, now wait for resp
38217 Sep 22 23:15:06.218 INFO [0] received reconcile message
38218 Sep 22 23:15:06.218 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(132), op: ExtentFlush { repair_id: ReconciliationId(132), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38219 Sep 22 23:15:06.218 INFO [0] client ExtentFlush { repair_id: ReconciliationId(132), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38220 Sep 22 23:15:06.218 INFO [1] received reconcile message
38221 Sep 22 23:15:06.218 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(132), op: ExtentFlush { repair_id: ReconciliationId(132), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38222 Sep 22 23:15:06.218 INFO [1] client ExtentFlush { repair_id: ReconciliationId(132), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38223 Sep 22 23:15:06.218 INFO [2] received reconcile message
38224 Sep 22 23:15:06.218 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(132), op: ExtentFlush { repair_id: ReconciliationId(132), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38225 Sep 22 23:15:06.218 INFO [2] client ExtentFlush { repair_id: ReconciliationId(132), extent_id: 103, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38226 Sep 22 23:15:06.218 DEBG 132 Flush extent 103 with f:2 g:2
38227 Sep 22 23:15:06.218 DEBG Flush just extent 103 with f:2 and g:2
38228 Sep 22 23:15:06.218 DEBG [1] It's time to notify for 132
38229 Sep 22 23:15:06.219 INFO Completion from [1] id:132 status:true
38230 Sep 22 23:15:06.219 INFO [133/752] Repair commands completed
38231 Sep 22 23:15:06.219 INFO Pop front: ReconcileIO { id: ReconciliationId(133), op: ExtentClose { repair_id: ReconciliationId(133), extent_id: 103 }, state: ClientData([New, New, New]) }
38232 Sep 22 23:15:06.219 INFO Sent repair work, now wait for resp
38233 Sep 22 23:15:06.219 INFO [0] received reconcile message
38234 Sep 22 23:15:06.219 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(133), op: ExtentClose { repair_id: ReconciliationId(133), extent_id: 103 }, state: ClientData([InProgress, New, New]) }, : downstairs
38235 Sep 22 23:15:06.219 INFO [0] client ExtentClose { repair_id: ReconciliationId(133), extent_id: 103 }
38236 Sep 22 23:15:06.219 INFO [1] received reconcile message
38237 Sep 22 23:15:06.219 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(133), op: ExtentClose { repair_id: ReconciliationId(133), extent_id: 103 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38238 Sep 22 23:15:06.219 INFO [1] client ExtentClose { repair_id: ReconciliationId(133), extent_id: 103 }
38239 Sep 22 23:15:06.219 INFO [2] received reconcile message
38240 Sep 22 23:15:06.219 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(133), op: ExtentClose { repair_id: ReconciliationId(133), extent_id: 103 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38241 Sep 22 23:15:06.219 INFO [2] client ExtentClose { repair_id: ReconciliationId(133), extent_id: 103 }
38242 Sep 22 23:15:06.219 DEBG 133 Close extent 103
38243 Sep 22 23:15:06.219 DEBG 133 Close extent 103
38244 Sep 22 23:15:06.219 DEBG 133 Close extent 103
38245 Sep 22 23:15:06.220 DEBG [2] It's time to notify for 133
38246 Sep 22 23:15:06.220 INFO Completion from [2] id:133 status:true
38247 Sep 22 23:15:06.220 INFO [134/752] Repair commands completed
38248 Sep 22 23:15:06.220 INFO Pop front: ReconcileIO { id: ReconciliationId(134), op: ExtentRepair { repair_id: ReconciliationId(134), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38249 Sep 22 23:15:06.220 INFO Sent repair work, now wait for resp
38250 Sep 22 23:15:06.220 INFO [0] received reconcile message
38251 Sep 22 23:15:06.220 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(134), op: ExtentRepair { repair_id: ReconciliationId(134), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38252 Sep 22 23:15:06.220 INFO [0] client ExtentRepair { repair_id: ReconciliationId(134), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38253 Sep 22 23:15:06.220 INFO [0] Sending repair request ReconciliationId(134)
38254 Sep 22 23:15:06.220 INFO [1] received reconcile message
38255 Sep 22 23:15:06.220 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(134), op: ExtentRepair { repair_id: ReconciliationId(134), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38256 Sep 22 23:15:06.220 INFO [1] client ExtentRepair { repair_id: ReconciliationId(134), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38257 Sep 22 23:15:06.220 INFO [1] No action required ReconciliationId(134)
38258 Sep 22 23:15:06.220 INFO [2] received reconcile message
38259 Sep 22 23:15:06.220 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(134), op: ExtentRepair { repair_id: ReconciliationId(134), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38260 Sep 22 23:15:06.220 INFO [2] client ExtentRepair { repair_id: ReconciliationId(134), extent_id: 103, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38261 Sep 22 23:15:06.220 INFO [2] No action required ReconciliationId(134)
38262 Sep 22 23:15:06.220 DEBG Read :1076 deps:[JobId(1075)] res:true
38263 Sep 22 23:15:06.220 DEBG 134 Repair extent 103 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
38264 Sep 22 23:15:06.220 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/067.copy"
38265 Sep 22 23:15:06.242 DEBG IO Flush 1077 has deps [JobId(1076), JobId(1075)]
38266 Sep 22 23:15:06.242 ERRO [0] job id 1075 saw error GenericError("test error")
38267 Sep 22 23:15:06.242 DEBG [rc] retire 1075 clears [JobId(1074), JobId(1075)], : downstairs
38268 Sep 22 23:15:06.245 DEBG Flush :1077 extent_limit None deps:[JobId(1076), JobId(1075)] res:true f:28 g:1
38269 Sep 22 23:15:06.245 INFO [lossy] sleeping 1 second
38270 Sep 22 23:15:06.284 INFO accepted connection, remote_addr: 127.0.0.1:44956, local_addr: 127.0.0.1:46213, task: repair
38271 Sep 22 23:15:06.285 TRCE incoming request, uri: /extent/103/files, method: GET, req_id: 53d79daa-7aa2-4f32-bf22-27f30c1d5c40, remote_addr: 127.0.0.1:44956, local_addr: 127.0.0.1:46213, task: repair
38272 Sep 22 23:15:06.285 INFO request completed, latency_us: 217, response_code: 200, uri: /extent/103/files, method: GET, req_id: 53d79daa-7aa2-4f32-bf22-27f30c1d5c40, remote_addr: 127.0.0.1:44956, local_addr: 127.0.0.1:46213, task: repair
38273 Sep 22 23:15:06.285 INFO eid:103 Found repair files: ["067", "067.db"]
38274 Sep 22 23:15:06.285 TRCE incoming request, uri: /newextent/103/data, method: GET, req_id: 71fa9259-f306-49a6-b849-12df81cf107c, remote_addr: 127.0.0.1:44956, local_addr: 127.0.0.1:46213, task: repair
38275 Sep 22 23:15:06.286 INFO request completed, latency_us: 343, response_code: 200, uri: /newextent/103/data, method: GET, req_id: 71fa9259-f306-49a6-b849-12df81cf107c, remote_addr: 127.0.0.1:44956, local_addr: 127.0.0.1:46213, task: repair
38276 Sep 22 23:15:06.291 TRCE incoming request, uri: /newextent/103/db, method: GET, req_id: 357afc85-6eff-4b88-a925-f96c5501af67, remote_addr: 127.0.0.1:44956, local_addr: 127.0.0.1:46213, task: repair
38277 Sep 22 23:15:06.291 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/103/db, method: GET, req_id: 357afc85-6eff-4b88-a925-f96c5501af67, remote_addr: 127.0.0.1:44956, local_addr: 127.0.0.1:46213, task: repair
38278 Sep 22 23:15:06.292 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/067.copy" to "/tmp/downstairs-vrx8aK6L/00/000/067.replace"
38279 Sep 22 23:15:06.292 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38280 Sep 22 23:15:06.293 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/067.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
38281 Sep 22 23:15:06.293 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/067"
38282 Sep 22 23:15:06.293 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/067.db"
38283 Sep 22 23:15:06.293 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38284 Sep 22 23:15:06.293 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/067.replace" to "/tmp/downstairs-vrx8aK6L/00/000/067.completed"
38285 Sep 22 23:15:06.294 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38286 Sep 22 23:15:06.294 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38287 Sep 22 23:15:06.294 DEBG [0] It's time to notify for 134
38288 Sep 22 23:15:06.294 INFO Completion from [0] id:134 status:true
38289 Sep 22 23:15:06.294 INFO [135/752] Repair commands completed
38290 Sep 22 23:15:06.294 INFO Pop front: ReconcileIO { id: ReconciliationId(135), op: ExtentReopen { repair_id: ReconciliationId(135), extent_id: 103 }, state: ClientData([New, New, New]) }
38291 Sep 22 23:15:06.294 INFO Sent repair work, now wait for resp
38292 Sep 22 23:15:06.294 INFO [0] received reconcile message
38293 Sep 22 23:15:06.294 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(135), op: ExtentReopen { repair_id: ReconciliationId(135), extent_id: 103 }, state: ClientData([InProgress, New, New]) }, : downstairs
38294 Sep 22 23:15:06.294 INFO [0] client ExtentReopen { repair_id: ReconciliationId(135), extent_id: 103 }
38295 Sep 22 23:15:06.294 INFO [1] received reconcile message
38296 Sep 22 23:15:06.294 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(135), op: ExtentReopen { repair_id: ReconciliationId(135), extent_id: 103 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38297 Sep 22 23:15:06.294 INFO [1] client ExtentReopen { repair_id: ReconciliationId(135), extent_id: 103 }
38298 Sep 22 23:15:06.294 INFO [2] received reconcile message
38299 Sep 22 23:15:06.294 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(135), op: ExtentReopen { repair_id: ReconciliationId(135), extent_id: 103 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38300 Sep 22 23:15:06.294 INFO [2] client ExtentReopen { repair_id: ReconciliationId(135), extent_id: 103 }
38301 Sep 22 23:15:06.294 DEBG 135 Reopen extent 103
38302 Sep 22 23:15:06.295 DEBG 135 Reopen extent 103
38303 Sep 22 23:15:06.296 DEBG 135 Reopen extent 103
38304 Sep 22 23:15:06.296 DEBG [2] It's time to notify for 135
38305 Sep 22 23:15:06.296 INFO Completion from [2] id:135 status:true
38306 Sep 22 23:15:06.296 INFO [136/752] Repair commands completed
38307 Sep 22 23:15:06.296 INFO Pop front: ReconcileIO { id: ReconciliationId(136), op: ExtentFlush { repair_id: ReconciliationId(136), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38308 Sep 22 23:15:06.296 INFO Sent repair work, now wait for resp
38309 Sep 22 23:15:06.296 INFO [0] received reconcile message
38310 Sep 22 23:15:06.296 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(136), op: ExtentFlush { repair_id: ReconciliationId(136), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38311 Sep 22 23:15:06.296 INFO [0] client ExtentFlush { repair_id: ReconciliationId(136), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38312 Sep 22 23:15:06.296 INFO [1] received reconcile message
38313 Sep 22 23:15:06.296 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(136), op: ExtentFlush { repair_id: ReconciliationId(136), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38314 Sep 22 23:15:06.296 INFO [1] client ExtentFlush { repair_id: ReconciliationId(136), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38315 Sep 22 23:15:06.296 INFO [2] received reconcile message
38316 Sep 22 23:15:06.296 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(136), op: ExtentFlush { repair_id: ReconciliationId(136), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38317 Sep 22 23:15:06.296 INFO [2] client ExtentFlush { repair_id: ReconciliationId(136), extent_id: 80, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38318 Sep 22 23:15:06.297 DEBG 136 Flush extent 80 with f:2 g:2
38319 Sep 22 23:15:06.297 DEBG Flush just extent 80 with f:2 and g:2
38320 Sep 22 23:15:06.297 DEBG [1] It's time to notify for 136
38321 Sep 22 23:15:06.297 INFO Completion from [1] id:136 status:true
38322 Sep 22 23:15:06.297 INFO [137/752] Repair commands completed
38323 Sep 22 23:15:06.297 INFO Pop front: ReconcileIO { id: ReconciliationId(137), op: ExtentClose { repair_id: ReconciliationId(137), extent_id: 80 }, state: ClientData([New, New, New]) }
38324 Sep 22 23:15:06.297 INFO Sent repair work, now wait for resp
38325 Sep 22 23:15:06.297 INFO [0] received reconcile message
38326 Sep 22 23:15:06.297 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(137), op: ExtentClose { repair_id: ReconciliationId(137), extent_id: 80 }, state: ClientData([InProgress, New, New]) }, : downstairs
38327 Sep 22 23:15:06.297 INFO [0] client ExtentClose { repair_id: ReconciliationId(137), extent_id: 80 }
38328 Sep 22 23:15:06.297 INFO [1] received reconcile message
38329 Sep 22 23:15:06.297 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(137), op: ExtentClose { repair_id: ReconciliationId(137), extent_id: 80 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38330 Sep 22 23:15:06.297 INFO [1] client ExtentClose { repair_id: ReconciliationId(137), extent_id: 80 }
38331 Sep 22 23:15:06.297 INFO [2] received reconcile message
38332 Sep 22 23:15:06.297 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(137), op: ExtentClose { repair_id: ReconciliationId(137), extent_id: 80 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38333 Sep 22 23:15:06.297 INFO [2] client ExtentClose { repair_id: ReconciliationId(137), extent_id: 80 }
38334 Sep 22 23:15:06.297 DEBG 137 Close extent 80
38335 Sep 22 23:15:06.297 DEBG 137 Close extent 80
38336 Sep 22 23:15:06.298 DEBG 137 Close extent 80
38337 Sep 22 23:15:06.298 DEBG [2] It's time to notify for 137
38338 Sep 22 23:15:06.298 INFO Completion from [2] id:137 status:true
38339 Sep 22 23:15:06.298 INFO [138/752] Repair commands completed
38340 Sep 22 23:15:06.298 INFO Pop front: ReconcileIO { id: ReconciliationId(138), op: ExtentRepair { repair_id: ReconciliationId(138), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38341 Sep 22 23:15:06.298 INFO Sent repair work, now wait for resp
38342 Sep 22 23:15:06.298 INFO [0] received reconcile message
38343 Sep 22 23:15:06.298 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(138), op: ExtentRepair { repair_id: ReconciliationId(138), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38344 Sep 22 23:15:06.298 INFO [0] client ExtentRepair { repair_id: ReconciliationId(138), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38345 Sep 22 23:15:06.298 INFO [0] Sending repair request ReconciliationId(138)
38346 Sep 22 23:15:06.298 INFO [1] received reconcile message
38347 Sep 22 23:15:06.298 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(138), op: ExtentRepair { repair_id: ReconciliationId(138), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38348 Sep 22 23:15:06.298 INFO [1] client ExtentRepair { repair_id: ReconciliationId(138), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38349 Sep 22 23:15:06.298 INFO [1] No action required ReconciliationId(138)
38350 Sep 22 23:15:06.298 INFO [2] received reconcile message
38351 Sep 22 23:15:06.298 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(138), op: ExtentRepair { repair_id: ReconciliationId(138), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38352 Sep 22 23:15:06.298 INFO [2] client ExtentRepair { repair_id: ReconciliationId(138), extent_id: 80, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38353 Sep 22 23:15:06.298 INFO [2] No action required ReconciliationId(138)
38354 Sep 22 23:15:06.299 DEBG 138 Repair extent 80 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
38355 Sep 22 23:15:06.299 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/050.copy"
38356 Sep 22 23:15:06.362 INFO accepted connection, remote_addr: 127.0.0.1:45732, local_addr: 127.0.0.1:46213, task: repair
38357 Sep 22 23:15:06.362 TRCE incoming request, uri: /extent/80/files, method: GET, req_id: dc886983-f442-477d-b9a0-0f17f9de4aa5, remote_addr: 127.0.0.1:45732, local_addr: 127.0.0.1:46213, task: repair
38358 Sep 22 23:15:06.362 INFO request completed, latency_us: 220, response_code: 200, uri: /extent/80/files, method: GET, req_id: dc886983-f442-477d-b9a0-0f17f9de4aa5, remote_addr: 127.0.0.1:45732, local_addr: 127.0.0.1:46213, task: repair
38359 Sep 22 23:15:06.363 INFO eid:80 Found repair files: ["050", "050.db"]
38360 Sep 22 23:15:06.363 TRCE incoming request, uri: /newextent/80/data, method: GET, req_id: 2d573c60-d451-4a48-97bf-43e41496f446, remote_addr: 127.0.0.1:45732, local_addr: 127.0.0.1:46213, task: repair
38361 Sep 22 23:15:06.363 INFO request completed, latency_us: 259, response_code: 200, uri: /newextent/80/data, method: GET, req_id: 2d573c60-d451-4a48-97bf-43e41496f446, remote_addr: 127.0.0.1:45732, local_addr: 127.0.0.1:46213, task: repair
38362 Sep 22 23:15:06.368 TRCE incoming request, uri: /newextent/80/db, method: GET, req_id: a12e1f31-7297-458f-9bdd-6f9fb6d7b903, remote_addr: 127.0.0.1:45732, local_addr: 127.0.0.1:46213, task: repair
38363 Sep 22 23:15:06.368 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/80/db, method: GET, req_id: a12e1f31-7297-458f-9bdd-6f9fb6d7b903, remote_addr: 127.0.0.1:45732, local_addr: 127.0.0.1:46213, task: repair
38364 Sep 22 23:15:06.369 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/050.copy" to "/tmp/downstairs-vrx8aK6L/00/000/050.replace"
38365 Sep 22 23:15:06.369 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38366 Sep 22 23:15:06.370 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/050.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
38367 Sep 22 23:15:06.371 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/050"
38368 Sep 22 23:15:06.371 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/050.db"
38369 Sep 22 23:15:06.371 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38370 Sep 22 23:15:06.371 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/050.replace" to "/tmp/downstairs-vrx8aK6L/00/000/050.completed"
38371 Sep 22 23:15:06.371 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38372 Sep 22 23:15:06.371 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38373 Sep 22 23:15:06.371 DEBG [0] It's time to notify for 138
38374 Sep 22 23:15:06.371 INFO Completion from [0] id:138 status:true
38375 Sep 22 23:15:06.371 INFO [139/752] Repair commands completed
38376 Sep 22 23:15:06.371 INFO Pop front: ReconcileIO { id: ReconciliationId(139), op: ExtentReopen { repair_id: ReconciliationId(139), extent_id: 80 }, state: ClientData([New, New, New]) }
38377 Sep 22 23:15:06.371 INFO Sent repair work, now wait for resp
38378 Sep 22 23:15:06.371 INFO [0] received reconcile message
38379 Sep 22 23:15:06.371 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(139), op: ExtentReopen { repair_id: ReconciliationId(139), extent_id: 80 }, state: ClientData([InProgress, New, New]) }, : downstairs
38380 Sep 22 23:15:06.371 INFO [0] client ExtentReopen { repair_id: ReconciliationId(139), extent_id: 80 }
38381 Sep 22 23:15:06.371 INFO [1] received reconcile message
38382 Sep 22 23:15:06.371 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(139), op: ExtentReopen { repair_id: ReconciliationId(139), extent_id: 80 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38383 Sep 22 23:15:06.371 INFO [1] client ExtentReopen { repair_id: ReconciliationId(139), extent_id: 80 }
38384 Sep 22 23:15:06.371 INFO [2] received reconcile message
38385 Sep 22 23:15:06.371 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(139), op: ExtentReopen { repair_id: ReconciliationId(139), extent_id: 80 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38386 Sep 22 23:15:06.371 INFO [2] client ExtentReopen { repair_id: ReconciliationId(139), extent_id: 80 }
38387 Sep 22 23:15:06.372 DEBG 139 Reopen extent 80
38388 Sep 22 23:15:06.372 DEBG 139 Reopen extent 80
38389 Sep 22 23:15:06.373 DEBG 139 Reopen extent 80
38390 Sep 22 23:15:06.373 DEBG [2] It's time to notify for 139
38391 Sep 22 23:15:06.373 INFO Completion from [2] id:139 status:true
38392 Sep 22 23:15:06.373 INFO [140/752] Repair commands completed
38393 Sep 22 23:15:06.373 INFO Pop front: ReconcileIO { id: ReconciliationId(140), op: ExtentFlush { repair_id: ReconciliationId(140), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38394 Sep 22 23:15:06.373 INFO Sent repair work, now wait for resp
38395 Sep 22 23:15:06.374 INFO [0] received reconcile message
38396 Sep 22 23:15:06.374 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(140), op: ExtentFlush { repair_id: ReconciliationId(140), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38397 Sep 22 23:15:06.374 INFO [0] client ExtentFlush { repair_id: ReconciliationId(140), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38398 Sep 22 23:15:06.374 INFO [1] received reconcile message
38399 Sep 22 23:15:06.374 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(140), op: ExtentFlush { repair_id: ReconciliationId(140), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38400 Sep 22 23:15:06.374 INFO [1] client ExtentFlush { repair_id: ReconciliationId(140), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38401 Sep 22 23:15:06.374 INFO [2] received reconcile message
38402 Sep 22 23:15:06.374 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(140), op: ExtentFlush { repair_id: ReconciliationId(140), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38403 Sep 22 23:15:06.374 INFO [2] client ExtentFlush { repair_id: ReconciliationId(140), extent_id: 177, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38404 Sep 22 23:15:06.374 DEBG 140 Flush extent 177 with f:2 g:2
38405 Sep 22 23:15:06.374 DEBG Flush just extent 177 with f:2 and g:2
38406 Sep 22 23:15:06.374 DEBG [1] It's time to notify for 140
38407 Sep 22 23:15:06.374 INFO Completion from [1] id:140 status:true
38408 Sep 22 23:15:06.374 INFO [141/752] Repair commands completed
38409 Sep 22 23:15:06.374 INFO Pop front: ReconcileIO { id: ReconciliationId(141), op: ExtentClose { repair_id: ReconciliationId(141), extent_id: 177 }, state: ClientData([New, New, New]) }
38410 Sep 22 23:15:06.374 INFO Sent repair work, now wait for resp
38411 Sep 22 23:15:06.374 INFO [0] received reconcile message
38412 Sep 22 23:15:06.374 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(141), op: ExtentClose { repair_id: ReconciliationId(141), extent_id: 177 }, state: ClientData([InProgress, New, New]) }, : downstairs
38413 Sep 22 23:15:06.374 INFO [0] client ExtentClose { repair_id: ReconciliationId(141), extent_id: 177 }
38414 Sep 22 23:15:06.374 INFO [1] received reconcile message
38415 Sep 22 23:15:06.374 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(141), op: ExtentClose { repair_id: ReconciliationId(141), extent_id: 177 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38416 Sep 22 23:15:06.374 INFO [1] client ExtentClose { repair_id: ReconciliationId(141), extent_id: 177 }
38417 Sep 22 23:15:06.374 INFO [2] received reconcile message
38418 Sep 22 23:15:06.374 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(141), op: ExtentClose { repair_id: ReconciliationId(141), extent_id: 177 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38419 Sep 22 23:15:06.374 INFO [2] client ExtentClose { repair_id: ReconciliationId(141), extent_id: 177 }
38420 Sep 22 23:15:06.374 DEBG 141 Close extent 177
38421 Sep 22 23:15:06.375 DEBG 141 Close extent 177
38422 Sep 22 23:15:06.375 DEBG 141 Close extent 177
38423 Sep 22 23:15:06.375 DEBG [2] It's time to notify for 141
38424 Sep 22 23:15:06.375 INFO Completion from [2] id:141 status:true
38425 Sep 22 23:15:06.375 INFO [142/752] Repair commands completed
38426 Sep 22 23:15:06.375 INFO Pop front: ReconcileIO { id: ReconciliationId(142), op: ExtentRepair { repair_id: ReconciliationId(142), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38427 Sep 22 23:15:06.375 INFO Sent repair work, now wait for resp
38428 Sep 22 23:15:06.375 INFO [0] received reconcile message
38429 Sep 22 23:15:06.375 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(142), op: ExtentRepair { repair_id: ReconciliationId(142), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38430 Sep 22 23:15:06.376 INFO [0] client ExtentRepair { repair_id: ReconciliationId(142), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38431 Sep 22 23:15:06.376 INFO [0] Sending repair request ReconciliationId(142)
38432 Sep 22 23:15:06.376 INFO [1] received reconcile message
38433 Sep 22 23:15:06.376 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(142), op: ExtentRepair { repair_id: ReconciliationId(142), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38434 Sep 22 23:15:06.376 INFO [1] client ExtentRepair { repair_id: ReconciliationId(142), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38435 Sep 22 23:15:06.376 INFO [1] No action required ReconciliationId(142)
38436 Sep 22 23:15:06.376 INFO [2] received reconcile message
38437 Sep 22 23:15:06.376 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(142), op: ExtentRepair { repair_id: ReconciliationId(142), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38438 Sep 22 23:15:06.376 INFO [2] client ExtentRepair { repair_id: ReconciliationId(142), extent_id: 177, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38439 Sep 22 23:15:06.376 INFO [2] No action required ReconciliationId(142)
38440 Sep 22 23:15:06.376 DEBG 142 Repair extent 177 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
38441 Sep 22 23:15:06.376 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0B1.copy"
38442 Sep 22 23:15:06.439 INFO accepted connection, remote_addr: 127.0.0.1:51983, local_addr: 127.0.0.1:46213, task: repair
38443 Sep 22 23:15:06.439 TRCE incoming request, uri: /extent/177/files, method: GET, req_id: fa4f6e7b-68a9-47f3-b000-be14676a3687, remote_addr: 127.0.0.1:51983, local_addr: 127.0.0.1:46213, task: repair
38444 Sep 22 23:15:06.440 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/177/files, method: GET, req_id: fa4f6e7b-68a9-47f3-b000-be14676a3687, remote_addr: 127.0.0.1:51983, local_addr: 127.0.0.1:46213, task: repair
38445 Sep 22 23:15:06.440 INFO eid:177 Found repair files: ["0B1", "0B1.db"]
38446 Sep 22 23:15:06.440 TRCE incoming request, uri: /newextent/177/data, method: GET, req_id: 1563ffb4-af56-4ec4-873a-73496fb38fc6, remote_addr: 127.0.0.1:51983, local_addr: 127.0.0.1:46213, task: repair
38447 Sep 22 23:15:06.440 INFO request completed, latency_us: 253, response_code: 200, uri: /newextent/177/data, method: GET, req_id: 1563ffb4-af56-4ec4-873a-73496fb38fc6, remote_addr: 127.0.0.1:51983, local_addr: 127.0.0.1:46213, task: repair
38448 Sep 22 23:15:06.445 TRCE incoming request, uri: /newextent/177/db, method: GET, req_id: b7bfc40c-b8f2-4ec8-aca1-66d056995dde, remote_addr: 127.0.0.1:51983, local_addr: 127.0.0.1:46213, task: repair
38449 Sep 22 23:15:06.445 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/177/db, method: GET, req_id: b7bfc40c-b8f2-4ec8-aca1-66d056995dde, remote_addr: 127.0.0.1:51983, local_addr: 127.0.0.1:46213, task: repair
38450 Sep 22 23:15:06.447 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0B1.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0B1.replace"
38451 Sep 22 23:15:06.447 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38452 Sep 22 23:15:06.447 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0B1.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
38453 Sep 22 23:15:06.448 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B1"
38454 Sep 22 23:15:06.448 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B1.db"
38455 Sep 22 23:15:06.448 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38456 Sep 22 23:15:06.448 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0B1.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0B1.completed"
38457 Sep 22 23:15:06.448 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38458 Sep 22 23:15:06.448 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38459 Sep 22 23:15:06.448 DEBG [0] It's time to notify for 142
38460 Sep 22 23:15:06.448 INFO Completion from [0] id:142 status:true
38461 Sep 22 23:15:06.448 INFO [143/752] Repair commands completed
38462 Sep 22 23:15:06.448 INFO Pop front: ReconcileIO { id: ReconciliationId(143), op: ExtentReopen { repair_id: ReconciliationId(143), extent_id: 177 }, state: ClientData([New, New, New]) }
38463 Sep 22 23:15:06.448 INFO Sent repair work, now wait for resp
38464 Sep 22 23:15:06.448 INFO [0] received reconcile message
38465 Sep 22 23:15:06.448 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(143), op: ExtentReopen { repair_id: ReconciliationId(143), extent_id: 177 }, state: ClientData([InProgress, New, New]) }, : downstairs
38466 Sep 22 23:15:06.448 INFO [0] client ExtentReopen { repair_id: ReconciliationId(143), extent_id: 177 }
38467 Sep 22 23:15:06.448 INFO [1] received reconcile message
38468 Sep 22 23:15:06.448 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(143), op: ExtentReopen { repair_id: ReconciliationId(143), extent_id: 177 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38469 Sep 22 23:15:06.448 INFO [1] client ExtentReopen { repair_id: ReconciliationId(143), extent_id: 177 }
38470 Sep 22 23:15:06.448 INFO [2] received reconcile message
38471 Sep 22 23:15:06.448 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(143), op: ExtentReopen { repair_id: ReconciliationId(143), extent_id: 177 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38472 Sep 22 23:15:06.448 INFO [2] client ExtentReopen { repair_id: ReconciliationId(143), extent_id: 177 }
38473 Sep 22 23:15:06.449 DEBG 143 Reopen extent 177
38474 Sep 22 23:15:06.449 DEBG 143 Reopen extent 177
38475 Sep 22 23:15:06.450 DEBG 143 Reopen extent 177
38476 Sep 22 23:15:06.450 DEBG [2] It's time to notify for 143
38477 Sep 22 23:15:06.450 INFO Completion from [2] id:143 status:true
38478 Sep 22 23:15:06.450 INFO [144/752] Repair commands completed
38479 Sep 22 23:15:06.450 INFO Pop front: ReconcileIO { id: ReconciliationId(144), op: ExtentFlush { repair_id: ReconciliationId(144), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38480 Sep 22 23:15:06.450 INFO Sent repair work, now wait for resp
38481 Sep 22 23:15:06.450 INFO [0] received reconcile message
38482 Sep 22 23:15:06.450 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(144), op: ExtentFlush { repair_id: ReconciliationId(144), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38483 Sep 22 23:15:06.450 INFO [0] client ExtentFlush { repair_id: ReconciliationId(144), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38484 Sep 22 23:15:06.450 INFO [1] received reconcile message
38485 Sep 22 23:15:06.450 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(144), op: ExtentFlush { repair_id: ReconciliationId(144), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38486 Sep 22 23:15:06.450 INFO [1] client ExtentFlush { repair_id: ReconciliationId(144), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38487 Sep 22 23:15:06.451 INFO [2] received reconcile message
38488 Sep 22 23:15:06.451 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(144), op: ExtentFlush { repair_id: ReconciliationId(144), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38489 Sep 22 23:15:06.451 INFO [2] client ExtentFlush { repair_id: ReconciliationId(144), extent_id: 84, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38490 Sep 22 23:15:06.451 DEBG 144 Flush extent 84 with f:2 g:2
38491 Sep 22 23:15:06.451 DEBG Flush just extent 84 with f:2 and g:2
38492 Sep 22 23:15:06.451 DEBG [1] It's time to notify for 144
38493 Sep 22 23:15:06.451 INFO Completion from [1] id:144 status:true
38494 Sep 22 23:15:06.451 INFO [145/752] Repair commands completed
38495 Sep 22 23:15:06.451 INFO Pop front: ReconcileIO { id: ReconciliationId(145), op: ExtentClose { repair_id: ReconciliationId(145), extent_id: 84 }, state: ClientData([New, New, New]) }
38496 Sep 22 23:15:06.451 INFO Sent repair work, now wait for resp
38497 Sep 22 23:15:06.451 INFO [0] received reconcile message
38498 Sep 22 23:15:06.451 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(145), op: ExtentClose { repair_id: ReconciliationId(145), extent_id: 84 }, state: ClientData([InProgress, New, New]) }, : downstairs
38499 Sep 22 23:15:06.451 INFO [0] client ExtentClose { repair_id: ReconciliationId(145), extent_id: 84 }
38500 Sep 22 23:15:06.451 INFO [1] received reconcile message
38501 Sep 22 23:15:06.451 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(145), op: ExtentClose { repair_id: ReconciliationId(145), extent_id: 84 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38502 Sep 22 23:15:06.451 INFO [1] client ExtentClose { repair_id: ReconciliationId(145), extent_id: 84 }
38503 Sep 22 23:15:06.451 INFO [2] received reconcile message
38504 Sep 22 23:15:06.451 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(145), op: ExtentClose { repair_id: ReconciliationId(145), extent_id: 84 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38505 Sep 22 23:15:06.451 INFO [2] client ExtentClose { repair_id: ReconciliationId(145), extent_id: 84 }
38506 Sep 22 23:15:06.451 DEBG 145 Close extent 84
38507 Sep 22 23:15:06.451 DEBG 145 Close extent 84
38508 Sep 22 23:15:06.452 DEBG 145 Close extent 84
38509 Sep 22 23:15:06.452 DEBG [2] It's time to notify for 145
38510 Sep 22 23:15:06.452 INFO Completion from [2] id:145 status:true
38511 Sep 22 23:15:06.452 INFO [146/752] Repair commands completed
38512 Sep 22 23:15:06.452 INFO Pop front: ReconcileIO { id: ReconciliationId(146), op: ExtentRepair { repair_id: ReconciliationId(146), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38513 Sep 22 23:15:06.452 INFO Sent repair work, now wait for resp
38514 Sep 22 23:15:06.452 INFO [0] received reconcile message
38515 Sep 22 23:15:06.452 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(146), op: ExtentRepair { repair_id: ReconciliationId(146), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38516 Sep 22 23:15:06.452 INFO [0] client ExtentRepair { repair_id: ReconciliationId(146), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38517 Sep 22 23:15:06.452 INFO [0] Sending repair request ReconciliationId(146)
38518 Sep 22 23:15:06.452 INFO [1] received reconcile message
38519 Sep 22 23:15:06.452 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(146), op: ExtentRepair { repair_id: ReconciliationId(146), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38520 Sep 22 23:15:06.452 INFO [1] client ExtentRepair { repair_id: ReconciliationId(146), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38521 Sep 22 23:15:06.452 INFO [1] No action required ReconciliationId(146)
38522 Sep 22 23:15:06.453 INFO [2] received reconcile message
38523 Sep 22 23:15:06.453 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(146), op: ExtentRepair { repair_id: ReconciliationId(146), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38524 Sep 22 23:15:06.453 INFO [2] client ExtentRepair { repair_id: ReconciliationId(146), extent_id: 84, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38525 Sep 22 23:15:06.453 INFO [2] No action required ReconciliationId(146)
38526 Sep 22 23:15:06.453 DEBG 146 Repair extent 84 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
38527 Sep 22 23:15:06.453 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/054.copy"
38528 Sep 22 23:15:06.516 INFO accepted connection, remote_addr: 127.0.0.1:65235, local_addr: 127.0.0.1:46213, task: repair
38529 Sep 22 23:15:06.516 TRCE incoming request, uri: /extent/84/files, method: GET, req_id: 52bd89b2-8419-4987-a6d3-095edf180ca8, remote_addr: 127.0.0.1:65235, local_addr: 127.0.0.1:46213, task: repair
38530 Sep 22 23:15:06.517 INFO request completed, latency_us: 189, response_code: 200, uri: /extent/84/files, method: GET, req_id: 52bd89b2-8419-4987-a6d3-095edf180ca8, remote_addr: 127.0.0.1:65235, local_addr: 127.0.0.1:46213, task: repair
38531 Sep 22 23:15:06.517 INFO eid:84 Found repair files: ["054", "054.db"]
38532 Sep 22 23:15:06.517 TRCE incoming request, uri: /newextent/84/data, method: GET, req_id: 11f42127-b083-4337-820e-647f55d9b7fd, remote_addr: 127.0.0.1:65235, local_addr: 127.0.0.1:46213, task: repair
38533 Sep 22 23:15:06.517 INFO request completed, latency_us: 251, response_code: 200, uri: /newextent/84/data, method: GET, req_id: 11f42127-b083-4337-820e-647f55d9b7fd, remote_addr: 127.0.0.1:65235, local_addr: 127.0.0.1:46213, task: repair
38534 Sep 22 23:15:06.522 TRCE incoming request, uri: /newextent/84/db, method: GET, req_id: ed78afb8-8c12-4c0f-a465-38275bf30c55, remote_addr: 127.0.0.1:65235, local_addr: 127.0.0.1:46213, task: repair
38535 Sep 22 23:15:06.523 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/84/db, method: GET, req_id: ed78afb8-8c12-4c0f-a465-38275bf30c55, remote_addr: 127.0.0.1:65235, local_addr: 127.0.0.1:46213, task: repair
38536 Sep 22 23:15:06.524 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/054.copy" to "/tmp/downstairs-vrx8aK6L/00/000/054.replace"
38537 Sep 22 23:15:06.524 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38538 Sep 22 23:15:06.524 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/054.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
38539 Sep 22 23:15:06.525 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/054"
38540 Sep 22 23:15:06.525 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/054.db"
38541 Sep 22 23:15:06.525 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38542 Sep 22 23:15:06.525 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/054.replace" to "/tmp/downstairs-vrx8aK6L/00/000/054.completed"
38543 Sep 22 23:15:06.525 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38544 Sep 22 23:15:06.525 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38545 Sep 22 23:15:06.525 DEBG [0] It's time to notify for 146
38546 Sep 22 23:15:06.525 INFO Completion from [0] id:146 status:true
38547 Sep 22 23:15:06.525 INFO [147/752] Repair commands completed
38548 Sep 22 23:15:06.525 INFO Pop front: ReconcileIO { id: ReconciliationId(147), op: ExtentReopen { repair_id: ReconciliationId(147), extent_id: 84 }, state: ClientData([New, New, New]) }
38549 Sep 22 23:15:06.525 INFO Sent repair work, now wait for resp
38550 Sep 22 23:15:06.525 INFO [0] received reconcile message
38551 Sep 22 23:15:06.525 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(147), op: ExtentReopen { repair_id: ReconciliationId(147), extent_id: 84 }, state: ClientData([InProgress, New, New]) }, : downstairs
38552 Sep 22 23:15:06.525 INFO [0] client ExtentReopen { repair_id: ReconciliationId(147), extent_id: 84 }
38553 Sep 22 23:15:06.525 INFO [1] received reconcile message
38554 Sep 22 23:15:06.525 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(147), op: ExtentReopen { repair_id: ReconciliationId(147), extent_id: 84 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38555 Sep 22 23:15:06.525 INFO [1] client ExtentReopen { repair_id: ReconciliationId(147), extent_id: 84 }
38556 Sep 22 23:15:06.525 INFO [2] received reconcile message
38557 Sep 22 23:15:06.525 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(147), op: ExtentReopen { repair_id: ReconciliationId(147), extent_id: 84 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38558 Sep 22 23:15:06.526 INFO [2] client ExtentReopen { repair_id: ReconciliationId(147), extent_id: 84 }
38559 Sep 22 23:15:06.526 DEBG 147 Reopen extent 84
38560 Sep 22 23:15:06.526 DEBG 147 Reopen extent 84
38561 Sep 22 23:15:06.527 DEBG 147 Reopen extent 84
38562 Sep 22 23:15:06.527 DEBG [2] It's time to notify for 147
38563 Sep 22 23:15:06.527 INFO Completion from [2] id:147 status:true
38564 Sep 22 23:15:06.527 INFO [148/752] Repair commands completed
38565 Sep 22 23:15:06.527 INFO Pop front: ReconcileIO { id: ReconciliationId(148), op: ExtentFlush { repair_id: ReconciliationId(148), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38566 Sep 22 23:15:06.527 INFO Sent repair work, now wait for resp
38567 Sep 22 23:15:06.527 INFO [0] received reconcile message
38568 Sep 22 23:15:06.527 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(148), op: ExtentFlush { repair_id: ReconciliationId(148), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38569 Sep 22 23:15:06.527 INFO [0] client ExtentFlush { repair_id: ReconciliationId(148), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38570 Sep 22 23:15:06.527 INFO [1] received reconcile message
38571 Sep 22 23:15:06.528 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(148), op: ExtentFlush { repair_id: ReconciliationId(148), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38572 Sep 22 23:15:06.528 INFO [1] client ExtentFlush { repair_id: ReconciliationId(148), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38573 Sep 22 23:15:06.528 INFO [2] received reconcile message
38574 Sep 22 23:15:06.528 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(148), op: ExtentFlush { repair_id: ReconciliationId(148), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38575 Sep 22 23:15:06.528 INFO [2] client ExtentFlush { repair_id: ReconciliationId(148), extent_id: 91, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38576 Sep 22 23:15:06.528 DEBG 148 Flush extent 91 with f:2 g:2
38577 Sep 22 23:15:06.528 DEBG Flush just extent 91 with f:2 and g:2
38578 Sep 22 23:15:06.528 DEBG [1] It's time to notify for 148
38579 Sep 22 23:15:06.528 INFO Completion from [1] id:148 status:true
38580 Sep 22 23:15:06.528 INFO [149/752] Repair commands completed
38581 Sep 22 23:15:06.528 INFO Pop front: ReconcileIO { id: ReconciliationId(149), op: ExtentClose { repair_id: ReconciliationId(149), extent_id: 91 }, state: ClientData([New, New, New]) }
38582 Sep 22 23:15:06.528 INFO Sent repair work, now wait for resp
38583 Sep 22 23:15:06.528 INFO [0] received reconcile message
38584 Sep 22 23:15:06.528 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(149), op: ExtentClose { repair_id: ReconciliationId(149), extent_id: 91 }, state: ClientData([InProgress, New, New]) }, : downstairs
38585 Sep 22 23:15:06.528 INFO [0] client ExtentClose { repair_id: ReconciliationId(149), extent_id: 91 }
38586 Sep 22 23:15:06.528 INFO [1] received reconcile message
38587 Sep 22 23:15:06.528 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(149), op: ExtentClose { repair_id: ReconciliationId(149), extent_id: 91 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38588 Sep 22 23:15:06.528 INFO [1] client ExtentClose { repair_id: ReconciliationId(149), extent_id: 91 }
38589 Sep 22 23:15:06.528 INFO [2] received reconcile message
38590 Sep 22 23:15:06.528 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(149), op: ExtentClose { repair_id: ReconciliationId(149), extent_id: 91 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38591 Sep 22 23:15:06.528 INFO [2] client ExtentClose { repair_id: ReconciliationId(149), extent_id: 91 }
38592 Sep 22 23:15:06.528 DEBG 149 Close extent 91
38593 Sep 22 23:15:06.529 DEBG 149 Close extent 91
38594 Sep 22 23:15:06.529 DEBG 149 Close extent 91
38595 Sep 22 23:15:06.529 DEBG [2] It's time to notify for 149
38596 Sep 22 23:15:06.529 INFO Completion from [2] id:149 status:true
38597 Sep 22 23:15:06.529 INFO [150/752] Repair commands completed
38598 Sep 22 23:15:06.529 INFO Pop front: ReconcileIO { id: ReconciliationId(150), op: ExtentRepair { repair_id: ReconciliationId(150), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38599 Sep 22 23:15:06.529 INFO Sent repair work, now wait for resp
38600 Sep 22 23:15:06.529 INFO [0] received reconcile message
38601 Sep 22 23:15:06.529 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(150), op: ExtentRepair { repair_id: ReconciliationId(150), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38602 Sep 22 23:15:06.529 INFO [0] client ExtentRepair { repair_id: ReconciliationId(150), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38603 Sep 22 23:15:06.529 INFO [0] Sending repair request ReconciliationId(150)
38604 Sep 22 23:15:06.530 INFO [1] received reconcile message
38605 Sep 22 23:15:06.530 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(150), op: ExtentRepair { repair_id: ReconciliationId(150), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38606 Sep 22 23:15:06.530 INFO [1] client ExtentRepair { repair_id: ReconciliationId(150), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38607 Sep 22 23:15:06.530 INFO [1] No action required ReconciliationId(150)
38608 Sep 22 23:15:06.530 INFO [2] received reconcile message
38609 Sep 22 23:15:06.530 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(150), op: ExtentRepair { repair_id: ReconciliationId(150), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38610 Sep 22 23:15:06.530 INFO [2] client ExtentRepair { repair_id: ReconciliationId(150), extent_id: 91, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38611 Sep 22 23:15:06.530 INFO [2] No action required ReconciliationId(150)
38612 Sep 22 23:15:06.530 DEBG 150 Repair extent 91 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
38613 Sep 22 23:15:06.530 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/05B.copy"
38614 Sep 22 23:15:06.594 INFO accepted connection, remote_addr: 127.0.0.1:56542, local_addr: 127.0.0.1:46213, task: repair
38615 Sep 22 23:15:06.594 TRCE incoming request, uri: /extent/91/files, method: GET, req_id: 186f8ffd-0269-47e2-b743-690c971979c6, remote_addr: 127.0.0.1:56542, local_addr: 127.0.0.1:46213, task: repair
38616 Sep 22 23:15:06.595 INFO request completed, latency_us: 191, response_code: 200, uri: /extent/91/files, method: GET, req_id: 186f8ffd-0269-47e2-b743-690c971979c6, remote_addr: 127.0.0.1:56542, local_addr: 127.0.0.1:46213, task: repair
38617 Sep 22 23:15:06.595 INFO eid:91 Found repair files: ["05B", "05B.db"]
38618 Sep 22 23:15:06.595 TRCE incoming request, uri: /newextent/91/data, method: GET, req_id: 967527ba-384d-4767-9af9-84252d59611f, remote_addr: 127.0.0.1:56542, local_addr: 127.0.0.1:46213, task: repair
38619 Sep 22 23:15:06.595 INFO request completed, latency_us: 251, response_code: 200, uri: /newextent/91/data, method: GET, req_id: 967527ba-384d-4767-9af9-84252d59611f, remote_addr: 127.0.0.1:56542, local_addr: 127.0.0.1:46213, task: repair
38620 Sep 22 23:15:06.600 TRCE incoming request, uri: /newextent/91/db, method: GET, req_id: 63c3aaa1-23d4-4073-9c80-965816ea0c63, remote_addr: 127.0.0.1:56542, local_addr: 127.0.0.1:46213, task: repair
38621 Sep 22 23:15:06.600 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/91/db, method: GET, req_id: 63c3aaa1-23d4-4073-9c80-965816ea0c63, remote_addr: 127.0.0.1:56542, local_addr: 127.0.0.1:46213, task: repair
38622 Sep 22 23:15:06.602 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/05B.copy" to "/tmp/downstairs-vrx8aK6L/00/000/05B.replace"
38623 Sep 22 23:15:06.602 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38624 Sep 22 23:15:06.602 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/05B.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
38625 Sep 22 23:15:06.603 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/05B"
38626 Sep 22 23:15:06.603 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/05B.db"
38627 Sep 22 23:15:06.603 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38628 Sep 22 23:15:06.603 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/05B.replace" to "/tmp/downstairs-vrx8aK6L/00/000/05B.completed"
38629 Sep 22 23:15:06.603 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38630 Sep 22 23:15:06.603 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38631 Sep 22 23:15:06.603 DEBG [0] It's time to notify for 150
38632 Sep 22 23:15:06.603 INFO Completion from [0] id:150 status:true
38633 Sep 22 23:15:06.603 INFO [151/752] Repair commands completed
38634 Sep 22 23:15:06.603 INFO Pop front: ReconcileIO { id: ReconciliationId(151), op: ExtentReopen { repair_id: ReconciliationId(151), extent_id: 91 }, state: ClientData([New, New, New]) }
38635 Sep 22 23:15:06.603 INFO Sent repair work, now wait for resp
38636 Sep 22 23:15:06.603 INFO [0] received reconcile message
38637 Sep 22 23:15:06.603 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(151), op: ExtentReopen { repair_id: ReconciliationId(151), extent_id: 91 }, state: ClientData([InProgress, New, New]) }, : downstairs
38638 Sep 22 23:15:06.603 INFO [0] client ExtentReopen { repair_id: ReconciliationId(151), extent_id: 91 }
38639 Sep 22 23:15:06.603 INFO [1] received reconcile message
38640 Sep 22 23:15:06.603 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(151), op: ExtentReopen { repair_id: ReconciliationId(151), extent_id: 91 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38641 Sep 22 23:15:06.603 INFO [1] client ExtentReopen { repair_id: ReconciliationId(151), extent_id: 91 }
38642 Sep 22 23:15:06.603 INFO [2] received reconcile message
38643 Sep 22 23:15:06.603 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(151), op: ExtentReopen { repair_id: ReconciliationId(151), extent_id: 91 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38644 Sep 22 23:15:06.603 INFO [2] client ExtentReopen { repair_id: ReconciliationId(151), extent_id: 91 }
38645 Sep 22 23:15:06.604 DEBG 151 Reopen extent 91
38646 Sep 22 23:15:06.604 DEBG 151 Reopen extent 91
38647 Sep 22 23:15:06.605 DEBG 151 Reopen extent 91
38648 Sep 22 23:15:06.605 DEBG [2] It's time to notify for 151
38649 Sep 22 23:15:06.605 INFO Completion from [2] id:151 status:true
38650 Sep 22 23:15:06.605 INFO [152/752] Repair commands completed
38651 Sep 22 23:15:06.605 INFO Pop front: ReconcileIO { id: ReconciliationId(152), op: ExtentFlush { repair_id: ReconciliationId(152), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38652 Sep 22 23:15:06.605 INFO Sent repair work, now wait for resp
38653 Sep 22 23:15:06.605 INFO [0] received reconcile message
38654 Sep 22 23:15:06.605 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(152), op: ExtentFlush { repair_id: ReconciliationId(152), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38655 Sep 22 23:15:06.605 INFO [0] client ExtentFlush { repair_id: ReconciliationId(152), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38656 Sep 22 23:15:06.605 INFO [1] received reconcile message
38657 Sep 22 23:15:06.605 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(152), op: ExtentFlush { repair_id: ReconciliationId(152), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38658 Sep 22 23:15:06.605 INFO [1] client ExtentFlush { repair_id: ReconciliationId(152), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38659 Sep 22 23:15:06.605 INFO [2] received reconcile message
38660 Sep 22 23:15:06.606 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(152), op: ExtentFlush { repair_id: ReconciliationId(152), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38661 Sep 22 23:15:06.606 INFO [2] client ExtentFlush { repair_id: ReconciliationId(152), extent_id: 2, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38662 Sep 22 23:15:06.606 DEBG 152 Flush extent 2 with f:2 g:2
38663 Sep 22 23:15:06.606 DEBG Flush just extent 2 with f:2 and g:2
38664 Sep 22 23:15:06.606 DEBG [1] It's time to notify for 152
38665 Sep 22 23:15:06.606 INFO Completion from [1] id:152 status:true
38666 Sep 22 23:15:06.606 INFO [153/752] Repair commands completed
38667 Sep 22 23:15:06.606 INFO Pop front: ReconcileIO { id: ReconciliationId(153), op: ExtentClose { repair_id: ReconciliationId(153), extent_id: 2 }, state: ClientData([New, New, New]) }
38668 Sep 22 23:15:06.606 INFO Sent repair work, now wait for resp
38669 Sep 22 23:15:06.606 INFO [0] received reconcile message
38670 Sep 22 23:15:06.606 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(153), op: ExtentClose { repair_id: ReconciliationId(153), extent_id: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38671 Sep 22 23:15:06.606 INFO [0] client ExtentClose { repair_id: ReconciliationId(153), extent_id: 2 }
38672 Sep 22 23:15:06.606 INFO [1] received reconcile message
38673 Sep 22 23:15:06.606 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(153), op: ExtentClose { repair_id: ReconciliationId(153), extent_id: 2 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38674 Sep 22 23:15:06.606 INFO [1] client ExtentClose { repair_id: ReconciliationId(153), extent_id: 2 }
38675 Sep 22 23:15:06.606 INFO [2] received reconcile message
38676 Sep 22 23:15:06.606 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(153), op: ExtentClose { repair_id: ReconciliationId(153), extent_id: 2 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38677 Sep 22 23:15:06.606 INFO [2] client ExtentClose { repair_id: ReconciliationId(153), extent_id: 2 }
38678 Sep 22 23:15:06.606 DEBG 153 Close extent 2
38679 Sep 22 23:15:06.606 DEBG 153 Close extent 2
38680 Sep 22 23:15:06.607 DEBG 153 Close extent 2
38681 Sep 22 23:15:06.607 DEBG [2] It's time to notify for 153
38682 Sep 22 23:15:06.607 INFO Completion from [2] id:153 status:true
38683 Sep 22 23:15:06.607 INFO [154/752] Repair commands completed
38684 Sep 22 23:15:06.607 INFO Pop front: ReconcileIO { id: ReconciliationId(154), op: ExtentRepair { repair_id: ReconciliationId(154), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38685 Sep 22 23:15:06.607 INFO Sent repair work, now wait for resp
38686 Sep 22 23:15:06.607 INFO [0] received reconcile message
38687 Sep 22 23:15:06.607 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(154), op: ExtentRepair { repair_id: ReconciliationId(154), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38688 Sep 22 23:15:06.607 INFO [0] client ExtentRepair { repair_id: ReconciliationId(154), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38689 Sep 22 23:15:06.607 INFO [0] Sending repair request ReconciliationId(154)
38690 Sep 22 23:15:06.607 INFO [1] received reconcile message
38691 Sep 22 23:15:06.607 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(154), op: ExtentRepair { repair_id: ReconciliationId(154), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38692 Sep 22 23:15:06.607 INFO [1] client ExtentRepair { repair_id: ReconciliationId(154), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38693 Sep 22 23:15:06.607 INFO [1] No action required ReconciliationId(154)
38694 Sep 22 23:15:06.607 INFO [2] received reconcile message
38695 Sep 22 23:15:06.607 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(154), op: ExtentRepair { repair_id: ReconciliationId(154), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38696 Sep 22 23:15:06.608 INFO [2] client ExtentRepair { repair_id: ReconciliationId(154), extent_id: 2, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38697 Sep 22 23:15:06.608 INFO [2] No action required ReconciliationId(154)
38698 Sep 22 23:15:06.608 DEBG 154 Repair extent 2 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
38699 Sep 22 23:15:06.608 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/002.copy"
38700 Sep 22 23:15:06.622 DEBG [2] Read AckReady 1076, : downstairs
38701 Sep 22 23:15:06.623 DEBG up_ds_listen was notified
38702 Sep 22 23:15:06.623 DEBG up_ds_listen process 1076
38703 Sep 22 23:15:06.623 DEBG [A] ack job 1076:77, : downstairs
38704 Sep 22 23:15:06.672 INFO accepted connection, remote_addr: 127.0.0.1:56428, local_addr: 127.0.0.1:46213, task: repair
38705 Sep 22 23:15:06.672 TRCE incoming request, uri: /extent/2/files, method: GET, req_id: 2d4d3eb2-542d-4858-a034-8ce67eb076e4, remote_addr: 127.0.0.1:56428, local_addr: 127.0.0.1:46213, task: repair
38706 Sep 22 23:15:06.672 INFO request completed, latency_us: 195, response_code: 200, uri: /extent/2/files, method: GET, req_id: 2d4d3eb2-542d-4858-a034-8ce67eb076e4, remote_addr: 127.0.0.1:56428, local_addr: 127.0.0.1:46213, task: repair
38707 Sep 22 23:15:06.672 INFO eid:2 Found repair files: ["002", "002.db"]
38708 Sep 22 23:15:06.673 TRCE incoming request, uri: /newextent/2/data, method: GET, req_id: 85a83a53-d586-477c-ac09-dcbcc062a82a, remote_addr: 127.0.0.1:56428, local_addr: 127.0.0.1:46213, task: repair
38709 Sep 22 23:15:06.673 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/2/data, method: GET, req_id: 85a83a53-d586-477c-ac09-dcbcc062a82a, remote_addr: 127.0.0.1:56428, local_addr: 127.0.0.1:46213, task: repair
38710 Sep 22 23:15:06.676 DEBG up_ds_listen checked 1 jobs, back to waiting
38711 Sep 22 23:15:06.678 TRCE incoming request, uri: /newextent/2/db, method: GET, req_id: d3ff304f-acbe-4218-bcc7-3d6b04cf2f68, remote_addr: 127.0.0.1:56428, local_addr: 127.0.0.1:46213, task: repair
38712 Sep 22 23:15:06.678 DEBG Flush :1077 extent_limit None deps:[JobId(1076), JobId(1075)] res:true f:28 g:1
38713 Sep 22 23:15:06.678 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/2/db, method: GET, req_id: d3ff304f-acbe-4218-bcc7-3d6b04cf2f68, remote_addr: 127.0.0.1:56428, local_addr: 127.0.0.1:46213, task: repair
38714 Sep 22 23:15:06.679 INFO [lossy] sleeping 1 second
38715 Sep 22 23:15:06.680 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/002.copy" to "/tmp/downstairs-vrx8aK6L/00/000/002.replace"
38716 Sep 22 23:15:06.680 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38717 Sep 22 23:15:06.680 DEBG Flush :1077 extent_limit None deps:[JobId(1076), JobId(1075)] res:true f:28 g:1
38718 Sep 22 23:15:06.680 INFO [lossy] sleeping 1 second
38719 Sep 22 23:15:06.680 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/002.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
38720 Sep 22 23:15:06.681 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/002"
38721 Sep 22 23:15:06.681 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/002.db"
38722 Sep 22 23:15:06.681 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38723 Sep 22 23:15:06.681 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/002.replace" to "/tmp/downstairs-vrx8aK6L/00/000/002.completed"
38724 Sep 22 23:15:06.681 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38725 Sep 22 23:15:06.681 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38726 Sep 22 23:15:06.681 DEBG [0] It's time to notify for 154
38727 Sep 22 23:15:06.681 INFO Completion from [0] id:154 status:true
38728 Sep 22 23:15:06.681 INFO [155/752] Repair commands completed
38729 Sep 22 23:15:06.681 INFO Pop front: ReconcileIO { id: ReconciliationId(155), op: ExtentReopen { repair_id: ReconciliationId(155), extent_id: 2 }, state: ClientData([New, New, New]) }
38730 Sep 22 23:15:06.681 INFO Sent repair work, now wait for resp
38731 Sep 22 23:15:06.681 INFO [0] received reconcile message
38732 Sep 22 23:15:06.681 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(155), op: ExtentReopen { repair_id: ReconciliationId(155), extent_id: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38733 Sep 22 23:15:06.681 INFO [0] client ExtentReopen { repair_id: ReconciliationId(155), extent_id: 2 }
38734 Sep 22 23:15:06.681 INFO [1] received reconcile message
38735 Sep 22 23:15:06.681 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(155), op: ExtentReopen { repair_id: ReconciliationId(155), extent_id: 2 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38736 Sep 22 23:15:06.681 INFO [1] client ExtentReopen { repair_id: ReconciliationId(155), extent_id: 2 }
38737 Sep 22 23:15:06.681 INFO [2] received reconcile message
38738 Sep 22 23:15:06.681 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(155), op: ExtentReopen { repair_id: ReconciliationId(155), extent_id: 2 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38739 Sep 22 23:15:06.681 INFO [2] client ExtentReopen { repair_id: ReconciliationId(155), extent_id: 2 }
38740 Sep 22 23:15:06.682 DEBG 155 Reopen extent 2
38741 Sep 22 23:15:06.682 DEBG 155 Reopen extent 2
38742 Sep 22 23:15:06.683 DEBG 155 Reopen extent 2
38743 Sep 22 23:15:06.683 DEBG [2] It's time to notify for 155
38744 Sep 22 23:15:06.683 INFO Completion from [2] id:155 status:true
38745 Sep 22 23:15:06.683 INFO [156/752] Repair commands completed
38746 Sep 22 23:15:06.683 INFO Pop front: ReconcileIO { id: ReconciliationId(156), op: ExtentFlush { repair_id: ReconciliationId(156), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38747 Sep 22 23:15:06.683 INFO Sent repair work, now wait for resp
38748 Sep 22 23:15:06.683 INFO [0] received reconcile message
38749 Sep 22 23:15:06.683 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(156), op: ExtentFlush { repair_id: ReconciliationId(156), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38750 Sep 22 23:15:06.683 INFO [0] client ExtentFlush { repair_id: ReconciliationId(156), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38751 Sep 22 23:15:06.683 INFO [1] received reconcile message
38752 Sep 22 23:15:06.683 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(156), op: ExtentFlush { repair_id: ReconciliationId(156), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38753 Sep 22 23:15:06.683 INFO [1] client ExtentFlush { repair_id: ReconciliationId(156), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38754 Sep 22 23:15:06.684 INFO [2] received reconcile message
38755 Sep 22 23:15:06.684 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(156), op: ExtentFlush { repair_id: ReconciliationId(156), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38756 Sep 22 23:15:06.684 INFO [2] client ExtentFlush { repair_id: ReconciliationId(156), extent_id: 99, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38757 Sep 22 23:15:06.684 DEBG 156 Flush extent 99 with f:2 g:2
38758 Sep 22 23:15:06.684 DEBG Flush just extent 99 with f:2 and g:2
38759 Sep 22 23:15:06.684 DEBG [1] It's time to notify for 156
38760 Sep 22 23:15:06.684 INFO Completion from [1] id:156 status:true
38761 Sep 22 23:15:06.684 INFO [157/752] Repair commands completed
38762 Sep 22 23:15:06.684 INFO Pop front: ReconcileIO { id: ReconciliationId(157), op: ExtentClose { repair_id: ReconciliationId(157), extent_id: 99 }, state: ClientData([New, New, New]) }
38763 Sep 22 23:15:06.684 INFO Sent repair work, now wait for resp
38764 Sep 22 23:15:06.684 INFO [0] received reconcile message
38765 Sep 22 23:15:06.684 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(157), op: ExtentClose { repair_id: ReconciliationId(157), extent_id: 99 }, state: ClientData([InProgress, New, New]) }, : downstairs
38766 Sep 22 23:15:06.684 INFO [0] client ExtentClose { repair_id: ReconciliationId(157), extent_id: 99 }
38767 Sep 22 23:15:06.684 INFO [1] received reconcile message
38768 Sep 22 23:15:06.684 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(157), op: ExtentClose { repair_id: ReconciliationId(157), extent_id: 99 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38769 Sep 22 23:15:06.684 INFO [1] client ExtentClose { repair_id: ReconciliationId(157), extent_id: 99 }
38770 Sep 22 23:15:06.684 INFO [2] received reconcile message
38771 Sep 22 23:15:06.684 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(157), op: ExtentClose { repair_id: ReconciliationId(157), extent_id: 99 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38772 Sep 22 23:15:06.684 INFO [2] client ExtentClose { repair_id: ReconciliationId(157), extent_id: 99 }
38773 Sep 22 23:15:06.684 DEBG 157 Close extent 99
38774 Sep 22 23:15:06.684 DEBG 157 Close extent 99
38775 Sep 22 23:15:06.685 DEBG 157 Close extent 99
38776 Sep 22 23:15:06.685 DEBG [2] It's time to notify for 157
38777 Sep 22 23:15:06.685 INFO Completion from [2] id:157 status:true
38778 Sep 22 23:15:06.685 INFO [158/752] Repair commands completed
38779 Sep 22 23:15:06.685 INFO Pop front: ReconcileIO { id: ReconciliationId(158), op: ExtentRepair { repair_id: ReconciliationId(158), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38780 Sep 22 23:15:06.685 INFO Sent repair work, now wait for resp
38781 Sep 22 23:15:06.685 INFO [0] received reconcile message
38782 Sep 22 23:15:06.685 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(158), op: ExtentRepair { repair_id: ReconciliationId(158), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38783 Sep 22 23:15:06.685 INFO [0] client ExtentRepair { repair_id: ReconciliationId(158), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38784 Sep 22 23:15:06.685 INFO [0] Sending repair request ReconciliationId(158)
38785 Sep 22 23:15:06.685 INFO [1] received reconcile message
38786 Sep 22 23:15:06.685 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(158), op: ExtentRepair { repair_id: ReconciliationId(158), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38787 Sep 22 23:15:06.685 INFO [1] client ExtentRepair { repair_id: ReconciliationId(158), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38788 Sep 22 23:15:06.685 INFO [1] No action required ReconciliationId(158)
38789 Sep 22 23:15:06.686 INFO [2] received reconcile message
38790 Sep 22 23:15:06.686 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(158), op: ExtentRepair { repair_id: ReconciliationId(158), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38791 Sep 22 23:15:06.686 INFO [2] client ExtentRepair { repair_id: ReconciliationId(158), extent_id: 99, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38792 Sep 22 23:15:06.686 INFO [2] No action required ReconciliationId(158)
38793 Sep 22 23:15:06.686 DEBG 158 Repair extent 99 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
38794 Sep 22 23:15:06.686 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/063.copy"
38795 Sep 22 23:15:06.749 INFO accepted connection, remote_addr: 127.0.0.1:52616, local_addr: 127.0.0.1:46213, task: repair
38796 Sep 22 23:15:06.750 TRCE incoming request, uri: /extent/99/files, method: GET, req_id: 9a61a459-8283-4c21-a850-25ef26bfd0d4, remote_addr: 127.0.0.1:52616, local_addr: 127.0.0.1:46213, task: repair
38797 Sep 22 23:15:06.750 INFO request completed, latency_us: 191, response_code: 200, uri: /extent/99/files, method: GET, req_id: 9a61a459-8283-4c21-a850-25ef26bfd0d4, remote_addr: 127.0.0.1:52616, local_addr: 127.0.0.1:46213, task: repair
38798 Sep 22 23:15:06.750 INFO eid:99 Found repair files: ["063", "063.db"]
38799 Sep 22 23:15:06.750 TRCE incoming request, uri: /newextent/99/data, method: GET, req_id: 199b8185-6db8-4c26-b4ee-72c0a4add789, remote_addr: 127.0.0.1:52616, local_addr: 127.0.0.1:46213, task: repair
38800 Sep 22 23:15:06.751 INFO request completed, latency_us: 315, response_code: 200, uri: /newextent/99/data, method: GET, req_id: 199b8185-6db8-4c26-b4ee-72c0a4add789, remote_addr: 127.0.0.1:52616, local_addr: 127.0.0.1:46213, task: repair
38801 Sep 22 23:15:06.756 TRCE incoming request, uri: /newextent/99/db, method: GET, req_id: 1c8bb198-56ef-4157-8d00-545923156001, remote_addr: 127.0.0.1:52616, local_addr: 127.0.0.1:46213, task: repair
38802 Sep 22 23:15:06.756 INFO request completed, latency_us: 288, response_code: 200, uri: /newextent/99/db, method: GET, req_id: 1c8bb198-56ef-4157-8d00-545923156001, remote_addr: 127.0.0.1:52616, local_addr: 127.0.0.1:46213, task: repair
38803 Sep 22 23:15:06.757 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/063.copy" to "/tmp/downstairs-vrx8aK6L/00/000/063.replace"
38804 Sep 22 23:15:06.757 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38805 Sep 22 23:15:06.758 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/063.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
38806 Sep 22 23:15:06.758 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/063"
38807 Sep 22 23:15:06.758 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/063.db"
38808 Sep 22 23:15:06.758 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38809 Sep 22 23:15:06.758 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/063.replace" to "/tmp/downstairs-vrx8aK6L/00/000/063.completed"
38810 Sep 22 23:15:06.758 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38811 Sep 22 23:15:06.758 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38812 Sep 22 23:15:06.758 DEBG [0] It's time to notify for 158
38813 Sep 22 23:15:06.759 INFO Completion from [0] id:158 status:true
38814 Sep 22 23:15:06.759 INFO [159/752] Repair commands completed
38815 Sep 22 23:15:06.759 INFO Pop front: ReconcileIO { id: ReconciliationId(159), op: ExtentReopen { repair_id: ReconciliationId(159), extent_id: 99 }, state: ClientData([New, New, New]) }
38816 Sep 22 23:15:06.759 INFO Sent repair work, now wait for resp
38817 Sep 22 23:15:06.759 INFO [0] received reconcile message
38818 Sep 22 23:15:06.759 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(159), op: ExtentReopen { repair_id: ReconciliationId(159), extent_id: 99 }, state: ClientData([InProgress, New, New]) }, : downstairs
38819 Sep 22 23:15:06.759 INFO [0] client ExtentReopen { repair_id: ReconciliationId(159), extent_id: 99 }
38820 Sep 22 23:15:06.759 INFO [1] received reconcile message
38821 Sep 22 23:15:06.759 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(159), op: ExtentReopen { repair_id: ReconciliationId(159), extent_id: 99 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38822 Sep 22 23:15:06.759 INFO [1] client ExtentReopen { repair_id: ReconciliationId(159), extent_id: 99 }
38823 Sep 22 23:15:06.759 INFO [2] received reconcile message
38824 Sep 22 23:15:06.759 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(159), op: ExtentReopen { repair_id: ReconciliationId(159), extent_id: 99 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38825 Sep 22 23:15:06.759 INFO [2] client ExtentReopen { repair_id: ReconciliationId(159), extent_id: 99 }
38826 Sep 22 23:15:06.759 DEBG 159 Reopen extent 99
38827 Sep 22 23:15:06.760 DEBG 159 Reopen extent 99
38828 Sep 22 23:15:06.760 DEBG 159 Reopen extent 99
38829 Sep 22 23:15:06.761 DEBG [2] It's time to notify for 159
38830 Sep 22 23:15:06.761 INFO Completion from [2] id:159 status:true
38831 Sep 22 23:15:06.761 INFO [160/752] Repair commands completed
38832 Sep 22 23:15:06.761 INFO Pop front: ReconcileIO { id: ReconciliationId(160), op: ExtentFlush { repair_id: ReconciliationId(160), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38833 Sep 22 23:15:06.761 INFO Sent repair work, now wait for resp
38834 Sep 22 23:15:06.761 INFO [0] received reconcile message
38835 Sep 22 23:15:06.761 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(160), op: ExtentFlush { repair_id: ReconciliationId(160), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38836 Sep 22 23:15:06.761 INFO [0] client ExtentFlush { repair_id: ReconciliationId(160), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38837 Sep 22 23:15:06.761 INFO [1] received reconcile message
38838 Sep 22 23:15:06.761 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(160), op: ExtentFlush { repair_id: ReconciliationId(160), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38839 Sep 22 23:15:06.761 INFO [1] client ExtentFlush { repair_id: ReconciliationId(160), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38840 Sep 22 23:15:06.761 INFO [2] received reconcile message
38841 Sep 22 23:15:06.761 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(160), op: ExtentFlush { repair_id: ReconciliationId(160), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38842 Sep 22 23:15:06.761 INFO [2] client ExtentFlush { repair_id: ReconciliationId(160), extent_id: 110, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38843 Sep 22 23:15:06.761 DEBG 160 Flush extent 110 with f:2 g:2
38844 Sep 22 23:15:06.761 DEBG Flush just extent 110 with f:2 and g:2
38845 Sep 22 23:15:06.761 DEBG [1] It's time to notify for 160
38846 Sep 22 23:15:06.761 INFO Completion from [1] id:160 status:true
38847 Sep 22 23:15:06.761 INFO [161/752] Repair commands completed
38848 Sep 22 23:15:06.761 INFO Pop front: ReconcileIO { id: ReconciliationId(161), op: ExtentClose { repair_id: ReconciliationId(161), extent_id: 110 }, state: ClientData([New, New, New]) }
38849 Sep 22 23:15:06.761 INFO Sent repair work, now wait for resp
38850 Sep 22 23:15:06.761 INFO [0] received reconcile message
38851 Sep 22 23:15:06.761 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(161), op: ExtentClose { repair_id: ReconciliationId(161), extent_id: 110 }, state: ClientData([InProgress, New, New]) }, : downstairs
38852 Sep 22 23:15:06.761 INFO [0] client ExtentClose { repair_id: ReconciliationId(161), extent_id: 110 }
38853 Sep 22 23:15:06.761 INFO [1] received reconcile message
38854 Sep 22 23:15:06.761 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(161), op: ExtentClose { repair_id: ReconciliationId(161), extent_id: 110 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38855 Sep 22 23:15:06.761 INFO [1] client ExtentClose { repair_id: ReconciliationId(161), extent_id: 110 }
38856 Sep 22 23:15:06.762 INFO [2] received reconcile message
38857 Sep 22 23:15:06.762 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(161), op: ExtentClose { repair_id: ReconciliationId(161), extent_id: 110 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38858 Sep 22 23:15:06.762 INFO [2] client ExtentClose { repair_id: ReconciliationId(161), extent_id: 110 }
38859 Sep 22 23:15:06.762 DEBG 161 Close extent 110
38860 Sep 22 23:15:06.762 DEBG 161 Close extent 110
38861 Sep 22 23:15:06.762 DEBG 161 Close extent 110
38862 Sep 22 23:15:06.763 DEBG [2] It's time to notify for 161
38863 Sep 22 23:15:06.763 INFO Completion from [2] id:161 status:true
38864 Sep 22 23:15:06.763 INFO [162/752] Repair commands completed
38865 Sep 22 23:15:06.763 INFO Pop front: ReconcileIO { id: ReconciliationId(162), op: ExtentRepair { repair_id: ReconciliationId(162), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38866 Sep 22 23:15:06.763 INFO Sent repair work, now wait for resp
38867 Sep 22 23:15:06.763 INFO [0] received reconcile message
38868 Sep 22 23:15:06.763 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(162), op: ExtentRepair { repair_id: ReconciliationId(162), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38869 Sep 22 23:15:06.763 INFO [0] client ExtentRepair { repair_id: ReconciliationId(162), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38870 Sep 22 23:15:06.763 INFO [0] Sending repair request ReconciliationId(162)
38871 Sep 22 23:15:06.763 INFO [1] received reconcile message
38872 Sep 22 23:15:06.763 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(162), op: ExtentRepair { repair_id: ReconciliationId(162), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38873 Sep 22 23:15:06.763 INFO [1] client ExtentRepair { repair_id: ReconciliationId(162), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38874 Sep 22 23:15:06.763 INFO [1] No action required ReconciliationId(162)
38875 Sep 22 23:15:06.763 INFO [2] received reconcile message
38876 Sep 22 23:15:06.763 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(162), op: ExtentRepair { repair_id: ReconciliationId(162), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38877 Sep 22 23:15:06.763 INFO [2] client ExtentRepair { repair_id: ReconciliationId(162), extent_id: 110, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38878 Sep 22 23:15:06.763 INFO [2] No action required ReconciliationId(162)
38879 Sep 22 23:15:06.763 DEBG 162 Repair extent 110 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
38880 Sep 22 23:15:06.763 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/06E.copy"
38881 Sep 22 23:15:06.776 DEBG IO Read 1078 has deps [JobId(1077)]
38882 Sep 22 23:15:06.827 INFO accepted connection, remote_addr: 127.0.0.1:59853, local_addr: 127.0.0.1:46213, task: repair
38883 Sep 22 23:15:06.827 TRCE incoming request, uri: /extent/110/files, method: GET, req_id: 385f0e90-4f86-4603-9219-f316826e61e6, remote_addr: 127.0.0.1:59853, local_addr: 127.0.0.1:46213, task: repair
38884 Sep 22 23:15:06.827 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/110/files, method: GET, req_id: 385f0e90-4f86-4603-9219-f316826e61e6, remote_addr: 127.0.0.1:59853, local_addr: 127.0.0.1:46213, task: repair
38885 Sep 22 23:15:06.827 INFO eid:110 Found repair files: ["06E", "06E.db"]
38886 Sep 22 23:15:06.828 TRCE incoming request, uri: /newextent/110/data, method: GET, req_id: a4bf169f-0f71-4487-a3c0-14bcca017142, remote_addr: 127.0.0.1:59853, local_addr: 127.0.0.1:46213, task: repair
38887 Sep 22 23:15:06.828 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/110/data, method: GET, req_id: a4bf169f-0f71-4487-a3c0-14bcca017142, remote_addr: 127.0.0.1:59853, local_addr: 127.0.0.1:46213, task: repair
38888 Sep 22 23:15:06.833 TRCE incoming request, uri: /newextent/110/db, method: GET, req_id: ea6d063a-69b1-4e35-a9c9-59e06a3c63ea, remote_addr: 127.0.0.1:59853, local_addr: 127.0.0.1:46213, task: repair
38889 Sep 22 23:15:06.833 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/110/db, method: GET, req_id: ea6d063a-69b1-4e35-a9c9-59e06a3c63ea, remote_addr: 127.0.0.1:59853, local_addr: 127.0.0.1:46213, task: repair
38890 Sep 22 23:15:06.834 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/06E.copy" to "/tmp/downstairs-vrx8aK6L/00/000/06E.replace"
38891 Sep 22 23:15:06.834 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38892 Sep 22 23:15:06.835 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/06E.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
38893 Sep 22 23:15:06.835 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/06E"
38894 Sep 22 23:15:06.835 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/06E.db"
38895 Sep 22 23:15:06.835 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38896 Sep 22 23:15:06.835 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/06E.replace" to "/tmp/downstairs-vrx8aK6L/00/000/06E.completed"
38897 Sep 22 23:15:06.835 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38898 Sep 22 23:15:06.836 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38899 Sep 22 23:15:06.836 DEBG [0] It's time to notify for 162
38900 Sep 22 23:15:06.836 INFO Completion from [0] id:162 status:true
38901 Sep 22 23:15:06.836 INFO [163/752] Repair commands completed
38902 Sep 22 23:15:06.836 INFO Pop front: ReconcileIO { id: ReconciliationId(163), op: ExtentReopen { repair_id: ReconciliationId(163), extent_id: 110 }, state: ClientData([New, New, New]) }
38903 Sep 22 23:15:06.836 INFO Sent repair work, now wait for resp
38904 Sep 22 23:15:06.836 INFO [0] received reconcile message
38905 Sep 22 23:15:06.836 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(163), op: ExtentReopen { repair_id: ReconciliationId(163), extent_id: 110 }, state: ClientData([InProgress, New, New]) }, : downstairs
38906 Sep 22 23:15:06.836 INFO [0] client ExtentReopen { repair_id: ReconciliationId(163), extent_id: 110 }
38907 Sep 22 23:15:06.836 INFO [1] received reconcile message
38908 Sep 22 23:15:06.836 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(163), op: ExtentReopen { repair_id: ReconciliationId(163), extent_id: 110 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38909 Sep 22 23:15:06.836 INFO [1] client ExtentReopen { repair_id: ReconciliationId(163), extent_id: 110 }
38910 Sep 22 23:15:06.836 INFO [2] received reconcile message
38911 Sep 22 23:15:06.836 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(163), op: ExtentReopen { repair_id: ReconciliationId(163), extent_id: 110 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38912 Sep 22 23:15:06.836 INFO [2] client ExtentReopen { repair_id: ReconciliationId(163), extent_id: 110 }
38913 Sep 22 23:15:06.836 DEBG 163 Reopen extent 110
38914 Sep 22 23:15:06.837 DEBG 163 Reopen extent 110
38915 Sep 22 23:15:06.837 DEBG 163 Reopen extent 110
38916 Sep 22 23:15:06.838 DEBG [2] It's time to notify for 163
38917 Sep 22 23:15:06.838 INFO Completion from [2] id:163 status:true
38918 Sep 22 23:15:06.838 INFO [164/752] Repair commands completed
38919 Sep 22 23:15:06.838 INFO Pop front: ReconcileIO { id: ReconciliationId(164), op: ExtentFlush { repair_id: ReconciliationId(164), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
38920 Sep 22 23:15:06.838 INFO Sent repair work, now wait for resp
38921 Sep 22 23:15:06.838 INFO [0] received reconcile message
38922 Sep 22 23:15:06.838 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(164), op: ExtentFlush { repair_id: ReconciliationId(164), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
38923 Sep 22 23:15:06.838 INFO [0] client ExtentFlush { repair_id: ReconciliationId(164), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38924 Sep 22 23:15:06.838 INFO [1] received reconcile message
38925 Sep 22 23:15:06.838 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(164), op: ExtentFlush { repair_id: ReconciliationId(164), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
38926 Sep 22 23:15:06.838 INFO [1] client ExtentFlush { repair_id: ReconciliationId(164), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38927 Sep 22 23:15:06.838 INFO [2] received reconcile message
38928 Sep 22 23:15:06.838 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(164), op: ExtentFlush { repair_id: ReconciliationId(164), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
38929 Sep 22 23:15:06.838 INFO [2] client ExtentFlush { repair_id: ReconciliationId(164), extent_id: 121, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
38930 Sep 22 23:15:06.838 DEBG 164 Flush extent 121 with f:2 g:2
38931 Sep 22 23:15:06.838 DEBG Flush just extent 121 with f:2 and g:2
38932 Sep 22 23:15:06.839 DEBG [1] It's time to notify for 164
38933 Sep 22 23:15:06.839 INFO Completion from [1] id:164 status:true
38934 Sep 22 23:15:06.839 INFO [165/752] Repair commands completed
38935 Sep 22 23:15:06.839 INFO Pop front: ReconcileIO { id: ReconciliationId(165), op: ExtentClose { repair_id: ReconciliationId(165), extent_id: 121 }, state: ClientData([New, New, New]) }
38936 Sep 22 23:15:06.839 INFO Sent repair work, now wait for resp
38937 Sep 22 23:15:06.839 INFO [0] received reconcile message
38938 Sep 22 23:15:06.839 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(165), op: ExtentClose { repair_id: ReconciliationId(165), extent_id: 121 }, state: ClientData([InProgress, New, New]) }, : downstairs
38939 Sep 22 23:15:06.839 INFO [0] client ExtentClose { repair_id: ReconciliationId(165), extent_id: 121 }
38940 Sep 22 23:15:06.839 INFO [1] received reconcile message
38941 Sep 22 23:15:06.839 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(165), op: ExtentClose { repair_id: ReconciliationId(165), extent_id: 121 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38942 Sep 22 23:15:06.839 INFO [1] client ExtentClose { repair_id: ReconciliationId(165), extent_id: 121 }
38943 Sep 22 23:15:06.839 INFO [2] received reconcile message
38944 Sep 22 23:15:06.839 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(165), op: ExtentClose { repair_id: ReconciliationId(165), extent_id: 121 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38945 Sep 22 23:15:06.839 INFO [2] client ExtentClose { repair_id: ReconciliationId(165), extent_id: 121 }
38946 Sep 22 23:15:06.839 DEBG 165 Close extent 121
38947 Sep 22 23:15:06.839 DEBG 165 Close extent 121
38948 Sep 22 23:15:06.840 DEBG 165 Close extent 121
38949 Sep 22 23:15:06.840 DEBG [2] It's time to notify for 165
38950 Sep 22 23:15:06.840 INFO Completion from [2] id:165 status:true
38951 Sep 22 23:15:06.840 INFO [166/752] Repair commands completed
38952 Sep 22 23:15:06.840 INFO Pop front: ReconcileIO { id: ReconciliationId(166), op: ExtentRepair { repair_id: ReconciliationId(166), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
38953 Sep 22 23:15:06.840 INFO Sent repair work, now wait for resp
38954 Sep 22 23:15:06.840 INFO [0] received reconcile message
38955 Sep 22 23:15:06.840 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(166), op: ExtentRepair { repair_id: ReconciliationId(166), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
38956 Sep 22 23:15:06.840 INFO [0] client ExtentRepair { repair_id: ReconciliationId(166), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38957 Sep 22 23:15:06.840 INFO [0] Sending repair request ReconciliationId(166)
38958 Sep 22 23:15:06.840 INFO [1] received reconcile message
38959 Sep 22 23:15:06.840 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(166), op: ExtentRepair { repair_id: ReconciliationId(166), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38960 Sep 22 23:15:06.840 INFO [1] client ExtentRepair { repair_id: ReconciliationId(166), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38961 Sep 22 23:15:06.840 INFO [1] No action required ReconciliationId(166)
38962 Sep 22 23:15:06.840 INFO [2] received reconcile message
38963 Sep 22 23:15:06.840 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(166), op: ExtentRepair { repair_id: ReconciliationId(166), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
38964 Sep 22 23:15:06.840 INFO [2] client ExtentRepair { repair_id: ReconciliationId(166), extent_id: 121, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
38965 Sep 22 23:15:06.840 INFO [2] No action required ReconciliationId(166)
38966 Sep 22 23:15:06.840 DEBG 166 Repair extent 121 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
38967 Sep 22 23:15:06.841 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/079.copy"
38968 Sep 22 23:15:06.905 INFO accepted connection, remote_addr: 127.0.0.1:41663, local_addr: 127.0.0.1:46213, task: repair
38969 Sep 22 23:15:06.905 TRCE incoming request, uri: /extent/121/files, method: GET, req_id: 08c5172a-08f8-4385-a78b-23ad62c90619, remote_addr: 127.0.0.1:41663, local_addr: 127.0.0.1:46213, task: repair
38970 Sep 22 23:15:06.905 INFO request completed, latency_us: 213, response_code: 200, uri: /extent/121/files, method: GET, req_id: 08c5172a-08f8-4385-a78b-23ad62c90619, remote_addr: 127.0.0.1:41663, local_addr: 127.0.0.1:46213, task: repair
38971 Sep 22 23:15:06.906 INFO eid:121 Found repair files: ["079", "079.db"]
38972 Sep 22 23:15:06.906 TRCE incoming request, uri: /newextent/121/data, method: GET, req_id: 61c8684b-cf99-40a4-9c70-50e247ff89b3, remote_addr: 127.0.0.1:41663, local_addr: 127.0.0.1:46213, task: repair
38973 Sep 22 23:15:06.906 INFO request completed, latency_us: 263, response_code: 200, uri: /newextent/121/data, method: GET, req_id: 61c8684b-cf99-40a4-9c70-50e247ff89b3, remote_addr: 127.0.0.1:41663, local_addr: 127.0.0.1:46213, task: repair
38974 Sep 22 23:15:06.912 TRCE incoming request, uri: /newextent/121/db, method: GET, req_id: 425c9b04-a2a5-495e-8d62-5c097744efc0, remote_addr: 127.0.0.1:41663, local_addr: 127.0.0.1:46213, task: repair
38975 Sep 22 23:15:06.912 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/121/db, method: GET, req_id: 425c9b04-a2a5-495e-8d62-5c097744efc0, remote_addr: 127.0.0.1:41663, local_addr: 127.0.0.1:46213, task: repair
38976 Sep 22 23:15:06.913 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/079.copy" to "/tmp/downstairs-vrx8aK6L/00/000/079.replace"
38977 Sep 22 23:15:06.913 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38978 Sep 22 23:15:06.914 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/079.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
38979 Sep 22 23:15:06.914 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/079"
38980 Sep 22 23:15:06.914 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/079.db"
38981 Sep 22 23:15:06.914 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38982 Sep 22 23:15:06.914 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/079.replace" to "/tmp/downstairs-vrx8aK6L/00/000/079.completed"
38983 Sep 22 23:15:06.914 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38984 Sep 22 23:15:06.914 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
38985 Sep 22 23:15:06.914 DEBG [0] It's time to notify for 166
38986 Sep 22 23:15:06.915 INFO Completion from [0] id:166 status:true
38987 Sep 22 23:15:06.915 INFO [167/752] Repair commands completed
38988 Sep 22 23:15:06.915 INFO Pop front: ReconcileIO { id: ReconciliationId(167), op: ExtentReopen { repair_id: ReconciliationId(167), extent_id: 121 }, state: ClientData([New, New, New]) }
38989 Sep 22 23:15:06.915 INFO Sent repair work, now wait for resp
38990 Sep 22 23:15:06.915 INFO [0] received reconcile message
38991 Sep 22 23:15:06.915 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(167), op: ExtentReopen { repair_id: ReconciliationId(167), extent_id: 121 }, state: ClientData([InProgress, New, New]) }, : downstairs
38992 Sep 22 23:15:06.915 INFO [0] client ExtentReopen { repair_id: ReconciliationId(167), extent_id: 121 }
38993 Sep 22 23:15:06.915 INFO [1] received reconcile message
38994 Sep 22 23:15:06.915 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(167), op: ExtentReopen { repair_id: ReconciliationId(167), extent_id: 121 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
38995 Sep 22 23:15:06.915 INFO [1] client ExtentReopen { repair_id: ReconciliationId(167), extent_id: 121 }
38996 Sep 22 23:15:06.915 INFO [2] received reconcile message
38997 Sep 22 23:15:06.915 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(167), op: ExtentReopen { repair_id: ReconciliationId(167), extent_id: 121 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
38998 Sep 22 23:15:06.915 INFO [2] client ExtentReopen { repair_id: ReconciliationId(167), extent_id: 121 }
38999 Sep 22 23:15:06.915 DEBG 167 Reopen extent 121
39000 Sep 22 23:15:06.916 DEBG 167 Reopen extent 121
39001 Sep 22 23:15:06.916 DEBG 167 Reopen extent 121
39002 Sep 22 23:15:06.917 DEBG [2] It's time to notify for 167
39003 Sep 22 23:15:06.917 INFO Completion from [2] id:167 status:true
39004 Sep 22 23:15:06.917 INFO [168/752] Repair commands completed
39005 Sep 22 23:15:06.917 INFO Pop front: ReconcileIO { id: ReconciliationId(168), op: ExtentFlush { repair_id: ReconciliationId(168), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39006 Sep 22 23:15:06.917 INFO Sent repair work, now wait for resp
39007 Sep 22 23:15:06.917 INFO [0] received reconcile message
39008 Sep 22 23:15:06.917 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(168), op: ExtentFlush { repair_id: ReconciliationId(168), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39009 Sep 22 23:15:06.917 INFO [0] client ExtentFlush { repair_id: ReconciliationId(168), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39010 Sep 22 23:15:06.917 INFO [1] received reconcile message
39011 Sep 22 23:15:06.917 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(168), op: ExtentFlush { repair_id: ReconciliationId(168), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39012 Sep 22 23:15:06.917 INFO [1] client ExtentFlush { repair_id: ReconciliationId(168), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39013 Sep 22 23:15:06.917 INFO [2] received reconcile message
39014 Sep 22 23:15:06.917 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(168), op: ExtentFlush { repair_id: ReconciliationId(168), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39015 Sep 22 23:15:06.917 INFO [2] client ExtentFlush { repair_id: ReconciliationId(168), extent_id: 157, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39016 Sep 22 23:15:06.917 DEBG 168 Flush extent 157 with f:2 g:2
39017 Sep 22 23:15:06.917 DEBG Flush just extent 157 with f:2 and g:2
39018 Sep 22 23:15:06.917 DEBG [1] It's time to notify for 168
39019 Sep 22 23:15:06.917 INFO Completion from [1] id:168 status:true
39020 Sep 22 23:15:06.917 INFO [169/752] Repair commands completed
39021 Sep 22 23:15:06.917 INFO Pop front: ReconcileIO { id: ReconciliationId(169), op: ExtentClose { repair_id: ReconciliationId(169), extent_id: 157 }, state: ClientData([New, New, New]) }
39022 Sep 22 23:15:06.917 INFO Sent repair work, now wait for resp
39023 Sep 22 23:15:06.917 INFO [0] received reconcile message
39024 Sep 22 23:15:06.917 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(169), op: ExtentClose { repair_id: ReconciliationId(169), extent_id: 157 }, state: ClientData([InProgress, New, New]) }, : downstairs
39025 Sep 22 23:15:06.917 INFO [0] client ExtentClose { repair_id: ReconciliationId(169), extent_id: 157 }
39026 Sep 22 23:15:06.918 INFO [1] received reconcile message
39027 Sep 22 23:15:06.918 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(169), op: ExtentClose { repair_id: ReconciliationId(169), extent_id: 157 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39028 Sep 22 23:15:06.918 INFO [1] client ExtentClose { repair_id: ReconciliationId(169), extent_id: 157 }
39029 Sep 22 23:15:06.918 INFO [2] received reconcile message
39030 Sep 22 23:15:06.918 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(169), op: ExtentClose { repair_id: ReconciliationId(169), extent_id: 157 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39031 Sep 22 23:15:06.918 INFO [2] client ExtentClose { repair_id: ReconciliationId(169), extent_id: 157 }
39032 Sep 22 23:15:06.918 DEBG 169 Close extent 157
39033 Sep 22 23:15:06.918 DEBG 169 Close extent 157
39034 Sep 22 23:15:06.918 DEBG 169 Close extent 157
39035 Sep 22 23:15:06.919 DEBG [2] It's time to notify for 169
39036 Sep 22 23:15:06.919 INFO Completion from [2] id:169 status:true
39037 Sep 22 23:15:06.919 INFO [170/752] Repair commands completed
39038 Sep 22 23:15:06.919 INFO Pop front: ReconcileIO { id: ReconciliationId(170), op: ExtentRepair { repair_id: ReconciliationId(170), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39039 Sep 22 23:15:06.919 INFO Sent repair work, now wait for resp
39040 Sep 22 23:15:06.919 INFO [0] received reconcile message
39041 Sep 22 23:15:06.919 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(170), op: ExtentRepair { repair_id: ReconciliationId(170), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39042 Sep 22 23:15:06.919 INFO [0] client ExtentRepair { repair_id: ReconciliationId(170), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39043 Sep 22 23:15:06.919 INFO [0] Sending repair request ReconciliationId(170)
39044 Sep 22 23:15:06.919 INFO [1] received reconcile message
39045 Sep 22 23:15:06.919 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(170), op: ExtentRepair { repair_id: ReconciliationId(170), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39046 Sep 22 23:15:06.919 INFO [1] client ExtentRepair { repair_id: ReconciliationId(170), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39047 Sep 22 23:15:06.919 INFO [1] No action required ReconciliationId(170)
39048 Sep 22 23:15:06.919 INFO [2] received reconcile message
39049 Sep 22 23:15:06.919 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(170), op: ExtentRepair { repair_id: ReconciliationId(170), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39050 Sep 22 23:15:06.919 INFO [2] client ExtentRepair { repair_id: ReconciliationId(170), extent_id: 157, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39051 Sep 22 23:15:06.919 INFO [2] No action required ReconciliationId(170)
39052 Sep 22 23:15:06.919 DEBG 170 Repair extent 157 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
39053 Sep 22 23:15:06.919 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/09D.copy"
39054 Sep 22 23:15:06.983 INFO accepted connection, remote_addr: 127.0.0.1:51237, local_addr: 127.0.0.1:46213, task: repair
39055 Sep 22 23:15:06.983 TRCE incoming request, uri: /extent/157/files, method: GET, req_id: 57449d40-5f30-430a-a517-823cfe9c5743, remote_addr: 127.0.0.1:51237, local_addr: 127.0.0.1:46213, task: repair
39056 Sep 22 23:15:06.983 INFO request completed, latency_us: 210, response_code: 200, uri: /extent/157/files, method: GET, req_id: 57449d40-5f30-430a-a517-823cfe9c5743, remote_addr: 127.0.0.1:51237, local_addr: 127.0.0.1:46213, task: repair
39057 Sep 22 23:15:06.984 INFO eid:157 Found repair files: ["09D", "09D.db"]
39058 Sep 22 23:15:06.984 TRCE incoming request, uri: /newextent/157/data, method: GET, req_id: 33aa0d1a-c9ec-4060-a461-6399cd950aa7, remote_addr: 127.0.0.1:51237, local_addr: 127.0.0.1:46213, task: repair
39059 Sep 22 23:15:06.984 INFO request completed, latency_us: 257, response_code: 200, uri: /newextent/157/data, method: GET, req_id: 33aa0d1a-c9ec-4060-a461-6399cd950aa7, remote_addr: 127.0.0.1:51237, local_addr: 127.0.0.1:46213, task: repair
39060 Sep 22 23:15:06.989 TRCE incoming request, uri: /newextent/157/db, method: GET, req_id: c20aeb78-ed74-4e94-b3c7-b784fd4fceda, remote_addr: 127.0.0.1:51237, local_addr: 127.0.0.1:46213, task: repair
39061 Sep 22 23:15:06.989 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/157/db, method: GET, req_id: c20aeb78-ed74-4e94-b3c7-b784fd4fceda, remote_addr: 127.0.0.1:51237, local_addr: 127.0.0.1:46213, task: repair
39062 Sep 22 23:15:06.991 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/09D.copy" to "/tmp/downstairs-vrx8aK6L/00/000/09D.replace"
39063 Sep 22 23:15:06.991 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39064 Sep 22 23:15:06.992 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/09D.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
39065 Sep 22 23:15:06.992 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/09D"
39066 Sep 22 23:15:06.992 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/09D.db"
39067 Sep 22 23:15:06.992 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39068 Sep 22 23:15:06.992 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/09D.replace" to "/tmp/downstairs-vrx8aK6L/00/000/09D.completed"
39069 Sep 22 23:15:06.992 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39070 Sep 22 23:15:06.992 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39071 Sep 22 23:15:06.992 DEBG [0] It's time to notify for 170
39072 Sep 22 23:15:06.992 INFO Completion from [0] id:170 status:true
39073 Sep 22 23:15:06.992 INFO [171/752] Repair commands completed
39074 Sep 22 23:15:06.992 INFO Pop front: ReconcileIO { id: ReconciliationId(171), op: ExtentReopen { repair_id: ReconciliationId(171), extent_id: 157 }, state: ClientData([New, New, New]) }
39075 Sep 22 23:15:06.992 INFO Sent repair work, now wait for resp
39076 Sep 22 23:15:06.992 INFO [0] received reconcile message
39077 Sep 22 23:15:06.992 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(171), op: ExtentReopen { repair_id: ReconciliationId(171), extent_id: 157 }, state: ClientData([InProgress, New, New]) }, : downstairs
39078 Sep 22 23:15:06.992 INFO [0] client ExtentReopen { repair_id: ReconciliationId(171), extent_id: 157 }
39079 Sep 22 23:15:06.993 INFO [1] received reconcile message
39080 Sep 22 23:15:06.993 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(171), op: ExtentReopen { repair_id: ReconciliationId(171), extent_id: 157 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39081 Sep 22 23:15:06.993 INFO [1] client ExtentReopen { repair_id: ReconciliationId(171), extent_id: 157 }
39082 Sep 22 23:15:06.993 INFO [2] received reconcile message
39083 Sep 22 23:15:06.993 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(171), op: ExtentReopen { repair_id: ReconciliationId(171), extent_id: 157 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39084 Sep 22 23:15:06.993 INFO [2] client ExtentReopen { repair_id: ReconciliationId(171), extent_id: 157 }
39085 Sep 22 23:15:06.993 DEBG 171 Reopen extent 157
39086 Sep 22 23:15:06.993 DEBG 171 Reopen extent 157
39087 Sep 22 23:15:06.994 DEBG 171 Reopen extent 157
39088 Sep 22 23:15:06.995 DEBG [2] It's time to notify for 171
39089 Sep 22 23:15:06.995 INFO Completion from [2] id:171 status:true
39090 Sep 22 23:15:06.995 INFO [172/752] Repair commands completed
39091 Sep 22 23:15:06.995 INFO Pop front: ReconcileIO { id: ReconciliationId(172), op: ExtentFlush { repair_id: ReconciliationId(172), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39092 Sep 22 23:15:06.995 INFO Sent repair work, now wait for resp
39093 Sep 22 23:15:06.995 INFO [0] received reconcile message
39094 Sep 22 23:15:06.995 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(172), op: ExtentFlush { repair_id: ReconciliationId(172), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39095 Sep 22 23:15:06.995 INFO [0] client ExtentFlush { repair_id: ReconciliationId(172), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39096 Sep 22 23:15:06.995 INFO [1] received reconcile message
39097 Sep 22 23:15:06.995 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(172), op: ExtentFlush { repair_id: ReconciliationId(172), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39098 Sep 22 23:15:06.995 INFO [1] client ExtentFlush { repair_id: ReconciliationId(172), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39099 Sep 22 23:15:06.995 INFO [2] received reconcile message
39100 Sep 22 23:15:06.995 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(172), op: ExtentFlush { repair_id: ReconciliationId(172), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39101 Sep 22 23:15:06.995 INFO [2] client ExtentFlush { repair_id: ReconciliationId(172), extent_id: 57, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39102 Sep 22 23:15:06.995 DEBG 172 Flush extent 57 with f:2 g:2
39103 Sep 22 23:15:06.995 DEBG Flush just extent 57 with f:2 and g:2
39104 Sep 22 23:15:06.995 DEBG [1] It's time to notify for 172
39105 Sep 22 23:15:06.995 INFO Completion from [1] id:172 status:true
39106 Sep 22 23:15:06.995 INFO [173/752] Repair commands completed
39107 Sep 22 23:15:06.995 INFO Pop front: ReconcileIO { id: ReconciliationId(173), op: ExtentClose { repair_id: ReconciliationId(173), extent_id: 57 }, state: ClientData([New, New, New]) }
39108 Sep 22 23:15:06.995 INFO Sent repair work, now wait for resp
39109 Sep 22 23:15:06.995 INFO [0] received reconcile message
39110 Sep 22 23:15:06.995 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(173), op: ExtentClose { repair_id: ReconciliationId(173), extent_id: 57 }, state: ClientData([InProgress, New, New]) }, : downstairs
39111 Sep 22 23:15:06.995 INFO [0] client ExtentClose { repair_id: ReconciliationId(173), extent_id: 57 }
39112 Sep 22 23:15:06.995 INFO [1] received reconcile message
39113 Sep 22 23:15:06.995 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(173), op: ExtentClose { repair_id: ReconciliationId(173), extent_id: 57 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39114 Sep 22 23:15:06.995 INFO [1] client ExtentClose { repair_id: ReconciliationId(173), extent_id: 57 }
39115 Sep 22 23:15:06.995 INFO [2] received reconcile message
39116 Sep 22 23:15:06.995 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(173), op: ExtentClose { repair_id: ReconciliationId(173), extent_id: 57 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39117 Sep 22 23:15:06.995 INFO [2] client ExtentClose { repair_id: ReconciliationId(173), extent_id: 57 }
39118 Sep 22 23:15:06.996 DEBG 173 Close extent 57
39119 Sep 22 23:15:06.996 DEBG 173 Close extent 57
39120 Sep 22 23:15:06.996 DEBG 173 Close extent 57
39121 Sep 22 23:15:06.996 DEBG [2] It's time to notify for 173
39122 Sep 22 23:15:06.997 INFO Completion from [2] id:173 status:true
39123 Sep 22 23:15:06.997 INFO [174/752] Repair commands completed
39124 Sep 22 23:15:06.997 INFO Pop front: ReconcileIO { id: ReconciliationId(174), op: ExtentRepair { repair_id: ReconciliationId(174), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39125 Sep 22 23:15:06.997 INFO Sent repair work, now wait for resp
39126 Sep 22 23:15:06.997 INFO [0] received reconcile message
39127 Sep 22 23:15:06.997 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(174), op: ExtentRepair { repair_id: ReconciliationId(174), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39128 Sep 22 23:15:06.997 INFO [0] client ExtentRepair { repair_id: ReconciliationId(174), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39129 Sep 22 23:15:06.997 INFO [0] Sending repair request ReconciliationId(174)
39130 Sep 22 23:15:06.997 INFO [1] received reconcile message
39131 Sep 22 23:15:06.997 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(174), op: ExtentRepair { repair_id: ReconciliationId(174), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39132 Sep 22 23:15:06.997 INFO [1] client ExtentRepair { repair_id: ReconciliationId(174), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39133 Sep 22 23:15:06.997 INFO [1] No action required ReconciliationId(174)
39134 Sep 22 23:15:06.997 INFO [2] received reconcile message
39135 Sep 22 23:15:06.997 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(174), op: ExtentRepair { repair_id: ReconciliationId(174), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39136 Sep 22 23:15:06.997 INFO [2] client ExtentRepair { repair_id: ReconciliationId(174), extent_id: 57, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39137 Sep 22 23:15:06.997 INFO [2] No action required ReconciliationId(174)
39138 Sep 22 23:15:06.997 DEBG 174 Repair extent 57 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
39139 Sep 22 23:15:06.997 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/039.copy"
39140 Sep 22 23:15:07.061 INFO accepted connection, remote_addr: 127.0.0.1:51549, local_addr: 127.0.0.1:46213, task: repair
39141 Sep 22 23:15:07.062 TRCE incoming request, uri: /extent/57/files, method: GET, req_id: 1bbf7f2f-ca70-4b4a-9ad8-cd82197f11cc, remote_addr: 127.0.0.1:51549, local_addr: 127.0.0.1:46213, task: repair
39142 Sep 22 23:15:07.062 INFO request completed, latency_us: 253, response_code: 200, uri: /extent/57/files, method: GET, req_id: 1bbf7f2f-ca70-4b4a-9ad8-cd82197f11cc, remote_addr: 127.0.0.1:51549, local_addr: 127.0.0.1:46213, task: repair
39143 Sep 22 23:15:07.062 INFO eid:57 Found repair files: ["039", "039.db"]
39144 Sep 22 23:15:07.063 TRCE incoming request, uri: /newextent/57/data, method: GET, req_id: 145e4961-4452-4bea-b77c-b94a12da60cf, remote_addr: 127.0.0.1:51549, local_addr: 127.0.0.1:46213, task: repair
39145 Sep 22 23:15:07.063 INFO request completed, latency_us: 336, response_code: 200, uri: /newextent/57/data, method: GET, req_id: 145e4961-4452-4bea-b77c-b94a12da60cf, remote_addr: 127.0.0.1:51549, local_addr: 127.0.0.1:46213, task: repair
39146 Sep 22 23:15:07.068 TRCE incoming request, uri: /newextent/57/db, method: GET, req_id: 6285ffce-be82-4c28-ae47-f064788a27cd, remote_addr: 127.0.0.1:51549, local_addr: 127.0.0.1:46213, task: repair
39147 Sep 22 23:15:07.068 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/57/db, method: GET, req_id: 6285ffce-be82-4c28-ae47-f064788a27cd, remote_addr: 127.0.0.1:51549, local_addr: 127.0.0.1:46213, task: repair
39148 Sep 22 23:15:07.069 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/039.copy" to "/tmp/downstairs-vrx8aK6L/00/000/039.replace"
39149 Sep 22 23:15:07.069 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39150 Sep 22 23:15:07.070 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/039.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
39151 Sep 22 23:15:07.070 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/039"
39152 Sep 22 23:15:07.070 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/039.db"
39153 Sep 22 23:15:07.070 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39154 Sep 22 23:15:07.070 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/039.replace" to "/tmp/downstairs-vrx8aK6L/00/000/039.completed"
39155 Sep 22 23:15:07.070 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39156 Sep 22 23:15:07.070 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39157 Sep 22 23:15:07.071 DEBG [0] It's time to notify for 174
39158 Sep 22 23:15:07.071 INFO Completion from [0] id:174 status:true
39159 Sep 22 23:15:07.071 INFO [175/752] Repair commands completed
39160 Sep 22 23:15:07.071 INFO Pop front: ReconcileIO { id: ReconciliationId(175), op: ExtentReopen { repair_id: ReconciliationId(175), extent_id: 57 }, state: ClientData([New, New, New]) }
39161 Sep 22 23:15:07.071 INFO Sent repair work, now wait for resp
39162 Sep 22 23:15:07.071 INFO [0] received reconcile message
39163 Sep 22 23:15:07.071 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(175), op: ExtentReopen { repair_id: ReconciliationId(175), extent_id: 57 }, state: ClientData([InProgress, New, New]) }, : downstairs
39164 Sep 22 23:15:07.071 INFO [0] client ExtentReopen { repair_id: ReconciliationId(175), extent_id: 57 }
39165 Sep 22 23:15:07.071 INFO [1] received reconcile message
39166 Sep 22 23:15:07.071 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(175), op: ExtentReopen { repair_id: ReconciliationId(175), extent_id: 57 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39167 Sep 22 23:15:07.071 INFO [1] client ExtentReopen { repair_id: ReconciliationId(175), extent_id: 57 }
39168 Sep 22 23:15:07.071 INFO [2] received reconcile message
39169 Sep 22 23:15:07.071 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(175), op: ExtentReopen { repair_id: ReconciliationId(175), extent_id: 57 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39170 Sep 22 23:15:07.071 INFO [2] client ExtentReopen { repair_id: ReconciliationId(175), extent_id: 57 }
39171 Sep 22 23:15:07.071 DEBG 175 Reopen extent 57
39172 Sep 22 23:15:07.072 DEBG 175 Reopen extent 57
39173 Sep 22 23:15:07.072 DEBG 175 Reopen extent 57
39174 Sep 22 23:15:07.073 DEBG [2] It's time to notify for 175
39175 Sep 22 23:15:07.073 INFO Completion from [2] id:175 status:true
39176 Sep 22 23:15:07.073 INFO [176/752] Repair commands completed
39177 Sep 22 23:15:07.073 INFO Pop front: ReconcileIO { id: ReconciliationId(176), op: ExtentFlush { repair_id: ReconciliationId(176), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39178 Sep 22 23:15:07.073 INFO Sent repair work, now wait for resp
39179 Sep 22 23:15:07.073 INFO [0] received reconcile message
39180 Sep 22 23:15:07.073 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(176), op: ExtentFlush { repair_id: ReconciliationId(176), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39181 Sep 22 23:15:07.073 INFO [0] client ExtentFlush { repair_id: ReconciliationId(176), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39182 Sep 22 23:15:07.073 INFO [1] received reconcile message
39183 Sep 22 23:15:07.073 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(176), op: ExtentFlush { repair_id: ReconciliationId(176), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39184 Sep 22 23:15:07.073 INFO [1] client ExtentFlush { repair_id: ReconciliationId(176), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39185 Sep 22 23:15:07.073 INFO [2] received reconcile message
39186 Sep 22 23:15:07.073 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(176), op: ExtentFlush { repair_id: ReconciliationId(176), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39187 Sep 22 23:15:07.073 INFO [2] client ExtentFlush { repair_id: ReconciliationId(176), extent_id: 32, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39188 Sep 22 23:15:07.073 DEBG 176 Flush extent 32 with f:2 g:2
39189 Sep 22 23:15:07.073 DEBG Flush just extent 32 with f:2 and g:2
39190 Sep 22 23:15:07.074 DEBG [1] It's time to notify for 176
39191 Sep 22 23:15:07.074 INFO Completion from [1] id:176 status:true
39192 Sep 22 23:15:07.074 INFO [177/752] Repair commands completed
39193 Sep 22 23:15:07.074 INFO Pop front: ReconcileIO { id: ReconciliationId(177), op: ExtentClose { repair_id: ReconciliationId(177), extent_id: 32 }, state: ClientData([New, New, New]) }
39194 Sep 22 23:15:07.074 INFO Sent repair work, now wait for resp
39195 Sep 22 23:15:07.074 INFO [0] received reconcile message
39196 Sep 22 23:15:07.074 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(177), op: ExtentClose { repair_id: ReconciliationId(177), extent_id: 32 }, state: ClientData([InProgress, New, New]) }, : downstairs
39197 Sep 22 23:15:07.074 INFO [0] client ExtentClose { repair_id: ReconciliationId(177), extent_id: 32 }
39198 Sep 22 23:15:07.074 INFO [1] received reconcile message
39199 Sep 22 23:15:07.074 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(177), op: ExtentClose { repair_id: ReconciliationId(177), extent_id: 32 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39200 Sep 22 23:15:07.074 INFO [1] client ExtentClose { repair_id: ReconciliationId(177), extent_id: 32 }
39201 Sep 22 23:15:07.074 INFO [2] received reconcile message
39202 Sep 22 23:15:07.074 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(177), op: ExtentClose { repair_id: ReconciliationId(177), extent_id: 32 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39203 Sep 22 23:15:07.074 INFO [2] client ExtentClose { repair_id: ReconciliationId(177), extent_id: 32 }
39204 Sep 22 23:15:07.074 DEBG 177 Close extent 32
39205 Sep 22 23:15:07.074 DEBG 177 Close extent 32
39206 Sep 22 23:15:07.075 DEBG 177 Close extent 32
39207 Sep 22 23:15:07.075 DEBG [2] It's time to notify for 177
39208 Sep 22 23:15:07.075 INFO Completion from [2] id:177 status:true
39209 Sep 22 23:15:07.075 INFO [178/752] Repair commands completed
39210 Sep 22 23:15:07.075 INFO Pop front: ReconcileIO { id: ReconciliationId(178), op: ExtentRepair { repair_id: ReconciliationId(178), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39211 Sep 22 23:15:07.075 INFO Sent repair work, now wait for resp
39212 Sep 22 23:15:07.075 INFO [0] received reconcile message
39213 Sep 22 23:15:07.075 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(178), op: ExtentRepair { repair_id: ReconciliationId(178), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39214 Sep 22 23:15:07.075 INFO [0] client ExtentRepair { repair_id: ReconciliationId(178), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39215 Sep 22 23:15:07.075 INFO [0] Sending repair request ReconciliationId(178)
39216 Sep 22 23:15:07.075 INFO [1] received reconcile message
39217 Sep 22 23:15:07.075 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(178), op: ExtentRepair { repair_id: ReconciliationId(178), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39218 Sep 22 23:15:07.075 INFO [1] client ExtentRepair { repair_id: ReconciliationId(178), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39219 Sep 22 23:15:07.075 INFO [1] No action required ReconciliationId(178)
39220 Sep 22 23:15:07.075 INFO [2] received reconcile message
39221 Sep 22 23:15:07.075 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(178), op: ExtentRepair { repair_id: ReconciliationId(178), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39222 Sep 22 23:15:07.075 INFO [2] client ExtentRepair { repair_id: ReconciliationId(178), extent_id: 32, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39223 Sep 22 23:15:07.075 INFO [2] No action required ReconciliationId(178)
39224 Sep 22 23:15:07.075 DEBG 178 Repair extent 32 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
39225 Sep 22 23:15:07.076 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/020.copy"
39226 Sep 22 23:15:07.137 INFO accepted connection, remote_addr: 127.0.0.1:62704, local_addr: 127.0.0.1:46213, task: repair
39227 Sep 22 23:15:07.137 TRCE incoming request, uri: /extent/32/files, method: GET, req_id: c3686191-e1cd-42af-91be-659bfa4e7518, remote_addr: 127.0.0.1:62704, local_addr: 127.0.0.1:46213, task: repair
39228 Sep 22 23:15:07.138 INFO request completed, latency_us: 243, response_code: 200, uri: /extent/32/files, method: GET, req_id: c3686191-e1cd-42af-91be-659bfa4e7518, remote_addr: 127.0.0.1:62704, local_addr: 127.0.0.1:46213, task: repair
39229 Sep 22 23:15:07.138 INFO eid:32 Found repair files: ["020", "020.db"]
39230 Sep 22 23:15:07.138 TRCE incoming request, uri: /newextent/32/data, method: GET, req_id: 37b947fc-bc57-4f99-98b4-7730b6dfc953, remote_addr: 127.0.0.1:62704, local_addr: 127.0.0.1:46213, task: repair
39231 Sep 22 23:15:07.139 INFO request completed, latency_us: 345, response_code: 200, uri: /newextent/32/data, method: GET, req_id: 37b947fc-bc57-4f99-98b4-7730b6dfc953, remote_addr: 127.0.0.1:62704, local_addr: 127.0.0.1:46213, task: repair
39232 Sep 22 23:15:07.144 TRCE incoming request, uri: /newextent/32/db, method: GET, req_id: 62eac362-1dae-441b-84bd-df708f6f9424, remote_addr: 127.0.0.1:62704, local_addr: 127.0.0.1:46213, task: repair
39233 Sep 22 23:15:07.144 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/32/db, method: GET, req_id: 62eac362-1dae-441b-84bd-df708f6f9424, remote_addr: 127.0.0.1:62704, local_addr: 127.0.0.1:46213, task: repair
39234 Sep 22 23:15:07.145 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/020.copy" to "/tmp/downstairs-vrx8aK6L/00/000/020.replace"
39235 Sep 22 23:15:07.145 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39236 Sep 22 23:15:07.146 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/020.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
39237 Sep 22 23:15:07.146 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/020"
39238 Sep 22 23:15:07.147 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/020.db"
39239 Sep 22 23:15:07.147 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39240 Sep 22 23:15:07.147 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/020.replace" to "/tmp/downstairs-vrx8aK6L/00/000/020.completed"
39241 Sep 22 23:15:07.147 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39242 Sep 22 23:15:07.147 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39243 Sep 22 23:15:07.147 DEBG [0] It's time to notify for 178
39244 Sep 22 23:15:07.147 INFO Completion from [0] id:178 status:true
39245 Sep 22 23:15:07.147 INFO [179/752] Repair commands completed
39246 Sep 22 23:15:07.147 INFO Pop front: ReconcileIO { id: ReconciliationId(179), op: ExtentReopen { repair_id: ReconciliationId(179), extent_id: 32 }, state: ClientData([New, New, New]) }
39247 Sep 22 23:15:07.147 INFO Sent repair work, now wait for resp
39248 Sep 22 23:15:07.147 INFO [0] received reconcile message
39249 Sep 22 23:15:07.147 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(179), op: ExtentReopen { repair_id: ReconciliationId(179), extent_id: 32 }, state: ClientData([InProgress, New, New]) }, : downstairs
39250 Sep 22 23:15:07.147 INFO [0] client ExtentReopen { repair_id: ReconciliationId(179), extent_id: 32 }
39251 Sep 22 23:15:07.147 INFO [1] received reconcile message
39252 Sep 22 23:15:07.147 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(179), op: ExtentReopen { repair_id: ReconciliationId(179), extent_id: 32 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39253 Sep 22 23:15:07.147 INFO [1] client ExtentReopen { repair_id: ReconciliationId(179), extent_id: 32 }
39254 Sep 22 23:15:07.147 INFO [2] received reconcile message
39255 Sep 22 23:15:07.147 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(179), op: ExtentReopen { repair_id: ReconciliationId(179), extent_id: 32 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39256 Sep 22 23:15:07.147 INFO [2] client ExtentReopen { repair_id: ReconciliationId(179), extent_id: 32 }
39257 Sep 22 23:15:07.147 DEBG 179 Reopen extent 32
39258 Sep 22 23:15:07.148 DEBG 179 Reopen extent 32
39259 Sep 22 23:15:07.149 DEBG 179 Reopen extent 32
39260 Sep 22 23:15:07.149 DEBG [2] It's time to notify for 179
39261 Sep 22 23:15:07.149 INFO Completion from [2] id:179 status:true
39262 Sep 22 23:15:07.149 INFO [180/752] Repair commands completed
39263 Sep 22 23:15:07.149 INFO Pop front: ReconcileIO { id: ReconciliationId(180), op: ExtentFlush { repair_id: ReconciliationId(180), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39264 Sep 22 23:15:07.149 INFO Sent repair work, now wait for resp
39265 Sep 22 23:15:07.149 INFO [0] received reconcile message
39266 Sep 22 23:15:07.149 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(180), op: ExtentFlush { repair_id: ReconciliationId(180), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39267 Sep 22 23:15:07.150 INFO [0] client ExtentFlush { repair_id: ReconciliationId(180), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39268 Sep 22 23:15:07.150 INFO [1] received reconcile message
39269 Sep 22 23:15:07.150 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(180), op: ExtentFlush { repair_id: ReconciliationId(180), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39270 Sep 22 23:15:07.150 INFO [1] client ExtentFlush { repair_id: ReconciliationId(180), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39271 Sep 22 23:15:07.150 INFO [2] received reconcile message
39272 Sep 22 23:15:07.150 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(180), op: ExtentFlush { repair_id: ReconciliationId(180), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39273 Sep 22 23:15:07.150 INFO [2] client ExtentFlush { repair_id: ReconciliationId(180), extent_id: 60, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39274 Sep 22 23:15:07.150 DEBG 180 Flush extent 60 with f:2 g:2
39275 Sep 22 23:15:07.150 DEBG Flush just extent 60 with f:2 and g:2
39276 Sep 22 23:15:07.150 DEBG [1] It's time to notify for 180
39277 Sep 22 23:15:07.150 INFO Completion from [1] id:180 status:true
39278 Sep 22 23:15:07.150 INFO [181/752] Repair commands completed
39279 Sep 22 23:15:07.150 INFO Pop front: ReconcileIO { id: ReconciliationId(181), op: ExtentClose { repair_id: ReconciliationId(181), extent_id: 60 }, state: ClientData([New, New, New]) }
39280 Sep 22 23:15:07.150 INFO Sent repair work, now wait for resp
39281 Sep 22 23:15:07.150 INFO [0] received reconcile message
39282 Sep 22 23:15:07.150 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(181), op: ExtentClose { repair_id: ReconciliationId(181), extent_id: 60 }, state: ClientData([InProgress, New, New]) }, : downstairs
39283 Sep 22 23:15:07.150 INFO [0] client ExtentClose { repair_id: ReconciliationId(181), extent_id: 60 }
39284 Sep 22 23:15:07.150 INFO [1] received reconcile message
39285 Sep 22 23:15:07.150 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(181), op: ExtentClose { repair_id: ReconciliationId(181), extent_id: 60 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39286 Sep 22 23:15:07.150 INFO [1] client ExtentClose { repair_id: ReconciliationId(181), extent_id: 60 }
39287 Sep 22 23:15:07.150 INFO [2] received reconcile message
39288 Sep 22 23:15:07.150 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(181), op: ExtentClose { repair_id: ReconciliationId(181), extent_id: 60 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39289 Sep 22 23:15:07.150 INFO [2] client ExtentClose { repair_id: ReconciliationId(181), extent_id: 60 }
39290 Sep 22 23:15:07.150 DEBG 181 Close extent 60
39291 Sep 22 23:15:07.151 DEBG 181 Close extent 60
39292 Sep 22 23:15:07.151 DEBG 181 Close extent 60
39293 Sep 22 23:15:07.151 DEBG [2] It's time to notify for 181
39294 Sep 22 23:15:07.151 INFO Completion from [2] id:181 status:true
39295 Sep 22 23:15:07.151 INFO [182/752] Repair commands completed
39296 Sep 22 23:15:07.151 INFO Pop front: ReconcileIO { id: ReconciliationId(182), op: ExtentRepair { repair_id: ReconciliationId(182), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39297 Sep 22 23:15:07.151 INFO Sent repair work, now wait for resp
39298 Sep 22 23:15:07.151 INFO [0] received reconcile message
39299 Sep 22 23:15:07.151 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(182), op: ExtentRepair { repair_id: ReconciliationId(182), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39300 Sep 22 23:15:07.152 INFO [0] client ExtentRepair { repair_id: ReconciliationId(182), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39301 Sep 22 23:15:07.152 INFO [0] Sending repair request ReconciliationId(182)
39302 Sep 22 23:15:07.152 INFO [1] received reconcile message
39303 Sep 22 23:15:07.152 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(182), op: ExtentRepair { repair_id: ReconciliationId(182), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39304 Sep 22 23:15:07.152 INFO [1] client ExtentRepair { repair_id: ReconciliationId(182), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39305 Sep 22 23:15:07.152 INFO [1] No action required ReconciliationId(182)
39306 Sep 22 23:15:07.152 INFO [2] received reconcile message
39307 Sep 22 23:15:07.152 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(182), op: ExtentRepair { repair_id: ReconciliationId(182), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39308 Sep 22 23:15:07.152 INFO [2] client ExtentRepair { repair_id: ReconciliationId(182), extent_id: 60, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39309 Sep 22 23:15:07.152 INFO [2] No action required ReconciliationId(182)
39310 Sep 22 23:15:07.152 DEBG 182 Repair extent 60 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
39311 Sep 22 23:15:07.152 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/03C.copy"
39312 Sep 22 23:15:07.214 INFO accepted connection, remote_addr: 127.0.0.1:50194, local_addr: 127.0.0.1:46213, task: repair
39313 Sep 22 23:15:07.214 TRCE incoming request, uri: /extent/60/files, method: GET, req_id: 3dc11b21-77bb-4afe-b2ed-9445947b807e, remote_addr: 127.0.0.1:50194, local_addr: 127.0.0.1:46213, task: repair
39314 Sep 22 23:15:07.214 INFO request completed, latency_us: 224, response_code: 200, uri: /extent/60/files, method: GET, req_id: 3dc11b21-77bb-4afe-b2ed-9445947b807e, remote_addr: 127.0.0.1:50194, local_addr: 127.0.0.1:46213, task: repair
39315 Sep 22 23:15:07.214 INFO eid:60 Found repair files: ["03C", "03C.db"]
39316 Sep 22 23:15:07.215 TRCE incoming request, uri: /newextent/60/data, method: GET, req_id: 477c6cb1-62c0-427a-83b7-af96b54c3e15, remote_addr: 127.0.0.1:50194, local_addr: 127.0.0.1:46213, task: repair
39317 Sep 22 23:15:07.215 INFO request completed, latency_us: 321, response_code: 200, uri: /newextent/60/data, method: GET, req_id: 477c6cb1-62c0-427a-83b7-af96b54c3e15, remote_addr: 127.0.0.1:50194, local_addr: 127.0.0.1:46213, task: repair
39318 Sep 22 23:15:07.220 TRCE incoming request, uri: /newextent/60/db, method: GET, req_id: 4745aaea-af7f-4bfa-8baf-83a3fc8a5512, remote_addr: 127.0.0.1:50194, local_addr: 127.0.0.1:46213, task: repair
39319 Sep 22 23:15:07.220 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/60/db, method: GET, req_id: 4745aaea-af7f-4bfa-8baf-83a3fc8a5512, remote_addr: 127.0.0.1:50194, local_addr: 127.0.0.1:46213, task: repair
39320 Sep 22 23:15:07.221 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/03C.copy" to "/tmp/downstairs-vrx8aK6L/00/000/03C.replace"
39321 Sep 22 23:15:07.222 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39322 Sep 22 23:15:07.222 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/03C.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
39323 Sep 22 23:15:07.223 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/03C"
39324 Sep 22 23:15:07.223 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/03C.db"
39325 Sep 22 23:15:07.223 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39326 Sep 22 23:15:07.223 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/03C.replace" to "/tmp/downstairs-vrx8aK6L/00/000/03C.completed"
39327 Sep 22 23:15:07.223 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39328 Sep 22 23:15:07.223 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39329 Sep 22 23:15:07.223 DEBG [0] It's time to notify for 182
39330 Sep 22 23:15:07.223 INFO Completion from [0] id:182 status:true
39331 Sep 22 23:15:07.223 INFO [183/752] Repair commands completed
39332 Sep 22 23:15:07.223 INFO Pop front: ReconcileIO { id: ReconciliationId(183), op: ExtentReopen { repair_id: ReconciliationId(183), extent_id: 60 }, state: ClientData([New, New, New]) }
39333 Sep 22 23:15:07.223 INFO Sent repair work, now wait for resp
39334 Sep 22 23:15:07.223 INFO [0] received reconcile message
39335 Sep 22 23:15:07.223 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(183), op: ExtentReopen { repair_id: ReconciliationId(183), extent_id: 60 }, state: ClientData([InProgress, New, New]) }, : downstairs
39336 Sep 22 23:15:07.223 INFO [0] client ExtentReopen { repair_id: ReconciliationId(183), extent_id: 60 }
39337 Sep 22 23:15:07.223 INFO [1] received reconcile message
39338 Sep 22 23:15:07.223 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(183), op: ExtentReopen { repair_id: ReconciliationId(183), extent_id: 60 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39339 Sep 22 23:15:07.224 INFO [1] client ExtentReopen { repair_id: ReconciliationId(183), extent_id: 60 }
39340 Sep 22 23:15:07.224 INFO [2] received reconcile message
39341 Sep 22 23:15:07.224 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(183), op: ExtentReopen { repair_id: ReconciliationId(183), extent_id: 60 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39342 Sep 22 23:15:07.224 INFO [2] client ExtentReopen { repair_id: ReconciliationId(183), extent_id: 60 }
39343 Sep 22 23:15:07.224 DEBG 183 Reopen extent 60
39344 Sep 22 23:15:07.224 DEBG 183 Reopen extent 60
39345 Sep 22 23:15:07.225 DEBG 183 Reopen extent 60
39346 Sep 22 23:15:07.225 DEBG [2] It's time to notify for 183
39347 Sep 22 23:15:07.226 INFO Completion from [2] id:183 status:true
39348 Sep 22 23:15:07.226 INFO [184/752] Repair commands completed
39349 Sep 22 23:15:07.226 INFO Pop front: ReconcileIO { id: ReconciliationId(184), op: ExtentFlush { repair_id: ReconciliationId(184), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39350 Sep 22 23:15:07.226 INFO Sent repair work, now wait for resp
39351 Sep 22 23:15:07.226 INFO [0] received reconcile message
39352 Sep 22 23:15:07.226 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(184), op: ExtentFlush { repair_id: ReconciliationId(184), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39353 Sep 22 23:15:07.226 INFO [0] client ExtentFlush { repair_id: ReconciliationId(184), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39354 Sep 22 23:15:07.226 INFO [1] received reconcile message
39355 Sep 22 23:15:07.226 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(184), op: ExtentFlush { repair_id: ReconciliationId(184), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39356 Sep 22 23:15:07.226 INFO [1] client ExtentFlush { repair_id: ReconciliationId(184), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39357 Sep 22 23:15:07.226 INFO [2] received reconcile message
39358 Sep 22 23:15:07.226 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(184), op: ExtentFlush { repair_id: ReconciliationId(184), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39359 Sep 22 23:15:07.226 INFO [2] client ExtentFlush { repair_id: ReconciliationId(184), extent_id: 7, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39360 Sep 22 23:15:07.226 DEBG 184 Flush extent 7 with f:2 g:2
39361 Sep 22 23:15:07.226 DEBG Flush just extent 7 with f:2 and g:2
39362 Sep 22 23:15:07.226 DEBG [1] It's time to notify for 184
39363 Sep 22 23:15:07.226 INFO Completion from [1] id:184 status:true
39364 Sep 22 23:15:07.226 INFO [185/752] Repair commands completed
39365 Sep 22 23:15:07.226 INFO Pop front: ReconcileIO { id: ReconciliationId(185), op: ExtentClose { repair_id: ReconciliationId(185), extent_id: 7 }, state: ClientData([New, New, New]) }
39366 Sep 22 23:15:07.226 INFO Sent repair work, now wait for resp
39367 Sep 22 23:15:07.226 INFO [0] received reconcile message
39368 Sep 22 23:15:07.226 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(185), op: ExtentClose { repair_id: ReconciliationId(185), extent_id: 7 }, state: ClientData([InProgress, New, New]) }, : downstairs
39369 Sep 22 23:15:07.226 INFO [0] client ExtentClose { repair_id: ReconciliationId(185), extent_id: 7 }
39370 Sep 22 23:15:07.226 INFO [1] received reconcile message
39371 Sep 22 23:15:07.226 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(185), op: ExtentClose { repair_id: ReconciliationId(185), extent_id: 7 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39372 Sep 22 23:15:07.226 INFO [1] client ExtentClose { repair_id: ReconciliationId(185), extent_id: 7 }
39373 Sep 22 23:15:07.226 INFO [2] received reconcile message
39374 Sep 22 23:15:07.226 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(185), op: ExtentClose { repair_id: ReconciliationId(185), extent_id: 7 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39375 Sep 22 23:15:07.226 INFO [2] client ExtentClose { repair_id: ReconciliationId(185), extent_id: 7 }
39376 Sep 22 23:15:07.227 DEBG 185 Close extent 7
39377 Sep 22 23:15:07.227 DEBG 185 Close extent 7
39378 Sep 22 23:15:07.227 DEBG 185 Close extent 7
39379 Sep 22 23:15:07.227 DEBG [2] It's time to notify for 185
39380 Sep 22 23:15:07.228 INFO Completion from [2] id:185 status:true
39381 Sep 22 23:15:07.228 INFO [186/752] Repair commands completed
39382 Sep 22 23:15:07.228 INFO Pop front: ReconcileIO { id: ReconciliationId(186), op: ExtentRepair { repair_id: ReconciliationId(186), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39383 Sep 22 23:15:07.228 INFO Sent repair work, now wait for resp
39384 Sep 22 23:15:07.228 INFO [0] received reconcile message
39385 Sep 22 23:15:07.228 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(186), op: ExtentRepair { repair_id: ReconciliationId(186), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39386 Sep 22 23:15:07.228 INFO [0] client ExtentRepair { repair_id: ReconciliationId(186), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39387 Sep 22 23:15:07.228 INFO [0] Sending repair request ReconciliationId(186)
39388 Sep 22 23:15:07.228 INFO [1] received reconcile message
39389 Sep 22 23:15:07.228 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(186), op: ExtentRepair { repair_id: ReconciliationId(186), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39390 Sep 22 23:15:07.228 INFO [1] client ExtentRepair { repair_id: ReconciliationId(186), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39391 Sep 22 23:15:07.228 INFO [1] No action required ReconciliationId(186)
39392 Sep 22 23:15:07.228 INFO [2] received reconcile message
39393 Sep 22 23:15:07.228 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(186), op: ExtentRepair { repair_id: ReconciliationId(186), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39394 Sep 22 23:15:07.228 INFO [2] client ExtentRepair { repair_id: ReconciliationId(186), extent_id: 7, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39395 Sep 22 23:15:07.228 INFO [2] No action required ReconciliationId(186)
39396 Sep 22 23:15:07.228 DEBG 186 Repair extent 7 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
39397 Sep 22 23:15:07.228 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/007.copy"
39398 Sep 22 23:15:07.293 INFO accepted connection, remote_addr: 127.0.0.1:40181, local_addr: 127.0.0.1:46213, task: repair
39399 Sep 22 23:15:07.293 TRCE incoming request, uri: /extent/7/files, method: GET, req_id: 8fee3a6c-e887-4386-ae7e-725bb42a67f3, remote_addr: 127.0.0.1:40181, local_addr: 127.0.0.1:46213, task: repair
39400 Sep 22 23:15:07.293 INFO request completed, latency_us: 216, response_code: 200, uri: /extent/7/files, method: GET, req_id: 8fee3a6c-e887-4386-ae7e-725bb42a67f3, remote_addr: 127.0.0.1:40181, local_addr: 127.0.0.1:46213, task: repair
39401 Sep 22 23:15:07.293 INFO eid:7 Found repair files: ["007", "007.db"]
39402 Sep 22 23:15:07.294 TRCE incoming request, uri: /newextent/7/data, method: GET, req_id: 4be306dd-74f7-4fe4-88a5-b0085100dc07, remote_addr: 127.0.0.1:40181, local_addr: 127.0.0.1:46213, task: repair
39403 Sep 22 23:15:07.294 INFO request completed, latency_us: 313, response_code: 200, uri: /newextent/7/data, method: GET, req_id: 4be306dd-74f7-4fe4-88a5-b0085100dc07, remote_addr: 127.0.0.1:40181, local_addr: 127.0.0.1:46213, task: repair
39404 Sep 22 23:15:07.299 TRCE incoming request, uri: /newextent/7/db, method: GET, req_id: 205f8a30-cbd7-454a-a9e9-c2192a511053, remote_addr: 127.0.0.1:40181, local_addr: 127.0.0.1:46213, task: repair
39405 Sep 22 23:15:07.299 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/7/db, method: GET, req_id: 205f8a30-cbd7-454a-a9e9-c2192a511053, remote_addr: 127.0.0.1:40181, local_addr: 127.0.0.1:46213, task: repair
39406 Sep 22 23:15:07.301 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/007.copy" to "/tmp/downstairs-vrx8aK6L/00/000/007.replace"
39407 Sep 22 23:15:07.301 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39408 Sep 22 23:15:07.302 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/007.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
39409 Sep 22 23:15:07.302 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/007"
39410 Sep 22 23:15:07.302 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/007.db"
39411 Sep 22 23:15:07.302 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39412 Sep 22 23:15:07.302 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/007.replace" to "/tmp/downstairs-vrx8aK6L/00/000/007.completed"
39413 Sep 22 23:15:07.302 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39414 Sep 22 23:15:07.302 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39415 Sep 22 23:15:07.302 DEBG [0] It's time to notify for 186
39416 Sep 22 23:15:07.302 INFO Completion from [0] id:186 status:true
39417 Sep 22 23:15:07.302 INFO [187/752] Repair commands completed
39418 Sep 22 23:15:07.302 INFO Pop front: ReconcileIO { id: ReconciliationId(187), op: ExtentReopen { repair_id: ReconciliationId(187), extent_id: 7 }, state: ClientData([New, New, New]) }
39419 Sep 22 23:15:07.302 INFO Sent repair work, now wait for resp
39420 Sep 22 23:15:07.302 INFO [0] received reconcile message
39421 Sep 22 23:15:07.302 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(187), op: ExtentReopen { repair_id: ReconciliationId(187), extent_id: 7 }, state: ClientData([InProgress, New, New]) }, : downstairs
39422 Sep 22 23:15:07.302 INFO [0] client ExtentReopen { repair_id: ReconciliationId(187), extent_id: 7 }
39423 Sep 22 23:15:07.303 INFO [1] received reconcile message
39424 Sep 22 23:15:07.303 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(187), op: ExtentReopen { repair_id: ReconciliationId(187), extent_id: 7 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39425 Sep 22 23:15:07.303 INFO [1] client ExtentReopen { repair_id: ReconciliationId(187), extent_id: 7 }
39426 Sep 22 23:15:07.303 INFO [2] received reconcile message
39427 Sep 22 23:15:07.303 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(187), op: ExtentReopen { repair_id: ReconciliationId(187), extent_id: 7 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39428 Sep 22 23:15:07.303 INFO [2] client ExtentReopen { repair_id: ReconciliationId(187), extent_id: 7 }
39429 Sep 22 23:15:07.303 DEBG 187 Reopen extent 7
39430 Sep 22 23:15:07.303 DEBG 187 Reopen extent 7
39431 Sep 22 23:15:07.304 DEBG 187 Reopen extent 7
39432 Sep 22 23:15:07.305 DEBG [2] It's time to notify for 187
39433 Sep 22 23:15:07.305 INFO Completion from [2] id:187 status:true
39434 Sep 22 23:15:07.305 INFO [188/752] Repair commands completed
39435 Sep 22 23:15:07.305 INFO Pop front: ReconcileIO { id: ReconciliationId(188), op: ExtentFlush { repair_id: ReconciliationId(188), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39436 Sep 22 23:15:07.305 INFO Sent repair work, now wait for resp
39437 Sep 22 23:15:07.305 INFO [0] received reconcile message
39438 Sep 22 23:15:07.305 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(188), op: ExtentFlush { repair_id: ReconciliationId(188), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39439 Sep 22 23:15:07.305 INFO [0] client ExtentFlush { repair_id: ReconciliationId(188), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39440 Sep 22 23:15:07.305 INFO [1] received reconcile message
39441 Sep 22 23:15:07.305 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(188), op: ExtentFlush { repair_id: ReconciliationId(188), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39442 Sep 22 23:15:07.305 INFO [1] client ExtentFlush { repair_id: ReconciliationId(188), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39443 Sep 22 23:15:07.305 INFO [2] received reconcile message
39444 Sep 22 23:15:07.305 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(188), op: ExtentFlush { repair_id: ReconciliationId(188), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39445 Sep 22 23:15:07.305 INFO [2] client ExtentFlush { repair_id: ReconciliationId(188), extent_id: 139, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39446 Sep 22 23:15:07.305 DEBG 188 Flush extent 139 with f:2 g:2
39447 Sep 22 23:15:07.305 DEBG Flush just extent 139 with f:2 and g:2
39448 Sep 22 23:15:07.305 DEBG [1] It's time to notify for 188
39449 Sep 22 23:15:07.305 INFO Completion from [1] id:188 status:true
39450 Sep 22 23:15:07.305 INFO [189/752] Repair commands completed
39451 Sep 22 23:15:07.305 INFO Pop front: ReconcileIO { id: ReconciliationId(189), op: ExtentClose { repair_id: ReconciliationId(189), extent_id: 139 }, state: ClientData([New, New, New]) }
39452 Sep 22 23:15:07.305 INFO Sent repair work, now wait for resp
39453 Sep 22 23:15:07.305 INFO [0] received reconcile message
39454 Sep 22 23:15:07.305 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(189), op: ExtentClose { repair_id: ReconciliationId(189), extent_id: 139 }, state: ClientData([InProgress, New, New]) }, : downstairs
39455 Sep 22 23:15:07.305 INFO [0] client ExtentClose { repair_id: ReconciliationId(189), extent_id: 139 }
39456 Sep 22 23:15:07.305 INFO [1] received reconcile message
39457 Sep 22 23:15:07.305 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(189), op: ExtentClose { repair_id: ReconciliationId(189), extent_id: 139 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39458 Sep 22 23:15:07.305 INFO [1] client ExtentClose { repair_id: ReconciliationId(189), extent_id: 139 }
39459 Sep 22 23:15:07.305 INFO [2] received reconcile message
39460 Sep 22 23:15:07.305 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(189), op: ExtentClose { repair_id: ReconciliationId(189), extent_id: 139 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39461 Sep 22 23:15:07.305 INFO [2] client ExtentClose { repair_id: ReconciliationId(189), extent_id: 139 }
39462 Sep 22 23:15:07.306 DEBG 189 Close extent 139
39463 Sep 22 23:15:07.306 DEBG 189 Close extent 139
39464 Sep 22 23:15:07.306 DEBG 189 Close extent 139
39465 Sep 22 23:15:07.306 DEBG [2] It's time to notify for 189
39466 Sep 22 23:15:07.307 INFO Completion from [2] id:189 status:true
39467 Sep 22 23:15:07.307 INFO [190/752] Repair commands completed
39468 Sep 22 23:15:07.307 INFO Pop front: ReconcileIO { id: ReconciliationId(190), op: ExtentRepair { repair_id: ReconciliationId(190), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39469 Sep 22 23:15:07.307 INFO Sent repair work, now wait for resp
39470 Sep 22 23:15:07.307 INFO [0] received reconcile message
39471 Sep 22 23:15:07.307 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(190), op: ExtentRepair { repair_id: ReconciliationId(190), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39472 Sep 22 23:15:07.307 INFO [0] client ExtentRepair { repair_id: ReconciliationId(190), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39473 Sep 22 23:15:07.307 INFO [0] Sending repair request ReconciliationId(190)
39474 Sep 22 23:15:07.307 INFO [1] received reconcile message
39475 Sep 22 23:15:07.307 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(190), op: ExtentRepair { repair_id: ReconciliationId(190), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39476 Sep 22 23:15:07.307 INFO [1] client ExtentRepair { repair_id: ReconciliationId(190), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39477 Sep 22 23:15:07.307 INFO [1] No action required ReconciliationId(190)
39478 Sep 22 23:15:07.307 INFO [2] received reconcile message
39479 Sep 22 23:15:07.307 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(190), op: ExtentRepair { repair_id: ReconciliationId(190), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39480 Sep 22 23:15:07.307 INFO [2] client ExtentRepair { repair_id: ReconciliationId(190), extent_id: 139, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39481 Sep 22 23:15:07.307 INFO [2] No action required ReconciliationId(190)
39482 Sep 22 23:15:07.307 DEBG 190 Repair extent 139 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
39483 Sep 22 23:15:07.307 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/08B.copy"
39484 Sep 22 23:15:07.370 INFO accepted connection, remote_addr: 127.0.0.1:57559, local_addr: 127.0.0.1:46213, task: repair
39485 Sep 22 23:15:07.371 TRCE incoming request, uri: /extent/139/files, method: GET, req_id: 4db907e1-aa75-4bbd-a43f-11c76a754ef2, remote_addr: 127.0.0.1:57559, local_addr: 127.0.0.1:46213, task: repair
39486 Sep 22 23:15:07.371 INFO request completed, latency_us: 230, response_code: 200, uri: /extent/139/files, method: GET, req_id: 4db907e1-aa75-4bbd-a43f-11c76a754ef2, remote_addr: 127.0.0.1:57559, local_addr: 127.0.0.1:46213, task: repair
39487 Sep 22 23:15:07.371 INFO eid:139 Found repair files: ["08B", "08B.db"]
39488 Sep 22 23:15:07.371 TRCE incoming request, uri: /newextent/139/data, method: GET, req_id: 01fb575e-e85b-4fc4-b43e-2ece49be8b0a, remote_addr: 127.0.0.1:57559, local_addr: 127.0.0.1:46213, task: repair
39489 Sep 22 23:15:07.372 INFO request completed, latency_us: 340, response_code: 200, uri: /newextent/139/data, method: GET, req_id: 01fb575e-e85b-4fc4-b43e-2ece49be8b0a, remote_addr: 127.0.0.1:57559, local_addr: 127.0.0.1:46213, task: repair
39490 Sep 22 23:15:07.377 TRCE incoming request, uri: /newextent/139/db, method: GET, req_id: f607117f-4ab6-4c84-b3bf-ed29b62420be, remote_addr: 127.0.0.1:57559, local_addr: 127.0.0.1:46213, task: repair
39491 Sep 22 23:15:07.377 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/139/db, method: GET, req_id: f607117f-4ab6-4c84-b3bf-ed29b62420be, remote_addr: 127.0.0.1:57559, local_addr: 127.0.0.1:46213, task: repair
39492 Sep 22 23:15:07.378 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/08B.copy" to "/tmp/downstairs-vrx8aK6L/00/000/08B.replace"
39493 Sep 22 23:15:07.378 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39494 Sep 22 23:15:07.379 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/08B.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
39495 Sep 22 23:15:07.379 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/08B"
39496 Sep 22 23:15:07.379 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/08B.db"
39497 Sep 22 23:15:07.379 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39498 Sep 22 23:15:07.379 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/08B.replace" to "/tmp/downstairs-vrx8aK6L/00/000/08B.completed"
39499 Sep 22 23:15:07.379 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39500 Sep 22 23:15:07.380 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39501 Sep 22 23:15:07.380 DEBG [0] It's time to notify for 190
39502 Sep 22 23:15:07.380 INFO Completion from [0] id:190 status:true
39503 Sep 22 23:15:07.380 INFO [191/752] Repair commands completed
39504 Sep 22 23:15:07.380 INFO Pop front: ReconcileIO { id: ReconciliationId(191), op: ExtentReopen { repair_id: ReconciliationId(191), extent_id: 139 }, state: ClientData([New, New, New]) }
39505 Sep 22 23:15:07.380 INFO Sent repair work, now wait for resp
39506 Sep 22 23:15:07.380 INFO [0] received reconcile message
39507 Sep 22 23:15:07.380 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(191), op: ExtentReopen { repair_id: ReconciliationId(191), extent_id: 139 }, state: ClientData([InProgress, New, New]) }, : downstairs
39508 Sep 22 23:15:07.380 INFO [0] client ExtentReopen { repair_id: ReconciliationId(191), extent_id: 139 }
39509 Sep 22 23:15:07.380 INFO [1] received reconcile message
39510 Sep 22 23:15:07.380 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(191), op: ExtentReopen { repair_id: ReconciliationId(191), extent_id: 139 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39511 Sep 22 23:15:07.380 INFO [1] client ExtentReopen { repair_id: ReconciliationId(191), extent_id: 139 }
39512 Sep 22 23:15:07.380 INFO [2] received reconcile message
39513 Sep 22 23:15:07.380 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(191), op: ExtentReopen { repair_id: ReconciliationId(191), extent_id: 139 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39514 Sep 22 23:15:07.380 INFO [2] client ExtentReopen { repair_id: ReconciliationId(191), extent_id: 139 }
39515 Sep 22 23:15:07.380 DEBG 191 Reopen extent 139
39516 Sep 22 23:15:07.381 DEBG 191 Reopen extent 139
39517 Sep 22 23:15:07.382 DEBG 191 Reopen extent 139
39518 Sep 22 23:15:07.382 DEBG [2] It's time to notify for 191
39519 Sep 22 23:15:07.382 INFO Completion from [2] id:191 status:true
39520 Sep 22 23:15:07.382 INFO [192/752] Repair commands completed
39521 Sep 22 23:15:07.382 INFO Pop front: ReconcileIO { id: ReconciliationId(192), op: ExtentFlush { repair_id: ReconciliationId(192), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39522 Sep 22 23:15:07.382 INFO Sent repair work, now wait for resp
39523 Sep 22 23:15:07.382 INFO [0] received reconcile message
39524 Sep 22 23:15:07.382 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(192), op: ExtentFlush { repair_id: ReconciliationId(192), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39525 Sep 22 23:15:07.382 INFO [0] client ExtentFlush { repair_id: ReconciliationId(192), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39526 Sep 22 23:15:07.382 INFO [1] received reconcile message
39527 Sep 22 23:15:07.382 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(192), op: ExtentFlush { repair_id: ReconciliationId(192), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39528 Sep 22 23:15:07.382 INFO [1] client ExtentFlush { repair_id: ReconciliationId(192), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39529 Sep 22 23:15:07.382 INFO [2] received reconcile message
39530 Sep 22 23:15:07.382 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(192), op: ExtentFlush { repair_id: ReconciliationId(192), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39531 Sep 22 23:15:07.382 INFO [2] client ExtentFlush { repair_id: ReconciliationId(192), extent_id: 163, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39532 Sep 22 23:15:07.383 DEBG 192 Flush extent 163 with f:2 g:2
39533 Sep 22 23:15:07.383 DEBG Flush just extent 163 with f:2 and g:2
39534 Sep 22 23:15:07.383 DEBG [1] It's time to notify for 192
39535 Sep 22 23:15:07.383 INFO Completion from [1] id:192 status:true
39536 Sep 22 23:15:07.383 INFO [193/752] Repair commands completed
39537 Sep 22 23:15:07.383 INFO Pop front: ReconcileIO { id: ReconciliationId(193), op: ExtentClose { repair_id: ReconciliationId(193), extent_id: 163 }, state: ClientData([New, New, New]) }
39538 Sep 22 23:15:07.383 INFO Sent repair work, now wait for resp
39539 Sep 22 23:15:07.383 INFO [0] received reconcile message
39540 Sep 22 23:15:07.383 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(193), op: ExtentClose { repair_id: ReconciliationId(193), extent_id: 163 }, state: ClientData([InProgress, New, New]) }, : downstairs
39541 Sep 22 23:15:07.383 INFO [0] client ExtentClose { repair_id: ReconciliationId(193), extent_id: 163 }
39542 Sep 22 23:15:07.383 INFO [1] received reconcile message
39543 Sep 22 23:15:07.383 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(193), op: ExtentClose { repair_id: ReconciliationId(193), extent_id: 163 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39544 Sep 22 23:15:07.383 INFO [1] client ExtentClose { repair_id: ReconciliationId(193), extent_id: 163 }
39545 Sep 22 23:15:07.383 INFO [2] received reconcile message
39546 Sep 22 23:15:07.383 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(193), op: ExtentClose { repair_id: ReconciliationId(193), extent_id: 163 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39547 Sep 22 23:15:07.383 INFO [2] client ExtentClose { repair_id: ReconciliationId(193), extent_id: 163 }
39548 Sep 22 23:15:07.383 DEBG 193 Close extent 163
39549 Sep 22 23:15:07.383 DEBG 193 Close extent 163
39550 Sep 22 23:15:07.384 DEBG 193 Close extent 163
39551 Sep 22 23:15:07.384 DEBG [2] It's time to notify for 193
39552 Sep 22 23:15:07.384 INFO Completion from [2] id:193 status:true
39553 Sep 22 23:15:07.384 INFO [194/752] Repair commands completed
39554 Sep 22 23:15:07.384 INFO Pop front: ReconcileIO { id: ReconciliationId(194), op: ExtentRepair { repair_id: ReconciliationId(194), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39555 Sep 22 23:15:07.384 INFO Sent repair work, now wait for resp
39556 Sep 22 23:15:07.384 INFO [0] received reconcile message
39557 Sep 22 23:15:07.384 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(194), op: ExtentRepair { repair_id: ReconciliationId(194), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39558 Sep 22 23:15:07.384 INFO [0] client ExtentRepair { repair_id: ReconciliationId(194), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39559 Sep 22 23:15:07.384 INFO [0] Sending repair request ReconciliationId(194)
39560 Sep 22 23:15:07.384 INFO [1] received reconcile message
39561 Sep 22 23:15:07.384 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(194), op: ExtentRepair { repair_id: ReconciliationId(194), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39562 Sep 22 23:15:07.384 INFO [1] client ExtentRepair { repair_id: ReconciliationId(194), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39563 Sep 22 23:15:07.384 INFO [1] No action required ReconciliationId(194)
39564 Sep 22 23:15:07.384 INFO [2] received reconcile message
39565 Sep 22 23:15:07.384 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(194), op: ExtentRepair { repair_id: ReconciliationId(194), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39566 Sep 22 23:15:07.384 INFO [2] client ExtentRepair { repair_id: ReconciliationId(194), extent_id: 163, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39567 Sep 22 23:15:07.384 INFO [2] No action required ReconciliationId(194)
39568 Sep 22 23:15:07.385 DEBG 194 Repair extent 163 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
39569 Sep 22 23:15:07.385 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0A3.copy"
39570 Sep 22 23:15:07.445 DEBG up_ds_listen was notified
39571 Sep 22 23:15:07.445 DEBG up_ds_listen process 1077
39572 Sep 22 23:15:07.446 DEBG [A] ack job 1077:78, : downstairs
39573 Sep 22 23:15:07.446 DEBG [rc] retire 1077 clears [JobId(1076), JobId(1077)], : downstairs
39574 Sep 22 23:15:07.446 DEBG up_ds_listen checked 1 jobs, back to waiting
39575 Sep 22 23:15:07.446 INFO accepted connection, remote_addr: 127.0.0.1:35289, local_addr: 127.0.0.1:46213, task: repair
39576 Sep 22 23:15:07.446 TRCE incoming request, uri: /extent/163/files, method: GET, req_id: 3c4d2311-2eb1-4b63-bae7-234365e29d49, remote_addr: 127.0.0.1:35289, local_addr: 127.0.0.1:46213, task: repair
39577 Sep 22 23:15:07.446 INFO request completed, latency_us: 206, response_code: 200, uri: /extent/163/files, method: GET, req_id: 3c4d2311-2eb1-4b63-bae7-234365e29d49, remote_addr: 127.0.0.1:35289, local_addr: 127.0.0.1:46213, task: repair
39578 Sep 22 23:15:07.447 INFO eid:163 Found repair files: ["0A3", "0A3.db"]
39579 Sep 22 23:15:07.447 TRCE incoming request, uri: /newextent/163/data, method: GET, req_id: 680a42dd-80d6-491f-8f53-ba700bb966b0, remote_addr: 127.0.0.1:35289, local_addr: 127.0.0.1:46213, task: repair
39580 Sep 22 23:15:07.447 INFO request completed, latency_us: 255, response_code: 200, uri: /newextent/163/data, method: GET, req_id: 680a42dd-80d6-491f-8f53-ba700bb966b0, remote_addr: 127.0.0.1:35289, local_addr: 127.0.0.1:46213, task: repair
39581 Sep 22 23:15:07.452 TRCE incoming request, uri: /newextent/163/db, method: GET, req_id: e1d61684-de09-4875-9f90-8f6e3106e293, remote_addr: 127.0.0.1:35289, local_addr: 127.0.0.1:46213, task: repair
39582 Sep 22 23:15:07.452 DEBG IO Flush 1079 has deps [JobId(1078)]
39583 Sep 22 23:15:07.452 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/163/db, method: GET, req_id: e1d61684-de09-4875-9f90-8f6e3106e293, remote_addr: 127.0.0.1:35289, local_addr: 127.0.0.1:46213, task: repair
39584 Sep 22 23:15:07.453 INFO [lossy] sleeping 1 second
39585 Sep 22 23:15:07.454 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0A3.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0A3.replace"
39586 Sep 22 23:15:07.454 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39587 Sep 22 23:15:07.454 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0A3.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
39588 Sep 22 23:15:07.455 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A3"
39589 Sep 22 23:15:07.455 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A3.db"
39590 Sep 22 23:15:07.455 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39591 Sep 22 23:15:07.455 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0A3.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0A3.completed"
39592 Sep 22 23:15:07.455 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39593 Sep 22 23:15:07.455 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39594 Sep 22 23:15:07.455 DEBG [0] It's time to notify for 194
39595 Sep 22 23:15:07.455 INFO Completion from [0] id:194 status:true
39596 Sep 22 23:15:07.455 INFO [195/752] Repair commands completed
39597 Sep 22 23:15:07.455 INFO Pop front: ReconcileIO { id: ReconciliationId(195), op: ExtentReopen { repair_id: ReconciliationId(195), extent_id: 163 }, state: ClientData([New, New, New]) }
39598 Sep 22 23:15:07.455 INFO Sent repair work, now wait for resp
39599 Sep 22 23:15:07.455 INFO [0] received reconcile message
39600 Sep 22 23:15:07.455 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(195), op: ExtentReopen { repair_id: ReconciliationId(195), extent_id: 163 }, state: ClientData([InProgress, New, New]) }, : downstairs
39601 Sep 22 23:15:07.455 INFO [0] client ExtentReopen { repair_id: ReconciliationId(195), extent_id: 163 }
39602 Sep 22 23:15:07.455 INFO [1] received reconcile message
39603 Sep 22 23:15:07.455 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(195), op: ExtentReopen { repair_id: ReconciliationId(195), extent_id: 163 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39604 Sep 22 23:15:07.455 INFO [1] client ExtentReopen { repair_id: ReconciliationId(195), extent_id: 163 }
39605 Sep 22 23:15:07.456 INFO [2] received reconcile message
39606 Sep 22 23:15:07.456 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(195), op: ExtentReopen { repair_id: ReconciliationId(195), extent_id: 163 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39607 Sep 22 23:15:07.456 INFO [2] client ExtentReopen { repair_id: ReconciliationId(195), extent_id: 163 }
39608 Sep 22 23:15:07.456 DEBG 195 Reopen extent 163
39609 Sep 22 23:15:07.456 DEBG 195 Reopen extent 163
39610 Sep 22 23:15:07.457 DEBG 195 Reopen extent 163
39611 Sep 22 23:15:07.457 DEBG [2] It's time to notify for 195
39612 Sep 22 23:15:07.457 INFO Completion from [2] id:195 status:true
39613 Sep 22 23:15:07.457 INFO [196/752] Repair commands completed
39614 Sep 22 23:15:07.457 INFO Pop front: ReconcileIO { id: ReconciliationId(196), op: ExtentFlush { repair_id: ReconciliationId(196), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39615 Sep 22 23:15:07.457 INFO Sent repair work, now wait for resp
39616 Sep 22 23:15:07.458 INFO [0] received reconcile message
39617 Sep 22 23:15:07.458 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(196), op: ExtentFlush { repair_id: ReconciliationId(196), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39618 Sep 22 23:15:07.458 INFO [0] client ExtentFlush { repair_id: ReconciliationId(196), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39619 Sep 22 23:15:07.458 INFO [1] received reconcile message
39620 Sep 22 23:15:07.458 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(196), op: ExtentFlush { repair_id: ReconciliationId(196), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39621 Sep 22 23:15:07.458 INFO [1] client ExtentFlush { repair_id: ReconciliationId(196), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39622 Sep 22 23:15:07.458 INFO [2] received reconcile message
39623 Sep 22 23:15:07.458 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(196), op: ExtentFlush { repair_id: ReconciliationId(196), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39624 Sep 22 23:15:07.458 INFO [2] client ExtentFlush { repair_id: ReconciliationId(196), extent_id: 165, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39625 Sep 22 23:15:07.458 DEBG 196 Flush extent 165 with f:2 g:2
39626 Sep 22 23:15:07.458 DEBG Flush just extent 165 with f:2 and g:2
39627 Sep 22 23:15:07.458 DEBG [1] It's time to notify for 196
39628 Sep 22 23:15:07.458 INFO Completion from [1] id:196 status:true
39629 Sep 22 23:15:07.458 INFO [197/752] Repair commands completed
39630 Sep 22 23:15:07.458 INFO Pop front: ReconcileIO { id: ReconciliationId(197), op: ExtentClose { repair_id: ReconciliationId(197), extent_id: 165 }, state: ClientData([New, New, New]) }
39631 Sep 22 23:15:07.458 INFO Sent repair work, now wait for resp
39632 Sep 22 23:15:07.458 INFO [0] received reconcile message
39633 Sep 22 23:15:07.458 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(197), op: ExtentClose { repair_id: ReconciliationId(197), extent_id: 165 }, state: ClientData([InProgress, New, New]) }, : downstairs
39634 Sep 22 23:15:07.458 INFO [0] client ExtentClose { repair_id: ReconciliationId(197), extent_id: 165 }
39635 Sep 22 23:15:07.458 INFO [1] received reconcile message
39636 Sep 22 23:15:07.458 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(197), op: ExtentClose { repair_id: ReconciliationId(197), extent_id: 165 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39637 Sep 22 23:15:07.458 INFO [1] client ExtentClose { repair_id: ReconciliationId(197), extent_id: 165 }
39638 Sep 22 23:15:07.458 INFO [2] received reconcile message
39639 Sep 22 23:15:07.458 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(197), op: ExtentClose { repair_id: ReconciliationId(197), extent_id: 165 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39640 Sep 22 23:15:07.458 INFO [2] client ExtentClose { repair_id: ReconciliationId(197), extent_id: 165 }
39641 Sep 22 23:15:07.458 DEBG 197 Close extent 165
39642 Sep 22 23:15:07.459 DEBG 197 Close extent 165
39643 Sep 22 23:15:07.459 DEBG 197 Close extent 165
39644 Sep 22 23:15:07.459 DEBG [2] It's time to notify for 197
39645 Sep 22 23:15:07.459 INFO Completion from [2] id:197 status:true
39646 Sep 22 23:15:07.459 INFO [198/752] Repair commands completed
39647 Sep 22 23:15:07.459 INFO Pop front: ReconcileIO { id: ReconciliationId(198), op: ExtentRepair { repair_id: ReconciliationId(198), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39648 Sep 22 23:15:07.459 INFO Sent repair work, now wait for resp
39649 Sep 22 23:15:07.459 INFO [0] received reconcile message
39650 Sep 22 23:15:07.459 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(198), op: ExtentRepair { repair_id: ReconciliationId(198), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39651 Sep 22 23:15:07.459 INFO [0] client ExtentRepair { repair_id: ReconciliationId(198), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39652 Sep 22 23:15:07.459 INFO [0] Sending repair request ReconciliationId(198)
39653 Sep 22 23:15:07.459 INFO [1] received reconcile message
39654 Sep 22 23:15:07.459 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(198), op: ExtentRepair { repair_id: ReconciliationId(198), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39655 Sep 22 23:15:07.460 INFO [1] client ExtentRepair { repair_id: ReconciliationId(198), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39656 Sep 22 23:15:07.460 INFO [1] No action required ReconciliationId(198)
39657 Sep 22 23:15:07.460 INFO [2] received reconcile message
39658 Sep 22 23:15:07.460 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(198), op: ExtentRepair { repair_id: ReconciliationId(198), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39659 Sep 22 23:15:07.460 INFO [2] client ExtentRepair { repair_id: ReconciliationId(198), extent_id: 165, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39660 Sep 22 23:15:07.460 INFO [2] No action required ReconciliationId(198)
39661 Sep 22 23:15:07.460 DEBG 198 Repair extent 165 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
39662 Sep 22 23:15:07.460 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0A5.copy"
39663 Sep 22 23:15:07.523 INFO accepted connection, remote_addr: 127.0.0.1:49460, local_addr: 127.0.0.1:46213, task: repair
39664 Sep 22 23:15:07.523 TRCE incoming request, uri: /extent/165/files, method: GET, req_id: c24b49bb-a7e7-40f1-a9ab-69f214c06fdf, remote_addr: 127.0.0.1:49460, local_addr: 127.0.0.1:46213, task: repair
39665 Sep 22 23:15:07.524 INFO request completed, latency_us: 197, response_code: 200, uri: /extent/165/files, method: GET, req_id: c24b49bb-a7e7-40f1-a9ab-69f214c06fdf, remote_addr: 127.0.0.1:49460, local_addr: 127.0.0.1:46213, task: repair
39666 Sep 22 23:15:07.524 INFO eid:165 Found repair files: ["0A5", "0A5.db"]
39667 Sep 22 23:15:07.524 TRCE incoming request, uri: /newextent/165/data, method: GET, req_id: bb24b161-4847-42d1-bec2-8e762515403f, remote_addr: 127.0.0.1:49460, local_addr: 127.0.0.1:46213, task: repair
39668 Sep 22 23:15:07.525 INFO request completed, latency_us: 320, response_code: 200, uri: /newextent/165/data, method: GET, req_id: bb24b161-4847-42d1-bec2-8e762515403f, remote_addr: 127.0.0.1:49460, local_addr: 127.0.0.1:46213, task: repair
39669 Sep 22 23:15:07.530 TRCE incoming request, uri: /newextent/165/db, method: GET, req_id: ea83bd3f-85c1-413a-af0d-49188727ac03, remote_addr: 127.0.0.1:49460, local_addr: 127.0.0.1:46213, task: repair
39670 Sep 22 23:15:07.530 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/165/db, method: GET, req_id: ea83bd3f-85c1-413a-af0d-49188727ac03, remote_addr: 127.0.0.1:49460, local_addr: 127.0.0.1:46213, task: repair
39671 Sep 22 23:15:07.531 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0A5.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0A5.replace"
39672 Sep 22 23:15:07.531 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39673 Sep 22 23:15:07.532 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0A5.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
39674 Sep 22 23:15:07.532 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A5"
39675 Sep 22 23:15:07.532 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A5.db"
39676 Sep 22 23:15:07.532 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39677 Sep 22 23:15:07.532 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0A5.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0A5.completed"
39678 Sep 22 23:15:07.532 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39679 Sep 22 23:15:07.532 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39680 Sep 22 23:15:07.532 DEBG [0] It's time to notify for 198
39681 Sep 22 23:15:07.533 INFO Completion from [0] id:198 status:true
39682 Sep 22 23:15:07.533 INFO [199/752] Repair commands completed
39683 Sep 22 23:15:07.533 INFO Pop front: ReconcileIO { id: ReconciliationId(199), op: ExtentReopen { repair_id: ReconciliationId(199), extent_id: 165 }, state: ClientData([New, New, New]) }
39684 Sep 22 23:15:07.533 INFO Sent repair work, now wait for resp
39685 Sep 22 23:15:07.533 INFO [0] received reconcile message
39686 Sep 22 23:15:07.533 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(199), op: ExtentReopen { repair_id: ReconciliationId(199), extent_id: 165 }, state: ClientData([InProgress, New, New]) }, : downstairs
39687 Sep 22 23:15:07.533 INFO [0] client ExtentReopen { repair_id: ReconciliationId(199), extent_id: 165 }
39688 Sep 22 23:15:07.533 INFO [1] received reconcile message
39689 Sep 22 23:15:07.533 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(199), op: ExtentReopen { repair_id: ReconciliationId(199), extent_id: 165 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39690 Sep 22 23:15:07.533 INFO [1] client ExtentReopen { repair_id: ReconciliationId(199), extent_id: 165 }
39691 Sep 22 23:15:07.533 INFO [2] received reconcile message
39692 Sep 22 23:15:07.533 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(199), op: ExtentReopen { repair_id: ReconciliationId(199), extent_id: 165 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39693 Sep 22 23:15:07.533 INFO [2] client ExtentReopen { repair_id: ReconciliationId(199), extent_id: 165 }
39694 Sep 22 23:15:07.533 DEBG 199 Reopen extent 165
39695 Sep 22 23:15:07.534 DEBG 199 Reopen extent 165
39696 Sep 22 23:15:07.534 DEBG 199 Reopen extent 165
39697 Sep 22 23:15:07.535 DEBG [2] It's time to notify for 199
39698 Sep 22 23:15:07.535 INFO Completion from [2] id:199 status:true
39699 Sep 22 23:15:07.535 INFO [200/752] Repair commands completed
39700 Sep 22 23:15:07.535 INFO Pop front: ReconcileIO { id: ReconciliationId(200), op: ExtentFlush { repair_id: ReconciliationId(200), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39701 Sep 22 23:15:07.535 INFO Sent repair work, now wait for resp
39702 Sep 22 23:15:07.535 INFO [0] received reconcile message
39703 Sep 22 23:15:07.535 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(200), op: ExtentFlush { repair_id: ReconciliationId(200), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39704 Sep 22 23:15:07.535 INFO [0] client ExtentFlush { repair_id: ReconciliationId(200), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39705 Sep 22 23:15:07.535 INFO [1] received reconcile message
39706 Sep 22 23:15:07.535 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(200), op: ExtentFlush { repair_id: ReconciliationId(200), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39707 Sep 22 23:15:07.535 INFO [1] client ExtentFlush { repair_id: ReconciliationId(200), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39708 Sep 22 23:15:07.535 INFO [2] received reconcile message
39709 Sep 22 23:15:07.535 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(200), op: ExtentFlush { repair_id: ReconciliationId(200), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39710 Sep 22 23:15:07.535 INFO [2] client ExtentFlush { repair_id: ReconciliationId(200), extent_id: 71, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39711 Sep 22 23:15:07.535 DEBG 200 Flush extent 71 with f:2 g:2
39712 Sep 22 23:15:07.535 DEBG Flush just extent 71 with f:2 and g:2
39713 Sep 22 23:15:07.535 DEBG [1] It's time to notify for 200
39714 Sep 22 23:15:07.535 INFO Completion from [1] id:200 status:true
39715 Sep 22 23:15:07.535 INFO [201/752] Repair commands completed
39716 Sep 22 23:15:07.535 INFO Pop front: ReconcileIO { id: ReconciliationId(201), op: ExtentClose { repair_id: ReconciliationId(201), extent_id: 71 }, state: ClientData([New, New, New]) }
39717 Sep 22 23:15:07.535 INFO Sent repair work, now wait for resp
39718 Sep 22 23:15:07.535 INFO [0] received reconcile message
39719 Sep 22 23:15:07.535 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(201), op: ExtentClose { repair_id: ReconciliationId(201), extent_id: 71 }, state: ClientData([InProgress, New, New]) }, : downstairs
39720 Sep 22 23:15:07.535 INFO [0] client ExtentClose { repair_id: ReconciliationId(201), extent_id: 71 }
39721 Sep 22 23:15:07.535 INFO [1] received reconcile message
39722 Sep 22 23:15:07.535 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(201), op: ExtentClose { repair_id: ReconciliationId(201), extent_id: 71 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39723 Sep 22 23:15:07.535 INFO [1] client ExtentClose { repair_id: ReconciliationId(201), extent_id: 71 }
39724 Sep 22 23:15:07.536 INFO [2] received reconcile message
39725 Sep 22 23:15:07.536 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(201), op: ExtentClose { repair_id: ReconciliationId(201), extent_id: 71 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39726 Sep 22 23:15:07.536 INFO [2] client ExtentClose { repair_id: ReconciliationId(201), extent_id: 71 }
39727 Sep 22 23:15:07.536 DEBG 201 Close extent 71
39728 Sep 22 23:15:07.536 DEBG 201 Close extent 71
39729 Sep 22 23:15:07.536 DEBG 201 Close extent 71
39730 Sep 22 23:15:07.537 DEBG [2] It's time to notify for 201
39731 Sep 22 23:15:07.537 INFO Completion from [2] id:201 status:true
39732 Sep 22 23:15:07.537 INFO [202/752] Repair commands completed
39733 Sep 22 23:15:07.537 INFO Pop front: ReconcileIO { id: ReconciliationId(202), op: ExtentRepair { repair_id: ReconciliationId(202), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39734 Sep 22 23:15:07.537 INFO Sent repair work, now wait for resp
39735 Sep 22 23:15:07.537 INFO [0] received reconcile message
39736 Sep 22 23:15:07.537 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(202), op: ExtentRepair { repair_id: ReconciliationId(202), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39737 Sep 22 23:15:07.537 INFO [0] client ExtentRepair { repair_id: ReconciliationId(202), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39738 Sep 22 23:15:07.537 INFO [0] Sending repair request ReconciliationId(202)
39739 Sep 22 23:15:07.537 INFO [1] received reconcile message
39740 Sep 22 23:15:07.537 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(202), op: ExtentRepair { repair_id: ReconciliationId(202), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39741 Sep 22 23:15:07.537 INFO [1] client ExtentRepair { repair_id: ReconciliationId(202), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39742 Sep 22 23:15:07.537 INFO [1] No action required ReconciliationId(202)
39743 Sep 22 23:15:07.537 INFO [2] received reconcile message
39744 Sep 22 23:15:07.537 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(202), op: ExtentRepair { repair_id: ReconciliationId(202), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39745 Sep 22 23:15:07.537 INFO [2] client ExtentRepair { repair_id: ReconciliationId(202), extent_id: 71, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39746 Sep 22 23:15:07.537 INFO [2] No action required ReconciliationId(202)
39747 Sep 22 23:15:07.537 DEBG 202 Repair extent 71 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
39748 Sep 22 23:15:07.537 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/047.copy"
39749 Sep 22 23:15:07.597 INFO accepted connection, remote_addr: 127.0.0.1:56999, local_addr: 127.0.0.1:46213, task: repair
39750 Sep 22 23:15:07.598 TRCE incoming request, uri: /extent/71/files, method: GET, req_id: 647b4075-1a57-4e43-a49b-f700ed9d6a51, remote_addr: 127.0.0.1:56999, local_addr: 127.0.0.1:46213, task: repair
39751 Sep 22 23:15:07.598 INFO request completed, latency_us: 193, response_code: 200, uri: /extent/71/files, method: GET, req_id: 647b4075-1a57-4e43-a49b-f700ed9d6a51, remote_addr: 127.0.0.1:56999, local_addr: 127.0.0.1:46213, task: repair
39752 Sep 22 23:15:07.598 INFO eid:71 Found repair files: ["047", "047.db"]
39753 Sep 22 23:15:07.598 TRCE incoming request, uri: /newextent/71/data, method: GET, req_id: 3844fc9a-02e2-4cb1-9ef6-6133ea20948f, remote_addr: 127.0.0.1:56999, local_addr: 127.0.0.1:46213, task: repair
39754 Sep 22 23:15:07.599 INFO request completed, latency_us: 327, response_code: 200, uri: /newextent/71/data, method: GET, req_id: 3844fc9a-02e2-4cb1-9ef6-6133ea20948f, remote_addr: 127.0.0.1:56999, local_addr: 127.0.0.1:46213, task: repair
39755 Sep 22 23:15:07.604 TRCE incoming request, uri: /newextent/71/db, method: GET, req_id: 98d8015c-0582-467e-923e-79055a81464f, remote_addr: 127.0.0.1:56999, local_addr: 127.0.0.1:46213, task: repair
39756 Sep 22 23:15:07.604 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/71/db, method: GET, req_id: 98d8015c-0582-467e-923e-79055a81464f, remote_addr: 127.0.0.1:56999, local_addr: 127.0.0.1:46213, task: repair
39757 Sep 22 23:15:07.605 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/047.copy" to "/tmp/downstairs-vrx8aK6L/00/000/047.replace"
39758 Sep 22 23:15:07.605 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39759 Sep 22 23:15:07.606 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/047.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
39760 Sep 22 23:15:07.606 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/047"
39761 Sep 22 23:15:07.606 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/047.db"
39762 Sep 22 23:15:07.606 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39763 Sep 22 23:15:07.606 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/047.replace" to "/tmp/downstairs-vrx8aK6L/00/000/047.completed"
39764 Sep 22 23:15:07.606 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39765 Sep 22 23:15:07.606 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39766 Sep 22 23:15:07.606 DEBG [0] It's time to notify for 202
39767 Sep 22 23:15:07.607 INFO Completion from [0] id:202 status:true
39768 Sep 22 23:15:07.607 INFO [203/752] Repair commands completed
39769 Sep 22 23:15:07.607 INFO Pop front: ReconcileIO { id: ReconciliationId(203), op: ExtentReopen { repair_id: ReconciliationId(203), extent_id: 71 }, state: ClientData([New, New, New]) }
39770 Sep 22 23:15:07.607 INFO Sent repair work, now wait for resp
39771 Sep 22 23:15:07.607 INFO [0] received reconcile message
39772 Sep 22 23:15:07.607 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(203), op: ExtentReopen { repair_id: ReconciliationId(203), extent_id: 71 }, state: ClientData([InProgress, New, New]) }, : downstairs
39773 Sep 22 23:15:07.607 INFO [0] client ExtentReopen { repair_id: ReconciliationId(203), extent_id: 71 }
39774 Sep 22 23:15:07.607 INFO [1] received reconcile message
39775 Sep 22 23:15:07.607 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(203), op: ExtentReopen { repair_id: ReconciliationId(203), extent_id: 71 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39776 Sep 22 23:15:07.607 INFO [1] client ExtentReopen { repair_id: ReconciliationId(203), extent_id: 71 }
39777 Sep 22 23:15:07.607 INFO [2] received reconcile message
39778 Sep 22 23:15:07.607 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(203), op: ExtentReopen { repair_id: ReconciliationId(203), extent_id: 71 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39779 Sep 22 23:15:07.607 INFO [2] client ExtentReopen { repair_id: ReconciliationId(203), extent_id: 71 }
39780 Sep 22 23:15:07.607 DEBG 203 Reopen extent 71
39781 Sep 22 23:15:07.607 DEBG 203 Reopen extent 71
39782 Sep 22 23:15:07.608 DEBG 203 Reopen extent 71
39783 Sep 22 23:15:07.609 DEBG [2] It's time to notify for 203
39784 Sep 22 23:15:07.609 INFO Completion from [2] id:203 status:true
39785 Sep 22 23:15:07.609 INFO [204/752] Repair commands completed
39786 Sep 22 23:15:07.609 INFO Pop front: ReconcileIO { id: ReconciliationId(204), op: ExtentFlush { repair_id: ReconciliationId(204), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39787 Sep 22 23:15:07.609 INFO Sent repair work, now wait for resp
39788 Sep 22 23:15:07.609 INFO [0] received reconcile message
39789 Sep 22 23:15:07.609 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(204), op: ExtentFlush { repair_id: ReconciliationId(204), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39790 Sep 22 23:15:07.609 INFO [0] client ExtentFlush { repair_id: ReconciliationId(204), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39791 Sep 22 23:15:07.609 INFO [1] received reconcile message
39792 Sep 22 23:15:07.609 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(204), op: ExtentFlush { repair_id: ReconciliationId(204), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39793 Sep 22 23:15:07.609 INFO [1] client ExtentFlush { repair_id: ReconciliationId(204), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39794 Sep 22 23:15:07.609 INFO [2] received reconcile message
39795 Sep 22 23:15:07.609 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(204), op: ExtentFlush { repair_id: ReconciliationId(204), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39796 Sep 22 23:15:07.609 INFO [2] client ExtentFlush { repair_id: ReconciliationId(204), extent_id: 141, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39797 Sep 22 23:15:07.609 DEBG 204 Flush extent 141 with f:2 g:2
39798 Sep 22 23:15:07.609 DEBG Flush just extent 141 with f:2 and g:2
39799 Sep 22 23:15:07.609 DEBG [1] It's time to notify for 204
39800 Sep 22 23:15:07.609 INFO Completion from [1] id:204 status:true
39801 Sep 22 23:15:07.609 INFO [205/752] Repair commands completed
39802 Sep 22 23:15:07.609 INFO Pop front: ReconcileIO { id: ReconciliationId(205), op: ExtentClose { repair_id: ReconciliationId(205), extent_id: 141 }, state: ClientData([New, New, New]) }
39803 Sep 22 23:15:07.609 INFO Sent repair work, now wait for resp
39804 Sep 22 23:15:07.609 INFO [0] received reconcile message
39805 Sep 22 23:15:07.609 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(205), op: ExtentClose { repair_id: ReconciliationId(205), extent_id: 141 }, state: ClientData([InProgress, New, New]) }, : downstairs
39806 Sep 22 23:15:07.609 INFO [0] client ExtentClose { repair_id: ReconciliationId(205), extent_id: 141 }
39807 Sep 22 23:15:07.609 INFO [1] received reconcile message
39808 Sep 22 23:15:07.609 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(205), op: ExtentClose { repair_id: ReconciliationId(205), extent_id: 141 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39809 Sep 22 23:15:07.609 INFO [1] client ExtentClose { repair_id: ReconciliationId(205), extent_id: 141 }
39810 Sep 22 23:15:07.609 INFO [2] received reconcile message
39811 Sep 22 23:15:07.609 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(205), op: ExtentClose { repair_id: ReconciliationId(205), extent_id: 141 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39812 Sep 22 23:15:07.609 INFO [2] client ExtentClose { repair_id: ReconciliationId(205), extent_id: 141 }
39813 Sep 22 23:15:07.610 DEBG 205 Close extent 141
39814 Sep 22 23:15:07.610 DEBG 205 Close extent 141
39815 Sep 22 23:15:07.610 DEBG 205 Close extent 141
39816 Sep 22 23:15:07.610 DEBG [2] It's time to notify for 205
39817 Sep 22 23:15:07.611 INFO Completion from [2] id:205 status:true
39818 Sep 22 23:15:07.611 INFO [206/752] Repair commands completed
39819 Sep 22 23:15:07.611 INFO Pop front: ReconcileIO { id: ReconciliationId(206), op: ExtentRepair { repair_id: ReconciliationId(206), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39820 Sep 22 23:15:07.611 INFO Sent repair work, now wait for resp
39821 Sep 22 23:15:07.611 INFO [0] received reconcile message
39822 Sep 22 23:15:07.611 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(206), op: ExtentRepair { repair_id: ReconciliationId(206), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39823 Sep 22 23:15:07.611 INFO [0] client ExtentRepair { repair_id: ReconciliationId(206), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39824 Sep 22 23:15:07.611 INFO [0] Sending repair request ReconciliationId(206)
39825 Sep 22 23:15:07.611 INFO [1] received reconcile message
39826 Sep 22 23:15:07.611 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(206), op: ExtentRepair { repair_id: ReconciliationId(206), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39827 Sep 22 23:15:07.611 INFO [1] client ExtentRepair { repair_id: ReconciliationId(206), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39828 Sep 22 23:15:07.611 INFO [1] No action required ReconciliationId(206)
39829 Sep 22 23:15:07.611 INFO [2] received reconcile message
39830 Sep 22 23:15:07.611 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(206), op: ExtentRepair { repair_id: ReconciliationId(206), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39831 Sep 22 23:15:07.611 INFO [2] client ExtentRepair { repair_id: ReconciliationId(206), extent_id: 141, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39832 Sep 22 23:15:07.611 INFO [2] No action required ReconciliationId(206)
39833 Sep 22 23:15:07.611 DEBG 206 Repair extent 141 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
39834 Sep 22 23:15:07.611 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/08D.copy"
39835 Sep 22 23:15:07.675 INFO accepted connection, remote_addr: 127.0.0.1:42719, local_addr: 127.0.0.1:46213, task: repair
39836 Sep 22 23:15:07.675 TRCE incoming request, uri: /extent/141/files, method: GET, req_id: d984cac8-a869-4355-84c9-b52543b196b2, remote_addr: 127.0.0.1:42719, local_addr: 127.0.0.1:46213, task: repair
39837 Sep 22 23:15:07.676 INFO request completed, latency_us: 211, response_code: 200, uri: /extent/141/files, method: GET, req_id: d984cac8-a869-4355-84c9-b52543b196b2, remote_addr: 127.0.0.1:42719, local_addr: 127.0.0.1:46213, task: repair
39838 Sep 22 23:15:07.676 INFO eid:141 Found repair files: ["08D", "08D.db"]
39839 Sep 22 23:15:07.676 TRCE incoming request, uri: /newextent/141/data, method: GET, req_id: df7c18b3-ab72-450d-922a-ff13b4ff928f, remote_addr: 127.0.0.1:42719, local_addr: 127.0.0.1:46213, task: repair
39840 Sep 22 23:15:07.677 INFO request completed, latency_us: 328, response_code: 200, uri: /newextent/141/data, method: GET, req_id: df7c18b3-ab72-450d-922a-ff13b4ff928f, remote_addr: 127.0.0.1:42719, local_addr: 127.0.0.1:46213, task: repair
39841 Sep 22 23:15:07.682 TRCE incoming request, uri: /newextent/141/db, method: GET, req_id: 77f6cb1b-5fa7-45e3-9624-f0100a9da33e, remote_addr: 127.0.0.1:42719, local_addr: 127.0.0.1:46213, task: repair
39842 Sep 22 23:15:07.682 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/141/db, method: GET, req_id: 77f6cb1b-5fa7-45e3-9624-f0100a9da33e, remote_addr: 127.0.0.1:42719, local_addr: 127.0.0.1:46213, task: repair
39843 Sep 22 23:15:07.683 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/08D.copy" to "/tmp/downstairs-vrx8aK6L/00/000/08D.replace"
39844 Sep 22 23:15:07.683 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39845 Sep 22 23:15:07.684 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/08D.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
39846 Sep 22 23:15:07.684 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/08D"
39847 Sep 22 23:15:07.684 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/08D.db"
39848 Sep 22 23:15:07.684 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39849 Sep 22 23:15:07.684 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/08D.replace" to "/tmp/downstairs-vrx8aK6L/00/000/08D.completed"
39850 Sep 22 23:15:07.685 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39851 Sep 22 23:15:07.685 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39852 Sep 22 23:15:07.685 DEBG [0] It's time to notify for 206
39853 Sep 22 23:15:07.685 INFO Completion from [0] id:206 status:true
39854 Sep 22 23:15:07.685 INFO [207/752] Repair commands completed
39855 Sep 22 23:15:07.685 INFO Pop front: ReconcileIO { id: ReconciliationId(207), op: ExtentReopen { repair_id: ReconciliationId(207), extent_id: 141 }, state: ClientData([New, New, New]) }
39856 Sep 22 23:15:07.685 INFO Sent repair work, now wait for resp
39857 Sep 22 23:15:07.685 INFO [0] received reconcile message
39858 Sep 22 23:15:07.685 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(207), op: ExtentReopen { repair_id: ReconciliationId(207), extent_id: 141 }, state: ClientData([InProgress, New, New]) }, : downstairs
39859 Sep 22 23:15:07.685 INFO [0] client ExtentReopen { repair_id: ReconciliationId(207), extent_id: 141 }
39860 Sep 22 23:15:07.685 INFO [1] received reconcile message
39861 Sep 22 23:15:07.685 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(207), op: ExtentReopen { repair_id: ReconciliationId(207), extent_id: 141 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39862 Sep 22 23:15:07.685 INFO [1] client ExtentReopen { repair_id: ReconciliationId(207), extent_id: 141 }
39863 Sep 22 23:15:07.685 INFO [2] received reconcile message
39864 Sep 22 23:15:07.685 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(207), op: ExtentReopen { repair_id: ReconciliationId(207), extent_id: 141 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39865 Sep 22 23:15:07.685 INFO [2] client ExtentReopen { repair_id: ReconciliationId(207), extent_id: 141 }
39866 Sep 22 23:15:07.685 DEBG 207 Reopen extent 141
39867 Sep 22 23:15:07.686 DEBG Read :1078 deps:[JobId(1077)] res:true
39868 Sep 22 23:15:07.686 DEBG 207 Reopen extent 141
39869 Sep 22 23:15:07.687 DEBG 207 Reopen extent 141
39870 Sep 22 23:15:07.687 DEBG [2] It's time to notify for 207
39871 Sep 22 23:15:07.687 INFO Completion from [2] id:207 status:true
39872 Sep 22 23:15:07.687 INFO [208/752] Repair commands completed
39873 Sep 22 23:15:07.687 INFO Pop front: ReconcileIO { id: ReconciliationId(208), op: ExtentFlush { repair_id: ReconciliationId(208), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39874 Sep 22 23:15:07.687 INFO Sent repair work, now wait for resp
39875 Sep 22 23:15:07.687 INFO [0] received reconcile message
39876 Sep 22 23:15:07.687 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(208), op: ExtentFlush { repair_id: ReconciliationId(208), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39877 Sep 22 23:15:07.687 INFO [0] client ExtentFlush { repair_id: ReconciliationId(208), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39878 Sep 22 23:15:07.687 INFO [1] received reconcile message
39879 Sep 22 23:15:07.687 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(208), op: ExtentFlush { repair_id: ReconciliationId(208), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39880 Sep 22 23:15:07.687 INFO [1] client ExtentFlush { repair_id: ReconciliationId(208), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39881 Sep 22 23:15:07.687 INFO [2] received reconcile message
39882 Sep 22 23:15:07.687 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(208), op: ExtentFlush { repair_id: ReconciliationId(208), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39883 Sep 22 23:15:07.687 INFO [2] client ExtentFlush { repair_id: ReconciliationId(208), extent_id: 114, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39884 Sep 22 23:15:07.688 DEBG 208 Flush extent 114 with f:2 g:2
39885 Sep 22 23:15:07.688 DEBG Flush just extent 114 with f:2 and g:2
39886 Sep 22 23:15:07.688 DEBG [1] It's time to notify for 208
39887 Sep 22 23:15:07.688 INFO Completion from [1] id:208 status:true
39888 Sep 22 23:15:07.688 INFO [209/752] Repair commands completed
39889 Sep 22 23:15:07.688 INFO Pop front: ReconcileIO { id: ReconciliationId(209), op: ExtentClose { repair_id: ReconciliationId(209), extent_id: 114 }, state: ClientData([New, New, New]) }
39890 Sep 22 23:15:07.688 INFO Sent repair work, now wait for resp
39891 Sep 22 23:15:07.688 INFO [0] received reconcile message
39892 Sep 22 23:15:07.688 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(209), op: ExtentClose { repair_id: ReconciliationId(209), extent_id: 114 }, state: ClientData([InProgress, New, New]) }, : downstairs
39893 Sep 22 23:15:07.688 INFO [0] client ExtentClose { repair_id: ReconciliationId(209), extent_id: 114 }
39894 Sep 22 23:15:07.688 INFO [1] received reconcile message
39895 Sep 22 23:15:07.688 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(209), op: ExtentClose { repair_id: ReconciliationId(209), extent_id: 114 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39896 Sep 22 23:15:07.688 INFO [1] client ExtentClose { repair_id: ReconciliationId(209), extent_id: 114 }
39897 Sep 22 23:15:07.688 INFO [2] received reconcile message
39898 Sep 22 23:15:07.688 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(209), op: ExtentClose { repair_id: ReconciliationId(209), extent_id: 114 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39899 Sep 22 23:15:07.688 INFO [2] client ExtentClose { repair_id: ReconciliationId(209), extent_id: 114 }
39900 Sep 22 23:15:07.688 DEBG 209 Close extent 114
39901 Sep 22 23:15:07.688 DEBG 209 Close extent 114
39902 Sep 22 23:15:07.689 DEBG 209 Close extent 114
39903 Sep 22 23:15:07.689 DEBG [2] It's time to notify for 209
39904 Sep 22 23:15:07.689 INFO Completion from [2] id:209 status:true
39905 Sep 22 23:15:07.689 INFO [210/752] Repair commands completed
39906 Sep 22 23:15:07.689 INFO Pop front: ReconcileIO { id: ReconciliationId(210), op: ExtentRepair { repair_id: ReconciliationId(210), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39907 Sep 22 23:15:07.689 INFO Sent repair work, now wait for resp
39908 Sep 22 23:15:07.689 INFO [0] received reconcile message
39909 Sep 22 23:15:07.689 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(210), op: ExtentRepair { repair_id: ReconciliationId(210), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
39910 Sep 22 23:15:07.689 INFO [0] client ExtentRepair { repair_id: ReconciliationId(210), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39911 Sep 22 23:15:07.689 INFO [0] Sending repair request ReconciliationId(210)
39912 Sep 22 23:15:07.689 INFO [1] received reconcile message
39913 Sep 22 23:15:07.689 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(210), op: ExtentRepair { repair_id: ReconciliationId(210), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39914 Sep 22 23:15:07.689 INFO [1] client ExtentRepair { repair_id: ReconciliationId(210), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39915 Sep 22 23:15:07.689 INFO [1] No action required ReconciliationId(210)
39916 Sep 22 23:15:07.689 INFO [2] received reconcile message
39917 Sep 22 23:15:07.689 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(210), op: ExtentRepair { repair_id: ReconciliationId(210), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
39918 Sep 22 23:15:07.689 INFO [2] client ExtentRepair { repair_id: ReconciliationId(210), extent_id: 114, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
39919 Sep 22 23:15:07.690 INFO [2] No action required ReconciliationId(210)
39920 Sep 22 23:15:07.690 DEBG 210 Repair extent 114 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
39921 Sep 22 23:15:07.690 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/072.copy"
39922 Sep 22 23:15:07.707 INFO [lossy] skipping 1078
39923 Sep 22 23:15:07.707 INFO [lossy] skipping 1078
39924 Sep 22 23:15:07.714 DEBG Read :1078 deps:[JobId(1077)] res:true
39925 Sep 22 23:15:07.737 INFO [lossy] skipping 1079
39926 Sep 22 23:15:07.737 DEBG Flush :1079 extent_limit None deps:[JobId(1078)] res:true f:29 g:1
39927 Sep 22 23:15:07.737 INFO [lossy] sleeping 1 second
39928 Sep 22 23:15:07.750 INFO accepted connection, remote_addr: 127.0.0.1:57983, local_addr: 127.0.0.1:46213, task: repair
39929 Sep 22 23:15:07.750 TRCE incoming request, uri: /extent/114/files, method: GET, req_id: e0183434-1d00-4919-b2a3-77d22d5d5997, remote_addr: 127.0.0.1:57983, local_addr: 127.0.0.1:46213, task: repair
39930 Sep 22 23:15:07.750 INFO request completed, latency_us: 230, response_code: 200, uri: /extent/114/files, method: GET, req_id: e0183434-1d00-4919-b2a3-77d22d5d5997, remote_addr: 127.0.0.1:57983, local_addr: 127.0.0.1:46213, task: repair
39931 Sep 22 23:15:07.750 INFO eid:114 Found repair files: ["072", "072.db"]
39932 Sep 22 23:15:07.751 TRCE incoming request, uri: /newextent/114/data, method: GET, req_id: 6a9364e3-6f10-4480-ab72-341cc126b76b, remote_addr: 127.0.0.1:57983, local_addr: 127.0.0.1:46213, task: repair
39933 Sep 22 23:15:07.751 INFO request completed, latency_us: 322, response_code: 200, uri: /newextent/114/data, method: GET, req_id: 6a9364e3-6f10-4480-ab72-341cc126b76b, remote_addr: 127.0.0.1:57983, local_addr: 127.0.0.1:46213, task: repair
39934 Sep 22 23:15:07.756 TRCE incoming request, uri: /newextent/114/db, method: GET, req_id: db957bd1-472f-4fbc-94a4-3a19979571e5, remote_addr: 127.0.0.1:57983, local_addr: 127.0.0.1:46213, task: repair
39935 Sep 22 23:15:07.756 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/114/db, method: GET, req_id: db957bd1-472f-4fbc-94a4-3a19979571e5, remote_addr: 127.0.0.1:57983, local_addr: 127.0.0.1:46213, task: repair
39936 Sep 22 23:15:07.758 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/072.copy" to "/tmp/downstairs-vrx8aK6L/00/000/072.replace"
39937 Sep 22 23:15:07.758 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39938 Sep 22 23:15:07.759 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/072.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
39939 Sep 22 23:15:07.759 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/072"
39940 Sep 22 23:15:07.759 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/072.db"
39941 Sep 22 23:15:07.759 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39942 Sep 22 23:15:07.759 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/072.replace" to "/tmp/downstairs-vrx8aK6L/00/000/072.completed"
39943 Sep 22 23:15:07.759 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39944 Sep 22 23:15:07.759 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
39945 Sep 22 23:15:07.759 DEBG [0] It's time to notify for 210
39946 Sep 22 23:15:07.759 INFO Completion from [0] id:210 status:true
39947 Sep 22 23:15:07.759 INFO [211/752] Repair commands completed
39948 Sep 22 23:15:07.759 INFO Pop front: ReconcileIO { id: ReconciliationId(211), op: ExtentReopen { repair_id: ReconciliationId(211), extent_id: 114 }, state: ClientData([New, New, New]) }
39949 Sep 22 23:15:07.759 INFO Sent repair work, now wait for resp
39950 Sep 22 23:15:07.760 INFO [0] received reconcile message
39951 Sep 22 23:15:07.760 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(211), op: ExtentReopen { repair_id: ReconciliationId(211), extent_id: 114 }, state: ClientData([InProgress, New, New]) }, : downstairs
39952 Sep 22 23:15:07.760 INFO [0] client ExtentReopen { repair_id: ReconciliationId(211), extent_id: 114 }
39953 Sep 22 23:15:07.760 INFO [1] received reconcile message
39954 Sep 22 23:15:07.760 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(211), op: ExtentReopen { repair_id: ReconciliationId(211), extent_id: 114 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39955 Sep 22 23:15:07.760 INFO [1] client ExtentReopen { repair_id: ReconciliationId(211), extent_id: 114 }
39956 Sep 22 23:15:07.760 INFO [2] received reconcile message
39957 Sep 22 23:15:07.760 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(211), op: ExtentReopen { repair_id: ReconciliationId(211), extent_id: 114 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39958 Sep 22 23:15:07.760 INFO [2] client ExtentReopen { repair_id: ReconciliationId(211), extent_id: 114 }
39959 Sep 22 23:15:07.760 DEBG 211 Reopen extent 114
39960 Sep 22 23:15:07.761 DEBG 211 Reopen extent 114
39961 Sep 22 23:15:07.761 DEBG 211 Reopen extent 114
39962 Sep 22 23:15:07.762 DEBG [2] It's time to notify for 211
39963 Sep 22 23:15:07.762 INFO Completion from [2] id:211 status:true
39964 Sep 22 23:15:07.762 INFO [212/752] Repair commands completed
39965 Sep 22 23:15:07.762 INFO Pop front: ReconcileIO { id: ReconciliationId(212), op: ExtentFlush { repair_id: ReconciliationId(212), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
39966 Sep 22 23:15:07.762 INFO Sent repair work, now wait for resp
39967 Sep 22 23:15:07.762 INFO [0] received reconcile message
39968 Sep 22 23:15:07.762 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(212), op: ExtentFlush { repair_id: ReconciliationId(212), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
39969 Sep 22 23:15:07.762 INFO [0] client ExtentFlush { repair_id: ReconciliationId(212), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39970 Sep 22 23:15:07.762 INFO [1] received reconcile message
39971 Sep 22 23:15:07.762 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(212), op: ExtentFlush { repair_id: ReconciliationId(212), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
39972 Sep 22 23:15:07.762 INFO [1] client ExtentFlush { repair_id: ReconciliationId(212), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39973 Sep 22 23:15:07.762 INFO [2] received reconcile message
39974 Sep 22 23:15:07.762 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(212), op: ExtentFlush { repair_id: ReconciliationId(212), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
39975 Sep 22 23:15:07.762 INFO [2] client ExtentFlush { repair_id: ReconciliationId(212), extent_id: 107, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
39976 Sep 22 23:15:07.762 DEBG 212 Flush extent 107 with f:2 g:2
39977 Sep 22 23:15:07.762 DEBG Flush just extent 107 with f:2 and g:2
39978 Sep 22 23:15:07.762 DEBG [1] It's time to notify for 212
39979 Sep 22 23:15:07.762 INFO Completion from [1] id:212 status:true
39980 Sep 22 23:15:07.762 INFO [213/752] Repair commands completed
39981 Sep 22 23:15:07.762 INFO Pop front: ReconcileIO { id: ReconciliationId(213), op: ExtentClose { repair_id: ReconciliationId(213), extent_id: 107 }, state: ClientData([New, New, New]) }
39982 Sep 22 23:15:07.762 INFO Sent repair work, now wait for resp
39983 Sep 22 23:15:07.762 INFO [0] received reconcile message
39984 Sep 22 23:15:07.762 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(213), op: ExtentClose { repair_id: ReconciliationId(213), extent_id: 107 }, state: ClientData([InProgress, New, New]) }, : downstairs
39985 Sep 22 23:15:07.762 INFO [0] client ExtentClose { repair_id: ReconciliationId(213), extent_id: 107 }
39986 Sep 22 23:15:07.762 INFO [1] received reconcile message
39987 Sep 22 23:15:07.762 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(213), op: ExtentClose { repair_id: ReconciliationId(213), extent_id: 107 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
39988 Sep 22 23:15:07.762 INFO [1] client ExtentClose { repair_id: ReconciliationId(213), extent_id: 107 }
39989 Sep 22 23:15:07.763 INFO [2] received reconcile message
39990 Sep 22 23:15:07.763 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(213), op: ExtentClose { repair_id: ReconciliationId(213), extent_id: 107 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
39991 Sep 22 23:15:07.763 INFO [2] client ExtentClose { repair_id: ReconciliationId(213), extent_id: 107 }
39992 Sep 22 23:15:07.763 DEBG 213 Close extent 107
39993 Sep 22 23:15:07.763 DEBG 213 Close extent 107
39994 Sep 22 23:15:07.763 DEBG 213 Close extent 107
39995 Sep 22 23:15:07.764 DEBG [2] It's time to notify for 213
39996 Sep 22 23:15:07.764 INFO Completion from [2] id:213 status:true
39997 Sep 22 23:15:07.764 INFO [214/752] Repair commands completed
39998 Sep 22 23:15:07.764 INFO Pop front: ReconcileIO { id: ReconciliationId(214), op: ExtentRepair { repair_id: ReconciliationId(214), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
39999 Sep 22 23:15:07.764 INFO Sent repair work, now wait for resp
40000 Sep 22 23:15:07.764 INFO [0] received reconcile message
40001 Sep 22 23:15:07.764 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(214), op: ExtentRepair { repair_id: ReconciliationId(214), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40002 Sep 22 23:15:07.764 INFO [0] client ExtentRepair { repair_id: ReconciliationId(214), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40003 Sep 22 23:15:07.764 INFO [0] Sending repair request ReconciliationId(214)
40004 Sep 22 23:15:07.764 INFO [1] received reconcile message
40005 Sep 22 23:15:07.764 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(214), op: ExtentRepair { repair_id: ReconciliationId(214), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40006 Sep 22 23:15:07.764 INFO [1] client ExtentRepair { repair_id: ReconciliationId(214), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40007 Sep 22 23:15:07.764 INFO [1] No action required ReconciliationId(214)
40008 Sep 22 23:15:07.764 INFO [2] received reconcile message
40009 Sep 22 23:15:07.764 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(214), op: ExtentRepair { repair_id: ReconciliationId(214), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40010 Sep 22 23:15:07.764 INFO [2] client ExtentRepair { repair_id: ReconciliationId(214), extent_id: 107, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40011 Sep 22 23:15:07.764 INFO [2] No action required ReconciliationId(214)
40012 Sep 22 23:15:07.764 DEBG 214 Repair extent 107 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
40013 Sep 22 23:15:07.764 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/06B.copy"
40014 Sep 22 23:15:07.786 DEBG Flush :1079 extent_limit None deps:[JobId(1078)] res:true f:29 g:1
40015 Sep 22 23:15:07.786 INFO [lossy] sleeping 1 second
40016 Sep 22 23:15:07.829 INFO accepted connection, remote_addr: 127.0.0.1:61335, local_addr: 127.0.0.1:46213, task: repair
40017 Sep 22 23:15:07.829 TRCE incoming request, uri: /extent/107/files, method: GET, req_id: bc890764-1cf6-41d7-9420-7b1a70592f84, remote_addr: 127.0.0.1:61335, local_addr: 127.0.0.1:46213, task: repair
40018 Sep 22 23:15:07.829 INFO request completed, latency_us: 231, response_code: 200, uri: /extent/107/files, method: GET, req_id: bc890764-1cf6-41d7-9420-7b1a70592f84, remote_addr: 127.0.0.1:61335, local_addr: 127.0.0.1:46213, task: repair
40019 Sep 22 23:15:07.829 INFO eid:107 Found repair files: ["06B", "06B.db"]
40020 Sep 22 23:15:07.830 TRCE incoming request, uri: /newextent/107/data, method: GET, req_id: d9c05532-0e9b-496b-9399-af02db856432, remote_addr: 127.0.0.1:61335, local_addr: 127.0.0.1:46213, task: repair
40021 Sep 22 23:15:07.830 INFO request completed, latency_us: 325, response_code: 200, uri: /newextent/107/data, method: GET, req_id: d9c05532-0e9b-496b-9399-af02db856432, remote_addr: 127.0.0.1:61335, local_addr: 127.0.0.1:46213, task: repair
40022 Sep 22 23:15:07.835 TRCE incoming request, uri: /newextent/107/db, method: GET, req_id: 0ca9d7fc-feed-41f8-8070-ef0cec295249, remote_addr: 127.0.0.1:61335, local_addr: 127.0.0.1:46213, task: repair
40023 Sep 22 23:15:07.835 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/107/db, method: GET, req_id: 0ca9d7fc-feed-41f8-8070-ef0cec295249, remote_addr: 127.0.0.1:61335, local_addr: 127.0.0.1:46213, task: repair
40024 Sep 22 23:15:07.837 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/06B.copy" to "/tmp/downstairs-vrx8aK6L/00/000/06B.replace"
40025 Sep 22 23:15:07.837 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40026 Sep 22 23:15:07.837 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/06B.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
40027 Sep 22 23:15:07.838 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/06B"
40028 Sep 22 23:15:07.838 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/06B.db"
40029 Sep 22 23:15:07.838 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40030 Sep 22 23:15:07.838 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/06B.replace" to "/tmp/downstairs-vrx8aK6L/00/000/06B.completed"
40031 Sep 22 23:15:07.838 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40032 Sep 22 23:15:07.838 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40033 Sep 22 23:15:07.838 DEBG [0] It's time to notify for 214
40034 Sep 22 23:15:07.838 INFO Completion from [0] id:214 status:true
40035 Sep 22 23:15:07.838 INFO [215/752] Repair commands completed
40036 Sep 22 23:15:07.838 INFO Pop front: ReconcileIO { id: ReconciliationId(215), op: ExtentReopen { repair_id: ReconciliationId(215), extent_id: 107 }, state: ClientData([New, New, New]) }
40037 Sep 22 23:15:07.838 INFO Sent repair work, now wait for resp
40038 Sep 22 23:15:07.838 INFO [0] received reconcile message
40039 Sep 22 23:15:07.838 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(215), op: ExtentReopen { repair_id: ReconciliationId(215), extent_id: 107 }, state: ClientData([InProgress, New, New]) }, : downstairs
40040 Sep 22 23:15:07.838 INFO [0] client ExtentReopen { repair_id: ReconciliationId(215), extent_id: 107 }
40041 Sep 22 23:15:07.838 INFO [1] received reconcile message
40042 Sep 22 23:15:07.838 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(215), op: ExtentReopen { repair_id: ReconciliationId(215), extent_id: 107 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40043 Sep 22 23:15:07.839 INFO [1] client ExtentReopen { repair_id: ReconciliationId(215), extent_id: 107 }
40044 Sep 22 23:15:07.839 INFO [2] received reconcile message
40045 Sep 22 23:15:07.839 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(215), op: ExtentReopen { repair_id: ReconciliationId(215), extent_id: 107 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40046 Sep 22 23:15:07.839 INFO [2] client ExtentReopen { repair_id: ReconciliationId(215), extent_id: 107 }
40047 Sep 22 23:15:07.839 DEBG 215 Reopen extent 107
40048 Sep 22 23:15:07.839 DEBG 215 Reopen extent 107
40049 Sep 22 23:15:07.840 DEBG 215 Reopen extent 107
40050 Sep 22 23:15:07.841 DEBG [2] It's time to notify for 215
40051 Sep 22 23:15:07.841 INFO Completion from [2] id:215 status:true
40052 Sep 22 23:15:07.841 INFO [216/752] Repair commands completed
40053 Sep 22 23:15:07.841 INFO Pop front: ReconcileIO { id: ReconciliationId(216), op: ExtentFlush { repair_id: ReconciliationId(216), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40054 Sep 22 23:15:07.841 INFO Sent repair work, now wait for resp
40055 Sep 22 23:15:07.841 INFO [0] received reconcile message
40056 Sep 22 23:15:07.841 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(216), op: ExtentFlush { repair_id: ReconciliationId(216), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40057 Sep 22 23:15:07.841 INFO [0] client ExtentFlush { repair_id: ReconciliationId(216), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40058 Sep 22 23:15:07.841 INFO [1] received reconcile message
40059 Sep 22 23:15:07.841 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(216), op: ExtentFlush { repair_id: ReconciliationId(216), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40060 Sep 22 23:15:07.841 INFO [1] client ExtentFlush { repair_id: ReconciliationId(216), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40061 Sep 22 23:15:07.841 INFO [2] received reconcile message
40062 Sep 22 23:15:07.841 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(216), op: ExtentFlush { repair_id: ReconciliationId(216), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40063 Sep 22 23:15:07.841 INFO [2] client ExtentFlush { repair_id: ReconciliationId(216), extent_id: 150, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40064 Sep 22 23:15:07.841 DEBG 216 Flush extent 150 with f:2 g:2
40065 Sep 22 23:15:07.841 DEBG Flush just extent 150 with f:2 and g:2
40066 Sep 22 23:15:07.841 DEBG [1] It's time to notify for 216
40067 Sep 22 23:15:07.841 INFO Completion from [1] id:216 status:true
40068 Sep 22 23:15:07.841 INFO [217/752] Repair commands completed
40069 Sep 22 23:15:07.841 INFO Pop front: ReconcileIO { id: ReconciliationId(217), op: ExtentClose { repair_id: ReconciliationId(217), extent_id: 150 }, state: ClientData([New, New, New]) }
40070 Sep 22 23:15:07.841 INFO Sent repair work, now wait for resp
40071 Sep 22 23:15:07.841 INFO [0] received reconcile message
40072 Sep 22 23:15:07.841 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(217), op: ExtentClose { repair_id: ReconciliationId(217), extent_id: 150 }, state: ClientData([InProgress, New, New]) }, : downstairs
40073 Sep 22 23:15:07.841 INFO [0] client ExtentClose { repair_id: ReconciliationId(217), extent_id: 150 }
40074 Sep 22 23:15:07.841 INFO [1] received reconcile message
40075 Sep 22 23:15:07.841 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(217), op: ExtentClose { repair_id: ReconciliationId(217), extent_id: 150 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40076 Sep 22 23:15:07.841 INFO [1] client ExtentClose { repair_id: ReconciliationId(217), extent_id: 150 }
40077 Sep 22 23:15:07.841 INFO [2] received reconcile message
40078 Sep 22 23:15:07.841 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(217), op: ExtentClose { repair_id: ReconciliationId(217), extent_id: 150 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40079 Sep 22 23:15:07.841 INFO [2] client ExtentClose { repair_id: ReconciliationId(217), extent_id: 150 }
40080 Sep 22 23:15:07.842 DEBG 217 Close extent 150
40081 Sep 22 23:15:07.842 DEBG 217 Close extent 150
40082 Sep 22 23:15:07.842 DEBG 217 Close extent 150
40083 Sep 22 23:15:07.842 DEBG [2] It's time to notify for 217
40084 Sep 22 23:15:07.842 INFO Completion from [2] id:217 status:true
40085 Sep 22 23:15:07.843 INFO [218/752] Repair commands completed
40086 Sep 22 23:15:07.843 INFO Pop front: ReconcileIO { id: ReconciliationId(218), op: ExtentRepair { repair_id: ReconciliationId(218), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40087 Sep 22 23:15:07.843 INFO Sent repair work, now wait for resp
40088 Sep 22 23:15:07.843 INFO [0] received reconcile message
40089 Sep 22 23:15:07.843 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(218), op: ExtentRepair { repair_id: ReconciliationId(218), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40090 Sep 22 23:15:07.843 INFO [0] client ExtentRepair { repair_id: ReconciliationId(218), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40091 Sep 22 23:15:07.843 INFO [0] Sending repair request ReconciliationId(218)
40092 Sep 22 23:15:07.843 INFO [1] received reconcile message
40093 Sep 22 23:15:07.843 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(218), op: ExtentRepair { repair_id: ReconciliationId(218), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40094 Sep 22 23:15:07.843 INFO [1] client ExtentRepair { repair_id: ReconciliationId(218), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40095 Sep 22 23:15:07.843 INFO [1] No action required ReconciliationId(218)
40096 Sep 22 23:15:07.843 INFO [2] received reconcile message
40097 Sep 22 23:15:07.843 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(218), op: ExtentRepair { repair_id: ReconciliationId(218), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40098 Sep 22 23:15:07.843 INFO [2] client ExtentRepair { repair_id: ReconciliationId(218), extent_id: 150, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40099 Sep 22 23:15:07.843 INFO [2] No action required ReconciliationId(218)
40100 Sep 22 23:15:07.843 DEBG 218 Repair extent 150 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
40101 Sep 22 23:15:07.843 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/096.copy"
40102 Sep 22 23:15:07.906 INFO accepted connection, remote_addr: 127.0.0.1:56142, local_addr: 127.0.0.1:46213, task: repair
40103 Sep 22 23:15:07.906 TRCE incoming request, uri: /extent/150/files, method: GET, req_id: 485308e5-feac-4a15-a065-bbb6ab5eff82, remote_addr: 127.0.0.1:56142, local_addr: 127.0.0.1:46213, task: repair
40104 Sep 22 23:15:07.906 INFO request completed, latency_us: 213, response_code: 200, uri: /extent/150/files, method: GET, req_id: 485308e5-feac-4a15-a065-bbb6ab5eff82, remote_addr: 127.0.0.1:56142, local_addr: 127.0.0.1:46213, task: repair
40105 Sep 22 23:15:07.907 INFO eid:150 Found repair files: ["096", "096.db"]
40106 Sep 22 23:15:07.907 TRCE incoming request, uri: /newextent/150/data, method: GET, req_id: 25fa0698-c715-466e-931c-f2cec1cdcf94, remote_addr: 127.0.0.1:56142, local_addr: 127.0.0.1:46213, task: repair
40107 Sep 22 23:15:07.907 INFO request completed, latency_us: 256, response_code: 200, uri: /newextent/150/data, method: GET, req_id: 25fa0698-c715-466e-931c-f2cec1cdcf94, remote_addr: 127.0.0.1:56142, local_addr: 127.0.0.1:46213, task: repair
40108 Sep 22 23:15:07.912 TRCE incoming request, uri: /newextent/150/db, method: GET, req_id: ba231f0f-72b5-4e75-bf5b-c910f85870ad, remote_addr: 127.0.0.1:56142, local_addr: 127.0.0.1:46213, task: repair
40109 Sep 22 23:15:07.913 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/150/db, method: GET, req_id: ba231f0f-72b5-4e75-bf5b-c910f85870ad, remote_addr: 127.0.0.1:56142, local_addr: 127.0.0.1:46213, task: repair
40110 Sep 22 23:15:07.914 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/096.copy" to "/tmp/downstairs-vrx8aK6L/00/000/096.replace"
40111 Sep 22 23:15:07.914 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40112 Sep 22 23:15:07.915 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/096.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
40113 Sep 22 23:15:07.915 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/096"
40114 Sep 22 23:15:07.915 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/096.db"
40115 Sep 22 23:15:07.915 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40116 Sep 22 23:15:07.915 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/096.replace" to "/tmp/downstairs-vrx8aK6L/00/000/096.completed"
40117 Sep 22 23:15:07.915 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40118 Sep 22 23:15:07.915 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40119 Sep 22 23:15:07.915 DEBG [0] It's time to notify for 218
40120 Sep 22 23:15:07.916 INFO Completion from [0] id:218 status:true
40121 Sep 22 23:15:07.916 INFO [219/752] Repair commands completed
40122 Sep 22 23:15:07.916 INFO Pop front: ReconcileIO { id: ReconciliationId(219), op: ExtentReopen { repair_id: ReconciliationId(219), extent_id: 150 }, state: ClientData([New, New, New]) }
40123 Sep 22 23:15:07.916 INFO Sent repair work, now wait for resp
40124 Sep 22 23:15:07.916 INFO [0] received reconcile message
40125 Sep 22 23:15:07.916 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(219), op: ExtentReopen { repair_id: ReconciliationId(219), extent_id: 150 }, state: ClientData([InProgress, New, New]) }, : downstairs
40126 Sep 22 23:15:07.916 INFO [0] client ExtentReopen { repair_id: ReconciliationId(219), extent_id: 150 }
40127 Sep 22 23:15:07.916 INFO [1] received reconcile message
40128 Sep 22 23:15:07.916 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(219), op: ExtentReopen { repair_id: ReconciliationId(219), extent_id: 150 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40129 Sep 22 23:15:07.916 INFO [1] client ExtentReopen { repair_id: ReconciliationId(219), extent_id: 150 }
40130 Sep 22 23:15:07.916 INFO [2] received reconcile message
40131 Sep 22 23:15:07.916 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(219), op: ExtentReopen { repair_id: ReconciliationId(219), extent_id: 150 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40132 Sep 22 23:15:07.916 INFO [2] client ExtentReopen { repair_id: ReconciliationId(219), extent_id: 150 }
40133 Sep 22 23:15:07.916 DEBG 219 Reopen extent 150
40134 Sep 22 23:15:07.917 DEBG 219 Reopen extent 150
40135 Sep 22 23:15:07.917 DEBG 219 Reopen extent 150
40136 Sep 22 23:15:07.918 DEBG [2] It's time to notify for 219
40137 Sep 22 23:15:07.918 INFO Completion from [2] id:219 status:true
40138 Sep 22 23:15:07.918 INFO [220/752] Repair commands completed
40139 Sep 22 23:15:07.918 INFO Pop front: ReconcileIO { id: ReconciliationId(220), op: ExtentFlush { repair_id: ReconciliationId(220), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40140 Sep 22 23:15:07.918 INFO Sent repair work, now wait for resp
40141 Sep 22 23:15:07.918 INFO [0] received reconcile message
40142 Sep 22 23:15:07.918 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(220), op: ExtentFlush { repair_id: ReconciliationId(220), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40143 Sep 22 23:15:07.918 INFO [0] client ExtentFlush { repair_id: ReconciliationId(220), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40144 Sep 22 23:15:07.918 INFO [1] received reconcile message
40145 Sep 22 23:15:07.918 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(220), op: ExtentFlush { repair_id: ReconciliationId(220), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40146 Sep 22 23:15:07.918 INFO [1] client ExtentFlush { repair_id: ReconciliationId(220), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40147 Sep 22 23:15:07.918 INFO [2] received reconcile message
40148 Sep 22 23:15:07.918 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(220), op: ExtentFlush { repair_id: ReconciliationId(220), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40149 Sep 22 23:15:07.918 INFO [2] client ExtentFlush { repair_id: ReconciliationId(220), extent_id: 164, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40150 Sep 22 23:15:07.918 DEBG 220 Flush extent 164 with f:2 g:2
40151 Sep 22 23:15:07.918 DEBG Flush just extent 164 with f:2 and g:2
40152 Sep 22 23:15:07.918 DEBG [1] It's time to notify for 220
40153 Sep 22 23:15:07.918 INFO Completion from [1] id:220 status:true
40154 Sep 22 23:15:07.918 INFO [221/752] Repair commands completed
40155 Sep 22 23:15:07.918 INFO Pop front: ReconcileIO { id: ReconciliationId(221), op: ExtentClose { repair_id: ReconciliationId(221), extent_id: 164 }, state: ClientData([New, New, New]) }
40156 Sep 22 23:15:07.918 INFO Sent repair work, now wait for resp
40157 Sep 22 23:15:07.918 INFO [0] received reconcile message
40158 Sep 22 23:15:07.918 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(221), op: ExtentClose { repair_id: ReconciliationId(221), extent_id: 164 }, state: ClientData([InProgress, New, New]) }, : downstairs
40159 Sep 22 23:15:07.918 INFO [0] client ExtentClose { repair_id: ReconciliationId(221), extent_id: 164 }
40160 Sep 22 23:15:07.918 INFO [1] received reconcile message
40161 Sep 22 23:15:07.918 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(221), op: ExtentClose { repair_id: ReconciliationId(221), extent_id: 164 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40162 Sep 22 23:15:07.919 INFO [1] client ExtentClose { repair_id: ReconciliationId(221), extent_id: 164 }
40163 Sep 22 23:15:07.919 INFO [2] received reconcile message
40164 Sep 22 23:15:07.919 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(221), op: ExtentClose { repair_id: ReconciliationId(221), extent_id: 164 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40165 Sep 22 23:15:07.919 INFO [2] client ExtentClose { repair_id: ReconciliationId(221), extent_id: 164 }
40166 Sep 22 23:15:07.919 DEBG 221 Close extent 164
40167 Sep 22 23:15:07.919 DEBG 221 Close extent 164
40168 Sep 22 23:15:07.919 DEBG 221 Close extent 164
40169 Sep 22 23:15:07.920 DEBG [2] It's time to notify for 221
40170 Sep 22 23:15:07.920 INFO Completion from [2] id:221 status:true
40171 Sep 22 23:15:07.920 INFO [222/752] Repair commands completed
40172 Sep 22 23:15:07.920 INFO Pop front: ReconcileIO { id: ReconciliationId(222), op: ExtentRepair { repair_id: ReconciliationId(222), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40173 Sep 22 23:15:07.920 INFO Sent repair work, now wait for resp
40174 Sep 22 23:15:07.920 INFO [0] received reconcile message
40175 Sep 22 23:15:07.920 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(222), op: ExtentRepair { repair_id: ReconciliationId(222), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40176 Sep 22 23:15:07.920 INFO [0] client ExtentRepair { repair_id: ReconciliationId(222), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40177 Sep 22 23:15:07.920 INFO [0] Sending repair request ReconciliationId(222)
40178 Sep 22 23:15:07.920 INFO [1] received reconcile message
40179 Sep 22 23:15:07.920 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(222), op: ExtentRepair { repair_id: ReconciliationId(222), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40180 Sep 22 23:15:07.920 INFO [1] client ExtentRepair { repair_id: ReconciliationId(222), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40181 Sep 22 23:15:07.920 INFO [1] No action required ReconciliationId(222)
40182 Sep 22 23:15:07.920 INFO [2] received reconcile message
40183 Sep 22 23:15:07.920 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(222), op: ExtentRepair { repair_id: ReconciliationId(222), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40184 Sep 22 23:15:07.920 INFO [2] client ExtentRepair { repair_id: ReconciliationId(222), extent_id: 164, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40185 Sep 22 23:15:07.920 INFO [2] No action required ReconciliationId(222)
40186 Sep 22 23:15:07.920 DEBG 222 Repair extent 164 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
40187 Sep 22 23:15:07.920 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0A4.copy"
40188 Sep 22 23:15:07.982 INFO accepted connection, remote_addr: 127.0.0.1:41953, local_addr: 127.0.0.1:46213, task: repair
40189 Sep 22 23:15:07.982 TRCE incoming request, uri: /extent/164/files, method: GET, req_id: f1347db1-6abe-4744-a5de-1498938e6d10, remote_addr: 127.0.0.1:41953, local_addr: 127.0.0.1:46213, task: repair
40190 Sep 22 23:15:07.982 INFO request completed, latency_us: 230, response_code: 200, uri: /extent/164/files, method: GET, req_id: f1347db1-6abe-4744-a5de-1498938e6d10, remote_addr: 127.0.0.1:41953, local_addr: 127.0.0.1:46213, task: repair
40191 Sep 22 23:15:07.983 INFO eid:164 Found repair files: ["0A4", "0A4.db"]
40192 Sep 22 23:15:07.983 TRCE incoming request, uri: /newextent/164/data, method: GET, req_id: cdc6ddd0-c5e9-4729-9fcd-3d9ecfb05af6, remote_addr: 127.0.0.1:41953, local_addr: 127.0.0.1:46213, task: repair
40193 Sep 22 23:15:07.983 INFO request completed, latency_us: 340, response_code: 200, uri: /newextent/164/data, method: GET, req_id: cdc6ddd0-c5e9-4729-9fcd-3d9ecfb05af6, remote_addr: 127.0.0.1:41953, local_addr: 127.0.0.1:46213, task: repair
40194 Sep 22 23:15:07.988 TRCE incoming request, uri: /newextent/164/db, method: GET, req_id: 6cb19581-325a-499e-99cc-96959db5020c, remote_addr: 127.0.0.1:41953, local_addr: 127.0.0.1:46213, task: repair
40195 Sep 22 23:15:07.988 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/164/db, method: GET, req_id: 6cb19581-325a-499e-99cc-96959db5020c, remote_addr: 127.0.0.1:41953, local_addr: 127.0.0.1:46213, task: repair
40196 Sep 22 23:15:07.990 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0A4.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0A4.replace"
40197 Sep 22 23:15:07.990 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40198 Sep 22 23:15:07.990 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0A4.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
40199 Sep 22 23:15:07.991 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A4"
40200 Sep 22 23:15:07.991 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A4.db"
40201 Sep 22 23:15:07.991 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40202 Sep 22 23:15:07.991 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0A4.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0A4.completed"
40203 Sep 22 23:15:07.991 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40204 Sep 22 23:15:07.991 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40205 Sep 22 23:15:07.991 DEBG [0] It's time to notify for 222
40206 Sep 22 23:15:07.991 INFO Completion from [0] id:222 status:true
40207 Sep 22 23:15:07.991 INFO [223/752] Repair commands completed
40208 Sep 22 23:15:07.991 INFO Pop front: ReconcileIO { id: ReconciliationId(223), op: ExtentReopen { repair_id: ReconciliationId(223), extent_id: 164 }, state: ClientData([New, New, New]) }
40209 Sep 22 23:15:07.991 INFO Sent repair work, now wait for resp
40210 Sep 22 23:15:07.991 INFO [0] received reconcile message
40211 Sep 22 23:15:07.991 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(223), op: ExtentReopen { repair_id: ReconciliationId(223), extent_id: 164 }, state: ClientData([InProgress, New, New]) }, : downstairs
40212 Sep 22 23:15:07.991 INFO [0] client ExtentReopen { repair_id: ReconciliationId(223), extent_id: 164 }
40213 Sep 22 23:15:07.991 INFO [1] received reconcile message
40214 Sep 22 23:15:07.991 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(223), op: ExtentReopen { repair_id: ReconciliationId(223), extent_id: 164 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40215 Sep 22 23:15:07.991 INFO [1] client ExtentReopen { repair_id: ReconciliationId(223), extent_id: 164 }
40216 Sep 22 23:15:07.992 INFO [2] received reconcile message
40217 Sep 22 23:15:07.992 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(223), op: ExtentReopen { repair_id: ReconciliationId(223), extent_id: 164 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40218 Sep 22 23:15:07.992 INFO [2] client ExtentReopen { repair_id: ReconciliationId(223), extent_id: 164 }
40219 Sep 22 23:15:07.992 DEBG 223 Reopen extent 164
40220 Sep 22 23:15:07.992 DEBG 223 Reopen extent 164
40221 Sep 22 23:15:07.993 DEBG 223 Reopen extent 164
40222 Sep 22 23:15:07.993 DEBG [2] It's time to notify for 223
40223 Sep 22 23:15:07.994 INFO Completion from [2] id:223 status:true
40224 Sep 22 23:15:07.994 INFO [224/752] Repair commands completed
40225 Sep 22 23:15:07.994 INFO Pop front: ReconcileIO { id: ReconciliationId(224), op: ExtentFlush { repair_id: ReconciliationId(224), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40226 Sep 22 23:15:07.994 INFO Sent repair work, now wait for resp
40227 Sep 22 23:15:07.994 INFO [0] received reconcile message
40228 Sep 22 23:15:07.994 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(224), op: ExtentFlush { repair_id: ReconciliationId(224), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40229 Sep 22 23:15:07.994 INFO [0] client ExtentFlush { repair_id: ReconciliationId(224), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40230 Sep 22 23:15:07.994 INFO [1] received reconcile message
40231 Sep 22 23:15:07.994 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(224), op: ExtentFlush { repair_id: ReconciliationId(224), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40232 Sep 22 23:15:07.994 INFO [1] client ExtentFlush { repair_id: ReconciliationId(224), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40233 Sep 22 23:15:07.994 INFO [2] received reconcile message
40234 Sep 22 23:15:07.994 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(224), op: ExtentFlush { repair_id: ReconciliationId(224), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40235 Sep 22 23:15:07.994 INFO [2] client ExtentFlush { repair_id: ReconciliationId(224), extent_id: 6, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40236 Sep 22 23:15:07.994 DEBG 224 Flush extent 6 with f:2 g:2
40237 Sep 22 23:15:07.994 DEBG Flush just extent 6 with f:2 and g:2
40238 Sep 22 23:15:07.994 DEBG [1] It's time to notify for 224
40239 Sep 22 23:15:07.994 INFO Completion from [1] id:224 status:true
40240 Sep 22 23:15:07.994 INFO [225/752] Repair commands completed
40241 Sep 22 23:15:07.994 INFO Pop front: ReconcileIO { id: ReconciliationId(225), op: ExtentClose { repair_id: ReconciliationId(225), extent_id: 6 }, state: ClientData([New, New, New]) }
40242 Sep 22 23:15:07.994 INFO Sent repair work, now wait for resp
40243 Sep 22 23:15:07.994 INFO [0] received reconcile message
40244 Sep 22 23:15:07.994 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(225), op: ExtentClose { repair_id: ReconciliationId(225), extent_id: 6 }, state: ClientData([InProgress, New, New]) }, : downstairs
40245 Sep 22 23:15:07.994 INFO [0] client ExtentClose { repair_id: ReconciliationId(225), extent_id: 6 }
40246 Sep 22 23:15:07.994 INFO [1] received reconcile message
40247 Sep 22 23:15:07.994 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(225), op: ExtentClose { repair_id: ReconciliationId(225), extent_id: 6 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40248 Sep 22 23:15:07.994 INFO [1] client ExtentClose { repair_id: ReconciliationId(225), extent_id: 6 }
40249 Sep 22 23:15:07.994 INFO [2] received reconcile message
40250 Sep 22 23:15:07.994 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(225), op: ExtentClose { repair_id: ReconciliationId(225), extent_id: 6 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40251 Sep 22 23:15:07.994 INFO [2] client ExtentClose { repair_id: ReconciliationId(225), extent_id: 6 }
40252 Sep 22 23:15:07.995 DEBG 225 Close extent 6
40253 Sep 22 23:15:07.995 DEBG 225 Close extent 6
40254 Sep 22 23:15:07.995 DEBG 225 Close extent 6
40255 Sep 22 23:15:07.995 DEBG [2] It's time to notify for 225
40256 Sep 22 23:15:07.995 INFO Completion from [2] id:225 status:true
40257 Sep 22 23:15:07.996 INFO [226/752] Repair commands completed
40258 Sep 22 23:15:07.996 INFO Pop front: ReconcileIO { id: ReconciliationId(226), op: ExtentRepair { repair_id: ReconciliationId(226), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40259 Sep 22 23:15:07.996 INFO Sent repair work, now wait for resp
40260 Sep 22 23:15:07.996 INFO [0] received reconcile message
40261 Sep 22 23:15:07.996 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(226), op: ExtentRepair { repair_id: ReconciliationId(226), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40262 Sep 22 23:15:07.996 INFO [0] client ExtentRepair { repair_id: ReconciliationId(226), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40263 Sep 22 23:15:07.996 INFO [0] Sending repair request ReconciliationId(226)
40264 Sep 22 23:15:07.996 INFO [1] received reconcile message
40265 Sep 22 23:15:07.996 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(226), op: ExtentRepair { repair_id: ReconciliationId(226), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40266 Sep 22 23:15:07.996 INFO [1] client ExtentRepair { repair_id: ReconciliationId(226), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40267 Sep 22 23:15:07.996 INFO [1] No action required ReconciliationId(226)
40268 Sep 22 23:15:07.996 INFO [2] received reconcile message
40269 Sep 22 23:15:07.996 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(226), op: ExtentRepair { repair_id: ReconciliationId(226), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40270 Sep 22 23:15:07.996 INFO [2] client ExtentRepair { repair_id: ReconciliationId(226), extent_id: 6, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40271 Sep 22 23:15:07.996 INFO [2] No action required ReconciliationId(226)
40272 Sep 22 23:15:07.996 DEBG 226 Repair extent 6 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
40273 Sep 22 23:15:07.996 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/006.copy"
40274 Sep 22 23:15:08.062 INFO accepted connection, remote_addr: 127.0.0.1:53023, local_addr: 127.0.0.1:46213, task: repair
40275 Sep 22 23:15:08.062 TRCE incoming request, uri: /extent/6/files, method: GET, req_id: 3c6e4bb7-3232-4151-b908-91455dd1ddf8, remote_addr: 127.0.0.1:53023, local_addr: 127.0.0.1:46213, task: repair
40276 Sep 22 23:15:08.062 INFO request completed, latency_us: 273, response_code: 200, uri: /extent/6/files, method: GET, req_id: 3c6e4bb7-3232-4151-b908-91455dd1ddf8, remote_addr: 127.0.0.1:53023, local_addr: 127.0.0.1:46213, task: repair
40277 Sep 22 23:15:08.063 INFO eid:6 Found repair files: ["006", "006.db"]
40278 Sep 22 23:15:08.063 TRCE incoming request, uri: /newextent/6/data, method: GET, req_id: d601c36f-2536-4edf-8e17-04ad840d7ed9, remote_addr: 127.0.0.1:53023, local_addr: 127.0.0.1:46213, task: repair
40279 Sep 22 23:15:08.063 INFO request completed, latency_us: 354, response_code: 200, uri: /newextent/6/data, method: GET, req_id: d601c36f-2536-4edf-8e17-04ad840d7ed9, remote_addr: 127.0.0.1:53023, local_addr: 127.0.0.1:46213, task: repair
40280 Sep 22 23:15:08.068 TRCE incoming request, uri: /newextent/6/db, method: GET, req_id: d33e26a8-722a-41a1-affd-84a567874bef, remote_addr: 127.0.0.1:53023, local_addr: 127.0.0.1:46213, task: repair
40281 Sep 22 23:15:08.069 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/6/db, method: GET, req_id: d33e26a8-722a-41a1-affd-84a567874bef, remote_addr: 127.0.0.1:53023, local_addr: 127.0.0.1:46213, task: repair
40282 Sep 22 23:15:08.070 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/006.copy" to "/tmp/downstairs-vrx8aK6L/00/000/006.replace"
40283 Sep 22 23:15:08.070 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40284 Sep 22 23:15:08.071 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/006.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
40285 Sep 22 23:15:08.071 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/006"
40286 Sep 22 23:15:08.071 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/006.db"
40287 Sep 22 23:15:08.071 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40288 Sep 22 23:15:08.071 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/006.replace" to "/tmp/downstairs-vrx8aK6L/00/000/006.completed"
40289 Sep 22 23:15:08.071 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40290 Sep 22 23:15:08.072 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40291 Sep 22 23:15:08.072 DEBG [0] It's time to notify for 226
40292 Sep 22 23:15:08.072 INFO Completion from [0] id:226 status:true
40293 Sep 22 23:15:08.072 INFO [227/752] Repair commands completed
40294 Sep 22 23:15:08.072 INFO Pop front: ReconcileIO { id: ReconciliationId(227), op: ExtentReopen { repair_id: ReconciliationId(227), extent_id: 6 }, state: ClientData([New, New, New]) }
40295 Sep 22 23:15:08.072 INFO Sent repair work, now wait for resp
40296 Sep 22 23:15:08.072 INFO [0] received reconcile message
40297 Sep 22 23:15:08.072 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(227), op: ExtentReopen { repair_id: ReconciliationId(227), extent_id: 6 }, state: ClientData([InProgress, New, New]) }, : downstairs
40298 Sep 22 23:15:08.072 INFO [0] client ExtentReopen { repair_id: ReconciliationId(227), extent_id: 6 }
40299 Sep 22 23:15:08.072 INFO [1] received reconcile message
40300 Sep 22 23:15:08.072 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(227), op: ExtentReopen { repair_id: ReconciliationId(227), extent_id: 6 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40301 Sep 22 23:15:08.072 INFO [1] client ExtentReopen { repair_id: ReconciliationId(227), extent_id: 6 }
40302 Sep 22 23:15:08.072 INFO [2] received reconcile message
40303 Sep 22 23:15:08.072 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(227), op: ExtentReopen { repair_id: ReconciliationId(227), extent_id: 6 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40304 Sep 22 23:15:08.072 INFO [2] client ExtentReopen { repair_id: ReconciliationId(227), extent_id: 6 }
40305 Sep 22 23:15:08.072 DEBG 227 Reopen extent 6
40306 Sep 22 23:15:08.073 DEBG 227 Reopen extent 6
40307 Sep 22 23:15:08.074 DEBG 227 Reopen extent 6
40308 Sep 22 23:15:08.074 DEBG [2] It's time to notify for 227
40309 Sep 22 23:15:08.074 INFO Completion from [2] id:227 status:true
40310 Sep 22 23:15:08.074 INFO [228/752] Repair commands completed
40311 Sep 22 23:15:08.074 INFO Pop front: ReconcileIO { id: ReconciliationId(228), op: ExtentFlush { repair_id: ReconciliationId(228), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40312 Sep 22 23:15:08.075 INFO Sent repair work, now wait for resp
40313 Sep 22 23:15:08.075 INFO [0] received reconcile message
40314 Sep 22 23:15:08.075 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(228), op: ExtentFlush { repair_id: ReconciliationId(228), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40315 Sep 22 23:15:08.075 INFO [0] client ExtentFlush { repair_id: ReconciliationId(228), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40316 Sep 22 23:15:08.075 INFO [1] received reconcile message
40317 Sep 22 23:15:08.075 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(228), op: ExtentFlush { repair_id: ReconciliationId(228), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40318 Sep 22 23:15:08.075 INFO [1] client ExtentFlush { repair_id: ReconciliationId(228), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40319 Sep 22 23:15:08.075 INFO [2] received reconcile message
40320 Sep 22 23:15:08.075 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(228), op: ExtentFlush { repair_id: ReconciliationId(228), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40321 Sep 22 23:15:08.075 INFO [2] client ExtentFlush { repair_id: ReconciliationId(228), extent_id: 52, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40322 Sep 22 23:15:08.075 DEBG 228 Flush extent 52 with f:2 g:2
40323 Sep 22 23:15:08.075 DEBG Flush just extent 52 with f:2 and g:2
40324 Sep 22 23:15:08.075 DEBG [1] It's time to notify for 228
40325 Sep 22 23:15:08.075 INFO Completion from [1] id:228 status:true
40326 Sep 22 23:15:08.075 INFO [229/752] Repair commands completed
40327 Sep 22 23:15:08.075 INFO Pop front: ReconcileIO { id: ReconciliationId(229), op: ExtentClose { repair_id: ReconciliationId(229), extent_id: 52 }, state: ClientData([New, New, New]) }
40328 Sep 22 23:15:08.075 INFO Sent repair work, now wait for resp
40329 Sep 22 23:15:08.075 INFO [0] received reconcile message
40330 Sep 22 23:15:08.075 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(229), op: ExtentClose { repair_id: ReconciliationId(229), extent_id: 52 }, state: ClientData([InProgress, New, New]) }, : downstairs
40331 Sep 22 23:15:08.075 INFO [0] client ExtentClose { repair_id: ReconciliationId(229), extent_id: 52 }
40332 Sep 22 23:15:08.075 INFO [1] received reconcile message
40333 Sep 22 23:15:08.075 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(229), op: ExtentClose { repair_id: ReconciliationId(229), extent_id: 52 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40334 Sep 22 23:15:08.075 INFO [1] client ExtentClose { repair_id: ReconciliationId(229), extent_id: 52 }
40335 Sep 22 23:15:08.075 INFO [2] received reconcile message
40336 Sep 22 23:15:08.075 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(229), op: ExtentClose { repair_id: ReconciliationId(229), extent_id: 52 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40337 Sep 22 23:15:08.075 INFO [2] client ExtentClose { repair_id: ReconciliationId(229), extent_id: 52 }
40338 Sep 22 23:15:08.075 DEBG 229 Close extent 52
40339 Sep 22 23:15:08.076 DEBG 229 Close extent 52
40340 Sep 22 23:15:08.076 DEBG 229 Close extent 52
40341 Sep 22 23:15:08.076 DEBG [2] It's time to notify for 229
40342 Sep 22 23:15:08.076 INFO Completion from [2] id:229 status:true
40343 Sep 22 23:15:08.076 INFO [230/752] Repair commands completed
40344 Sep 22 23:15:08.076 INFO Pop front: ReconcileIO { id: ReconciliationId(230), op: ExtentRepair { repair_id: ReconciliationId(230), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40345 Sep 22 23:15:08.076 INFO Sent repair work, now wait for resp
40346 Sep 22 23:15:08.077 INFO [0] received reconcile message
40347 Sep 22 23:15:08.077 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(230), op: ExtentRepair { repair_id: ReconciliationId(230), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40348 Sep 22 23:15:08.077 INFO [0] client ExtentRepair { repair_id: ReconciliationId(230), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40349 Sep 22 23:15:08.077 INFO [0] Sending repair request ReconciliationId(230)
40350 Sep 22 23:15:08.077 INFO [1] received reconcile message
40351 Sep 22 23:15:08.077 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(230), op: ExtentRepair { repair_id: ReconciliationId(230), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40352 Sep 22 23:15:08.077 INFO [1] client ExtentRepair { repair_id: ReconciliationId(230), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40353 Sep 22 23:15:08.077 INFO [1] No action required ReconciliationId(230)
40354 Sep 22 23:15:08.077 INFO [2] received reconcile message
40355 Sep 22 23:15:08.077 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(230), op: ExtentRepair { repair_id: ReconciliationId(230), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40356 Sep 22 23:15:08.077 INFO [2] client ExtentRepair { repair_id: ReconciliationId(230), extent_id: 52, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40357 Sep 22 23:15:08.077 INFO [2] No action required ReconciliationId(230)
40358 Sep 22 23:15:08.077 DEBG 230 Repair extent 52 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
40359 Sep 22 23:15:08.077 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/034.copy"
40360 Sep 22 23:15:08.115 DEBG [1] Read AckReady 1078, : downstairs
40361 Sep 22 23:15:08.117 DEBG up_ds_listen was notified
40362 Sep 22 23:15:08.117 DEBG up_ds_listen process 1078
40363 Sep 22 23:15:08.117 DEBG [A] ack job 1078:79, : downstairs
40364 Sep 22 23:15:08.139 INFO accepted connection, remote_addr: 127.0.0.1:40497, local_addr: 127.0.0.1:46213, task: repair
40365 Sep 22 23:15:08.139 TRCE incoming request, uri: /extent/52/files, method: GET, req_id: 6f0dc614-ab4b-4240-9a0e-ac44346765fb, remote_addr: 127.0.0.1:40497, local_addr: 127.0.0.1:46213, task: repair
40366 Sep 22 23:15:08.140 INFO request completed, latency_us: 236, response_code: 200, uri: /extent/52/files, method: GET, req_id: 6f0dc614-ab4b-4240-9a0e-ac44346765fb, remote_addr: 127.0.0.1:40497, local_addr: 127.0.0.1:46213, task: repair
40367 Sep 22 23:15:08.140 INFO eid:52 Found repair files: ["034", "034.db"]
40368 Sep 22 23:15:08.140 TRCE incoming request, uri: /newextent/52/data, method: GET, req_id: 8012db34-2bfd-4f35-9899-fccf1fb7484b, remote_addr: 127.0.0.1:40497, local_addr: 127.0.0.1:46213, task: repair
40369 Sep 22 23:15:08.141 INFO request completed, latency_us: 343, response_code: 200, uri: /newextent/52/data, method: GET, req_id: 8012db34-2bfd-4f35-9899-fccf1fb7484b, remote_addr: 127.0.0.1:40497, local_addr: 127.0.0.1:46213, task: repair
40370 Sep 22 23:15:08.146 TRCE incoming request, uri: /newextent/52/db, method: GET, req_id: ff883fb6-6497-4197-ab81-eee86b2297d9, remote_addr: 127.0.0.1:40497, local_addr: 127.0.0.1:46213, task: repair
40371 Sep 22 23:15:08.146 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/52/db, method: GET, req_id: ff883fb6-6497-4197-ab81-eee86b2297d9, remote_addr: 127.0.0.1:40497, local_addr: 127.0.0.1:46213, task: repair
40372 Sep 22 23:15:08.147 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/034.copy" to "/tmp/downstairs-vrx8aK6L/00/000/034.replace"
40373 Sep 22 23:15:08.147 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40374 Sep 22 23:15:08.148 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/034.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
40375 Sep 22 23:15:08.148 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/034"
40376 Sep 22 23:15:08.148 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/034.db"
40377 Sep 22 23:15:08.149 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40378 Sep 22 23:15:08.149 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/034.replace" to "/tmp/downstairs-vrx8aK6L/00/000/034.completed"
40379 Sep 22 23:15:08.149 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40380 Sep 22 23:15:08.149 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40381 Sep 22 23:15:08.149 DEBG [0] It's time to notify for 230
40382 Sep 22 23:15:08.149 INFO Completion from [0] id:230 status:true
40383 Sep 22 23:15:08.149 INFO [231/752] Repair commands completed
40384 Sep 22 23:15:08.149 INFO Pop front: ReconcileIO { id: ReconciliationId(231), op: ExtentReopen { repair_id: ReconciliationId(231), extent_id: 52 }, state: ClientData([New, New, New]) }
40385 Sep 22 23:15:08.149 INFO Sent repair work, now wait for resp
40386 Sep 22 23:15:08.149 INFO [0] received reconcile message
40387 Sep 22 23:15:08.149 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(231), op: ExtentReopen { repair_id: ReconciliationId(231), extent_id: 52 }, state: ClientData([InProgress, New, New]) }, : downstairs
40388 Sep 22 23:15:08.149 INFO [0] client ExtentReopen { repair_id: ReconciliationId(231), extent_id: 52 }
40389 Sep 22 23:15:08.149 INFO [1] received reconcile message
40390 Sep 22 23:15:08.149 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(231), op: ExtentReopen { repair_id: ReconciliationId(231), extent_id: 52 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40391 Sep 22 23:15:08.149 INFO [1] client ExtentReopen { repair_id: ReconciliationId(231), extent_id: 52 }
40392 Sep 22 23:15:08.149 INFO [2] received reconcile message
40393 Sep 22 23:15:08.149 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(231), op: ExtentReopen { repair_id: ReconciliationId(231), extent_id: 52 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40394 Sep 22 23:15:08.149 INFO [2] client ExtentReopen { repair_id: ReconciliationId(231), extent_id: 52 }
40395 Sep 22 23:15:08.149 DEBG 231 Reopen extent 52
40396 Sep 22 23:15:08.150 DEBG 231 Reopen extent 52
40397 Sep 22 23:15:08.151 DEBG 231 Reopen extent 52
40398 Sep 22 23:15:08.151 DEBG [2] It's time to notify for 231
40399 Sep 22 23:15:08.151 INFO Completion from [2] id:231 status:true
40400 Sep 22 23:15:08.151 INFO [232/752] Repair commands completed
40401 Sep 22 23:15:08.151 INFO Pop front: ReconcileIO { id: ReconciliationId(232), op: ExtentFlush { repair_id: ReconciliationId(232), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40402 Sep 22 23:15:08.151 INFO Sent repair work, now wait for resp
40403 Sep 22 23:15:08.151 INFO [0] received reconcile message
40404 Sep 22 23:15:08.151 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(232), op: ExtentFlush { repair_id: ReconciliationId(232), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40405 Sep 22 23:15:08.151 INFO [0] client ExtentFlush { repair_id: ReconciliationId(232), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40406 Sep 22 23:15:08.152 INFO [1] received reconcile message
40407 Sep 22 23:15:08.152 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(232), op: ExtentFlush { repair_id: ReconciliationId(232), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40408 Sep 22 23:15:08.152 INFO [1] client ExtentFlush { repair_id: ReconciliationId(232), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40409 Sep 22 23:15:08.152 INFO [2] received reconcile message
40410 Sep 22 23:15:08.152 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(232), op: ExtentFlush { repair_id: ReconciliationId(232), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40411 Sep 22 23:15:08.152 INFO [2] client ExtentFlush { repair_id: ReconciliationId(232), extent_id: 19, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40412 Sep 22 23:15:08.152 DEBG 232 Flush extent 19 with f:2 g:2
40413 Sep 22 23:15:08.152 DEBG Flush just extent 19 with f:2 and g:2
40414 Sep 22 23:15:08.152 DEBG [1] It's time to notify for 232
40415 Sep 22 23:15:08.152 INFO Completion from [1] id:232 status:true
40416 Sep 22 23:15:08.152 INFO [233/752] Repair commands completed
40417 Sep 22 23:15:08.152 INFO Pop front: ReconcileIO { id: ReconciliationId(233), op: ExtentClose { repair_id: ReconciliationId(233), extent_id: 19 }, state: ClientData([New, New, New]) }
40418 Sep 22 23:15:08.152 INFO Sent repair work, now wait for resp
40419 Sep 22 23:15:08.152 INFO [0] received reconcile message
40420 Sep 22 23:15:08.152 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(233), op: ExtentClose { repair_id: ReconciliationId(233), extent_id: 19 }, state: ClientData([InProgress, New, New]) }, : downstairs
40421 Sep 22 23:15:08.152 INFO [0] client ExtentClose { repair_id: ReconciliationId(233), extent_id: 19 }
40422 Sep 22 23:15:08.152 INFO [1] received reconcile message
40423 Sep 22 23:15:08.152 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(233), op: ExtentClose { repair_id: ReconciliationId(233), extent_id: 19 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40424 Sep 22 23:15:08.152 INFO [1] client ExtentClose { repair_id: ReconciliationId(233), extent_id: 19 }
40425 Sep 22 23:15:08.152 INFO [2] received reconcile message
40426 Sep 22 23:15:08.152 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(233), op: ExtentClose { repair_id: ReconciliationId(233), extent_id: 19 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40427 Sep 22 23:15:08.152 INFO [2] client ExtentClose { repair_id: ReconciliationId(233), extent_id: 19 }
40428 Sep 22 23:15:08.152 DEBG 233 Close extent 19
40429 Sep 22 23:15:08.153 DEBG 233 Close extent 19
40430 Sep 22 23:15:08.153 DEBG 233 Close extent 19
40431 Sep 22 23:15:08.153 DEBG [2] It's time to notify for 233
40432 Sep 22 23:15:08.153 INFO Completion from [2] id:233 status:true
40433 Sep 22 23:15:08.153 INFO [234/752] Repair commands completed
40434 Sep 22 23:15:08.153 INFO Pop front: ReconcileIO { id: ReconciliationId(234), op: ExtentRepair { repair_id: ReconciliationId(234), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40435 Sep 22 23:15:08.153 INFO Sent repair work, now wait for resp
40436 Sep 22 23:15:08.153 INFO [0] received reconcile message
40437 Sep 22 23:15:08.153 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(234), op: ExtentRepair { repair_id: ReconciliationId(234), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40438 Sep 22 23:15:08.153 INFO [0] client ExtentRepair { repair_id: ReconciliationId(234), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40439 Sep 22 23:15:08.153 INFO [0] Sending repair request ReconciliationId(234)
40440 Sep 22 23:15:08.153 INFO [1] received reconcile message
40441 Sep 22 23:15:08.154 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(234), op: ExtentRepair { repair_id: ReconciliationId(234), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40442 Sep 22 23:15:08.154 INFO [1] client ExtentRepair { repair_id: ReconciliationId(234), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40443 Sep 22 23:15:08.154 INFO [1] No action required ReconciliationId(234)
40444 Sep 22 23:15:08.154 INFO [2] received reconcile message
40445 Sep 22 23:15:08.154 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(234), op: ExtentRepair { repair_id: ReconciliationId(234), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40446 Sep 22 23:15:08.154 INFO [2] client ExtentRepair { repair_id: ReconciliationId(234), extent_id: 19, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40447 Sep 22 23:15:08.154 INFO [2] No action required ReconciliationId(234)
40448 Sep 22 23:15:08.154 DEBG 234 Repair extent 19 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
40449 Sep 22 23:15:08.154 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/013.copy"
40450 Sep 22 23:15:08.170 DEBG up_ds_listen checked 1 jobs, back to waiting
40451 Sep 22 23:15:08.216 INFO accepted connection, remote_addr: 127.0.0.1:39244, local_addr: 127.0.0.1:46213, task: repair
40452 Sep 22 23:15:08.216 TRCE incoming request, uri: /extent/19/files, method: GET, req_id: 9b0db1c6-7264-421b-a456-30ef20c327e0, remote_addr: 127.0.0.1:39244, local_addr: 127.0.0.1:46213, task: repair
40453 Sep 22 23:15:08.217 INFO request completed, latency_us: 216, response_code: 200, uri: /extent/19/files, method: GET, req_id: 9b0db1c6-7264-421b-a456-30ef20c327e0, remote_addr: 127.0.0.1:39244, local_addr: 127.0.0.1:46213, task: repair
40454 Sep 22 23:15:08.217 INFO eid:19 Found repair files: ["013", "013.db"]
40455 Sep 22 23:15:08.217 TRCE incoming request, uri: /newextent/19/data, method: GET, req_id: dd66a06d-be55-4f7a-954c-c287f356db99, remote_addr: 127.0.0.1:39244, local_addr: 127.0.0.1:46213, task: repair
40456 Sep 22 23:15:08.217 INFO request completed, latency_us: 329, response_code: 200, uri: /newextent/19/data, method: GET, req_id: dd66a06d-be55-4f7a-954c-c287f356db99, remote_addr: 127.0.0.1:39244, local_addr: 127.0.0.1:46213, task: repair
40457 Sep 22 23:15:08.222 TRCE incoming request, uri: /newextent/19/db, method: GET, req_id: 5ce3ed06-6c58-4a2b-8693-faa45cbb4e78, remote_addr: 127.0.0.1:39244, local_addr: 127.0.0.1:46213, task: repair
40458 Sep 22 23:15:08.223 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/19/db, method: GET, req_id: 5ce3ed06-6c58-4a2b-8693-faa45cbb4e78, remote_addr: 127.0.0.1:39244, local_addr: 127.0.0.1:46213, task: repair
40459 Sep 22 23:15:08.224 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/013.copy" to "/tmp/downstairs-vrx8aK6L/00/000/013.replace"
40460 Sep 22 23:15:08.224 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40461 Sep 22 23:15:08.225 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/013.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
40462 Sep 22 23:15:08.225 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/013"
40463 Sep 22 23:15:08.225 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/013.db"
40464 Sep 22 23:15:08.225 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40465 Sep 22 23:15:08.225 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/013.replace" to "/tmp/downstairs-vrx8aK6L/00/000/013.completed"
40466 Sep 22 23:15:08.225 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40467 Sep 22 23:15:08.225 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40468 Sep 22 23:15:08.226 DEBG [0] It's time to notify for 234
40469 Sep 22 23:15:08.226 INFO Completion from [0] id:234 status:true
40470 Sep 22 23:15:08.226 INFO [235/752] Repair commands completed
40471 Sep 22 23:15:08.226 INFO Pop front: ReconcileIO { id: ReconciliationId(235), op: ExtentReopen { repair_id: ReconciliationId(235), extent_id: 19 }, state: ClientData([New, New, New]) }
40472 Sep 22 23:15:08.226 INFO Sent repair work, now wait for resp
40473 Sep 22 23:15:08.226 INFO [0] received reconcile message
40474 Sep 22 23:15:08.226 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(235), op: ExtentReopen { repair_id: ReconciliationId(235), extent_id: 19 }, state: ClientData([InProgress, New, New]) }, : downstairs
40475 Sep 22 23:15:08.226 INFO [0] client ExtentReopen { repair_id: ReconciliationId(235), extent_id: 19 }
40476 Sep 22 23:15:08.226 INFO [1] received reconcile message
40477 Sep 22 23:15:08.226 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(235), op: ExtentReopen { repair_id: ReconciliationId(235), extent_id: 19 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40478 Sep 22 23:15:08.226 INFO [1] client ExtentReopen { repair_id: ReconciliationId(235), extent_id: 19 }
40479 Sep 22 23:15:08.226 INFO [2] received reconcile message
40480 Sep 22 23:15:08.226 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(235), op: ExtentReopen { repair_id: ReconciliationId(235), extent_id: 19 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40481 Sep 22 23:15:08.226 INFO [2] client ExtentReopen { repair_id: ReconciliationId(235), extent_id: 19 }
40482 Sep 22 23:15:08.226 DEBG 235 Reopen extent 19
40483 Sep 22 23:15:08.227 DEBG 235 Reopen extent 19
40484 Sep 22 23:15:08.227 DEBG 235 Reopen extent 19
40485 Sep 22 23:15:08.228 DEBG [2] It's time to notify for 235
40486 Sep 22 23:15:08.228 INFO Completion from [2] id:235 status:true
40487 Sep 22 23:15:08.228 INFO [236/752] Repair commands completed
40488 Sep 22 23:15:08.228 INFO Pop front: ReconcileIO { id: ReconciliationId(236), op: ExtentFlush { repair_id: ReconciliationId(236), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40489 Sep 22 23:15:08.228 INFO Sent repair work, now wait for resp
40490 Sep 22 23:15:08.228 INFO [0] received reconcile message
40491 Sep 22 23:15:08.228 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(236), op: ExtentFlush { repair_id: ReconciliationId(236), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40492 Sep 22 23:15:08.228 INFO [0] client ExtentFlush { repair_id: ReconciliationId(236), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40493 Sep 22 23:15:08.228 INFO [1] received reconcile message
40494 Sep 22 23:15:08.228 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(236), op: ExtentFlush { repair_id: ReconciliationId(236), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40495 Sep 22 23:15:08.228 INFO [1] client ExtentFlush { repair_id: ReconciliationId(236), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40496 Sep 22 23:15:08.228 INFO [2] received reconcile message
40497 Sep 22 23:15:08.228 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(236), op: ExtentFlush { repair_id: ReconciliationId(236), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40498 Sep 22 23:15:08.228 INFO [2] client ExtentFlush { repair_id: ReconciliationId(236), extent_id: 79, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40499 Sep 22 23:15:08.228 DEBG 236 Flush extent 79 with f:2 g:2
40500 Sep 22 23:15:08.228 DEBG Flush just extent 79 with f:2 and g:2
40501 Sep 22 23:15:08.229 DEBG [1] It's time to notify for 236
40502 Sep 22 23:15:08.229 INFO Completion from [1] id:236 status:true
40503 Sep 22 23:15:08.229 INFO [237/752] Repair commands completed
40504 Sep 22 23:15:08.229 INFO Pop front: ReconcileIO { id: ReconciliationId(237), op: ExtentClose { repair_id: ReconciliationId(237), extent_id: 79 }, state: ClientData([New, New, New]) }
40505 Sep 22 23:15:08.229 INFO Sent repair work, now wait for resp
40506 Sep 22 23:15:08.229 INFO [0] received reconcile message
40507 Sep 22 23:15:08.229 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(237), op: ExtentClose { repair_id: ReconciliationId(237), extent_id: 79 }, state: ClientData([InProgress, New, New]) }, : downstairs
40508 Sep 22 23:15:08.229 INFO [0] client ExtentClose { repair_id: ReconciliationId(237), extent_id: 79 }
40509 Sep 22 23:15:08.229 INFO [1] received reconcile message
40510 Sep 22 23:15:08.229 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(237), op: ExtentClose { repair_id: ReconciliationId(237), extent_id: 79 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40511 Sep 22 23:15:08.229 INFO [1] client ExtentClose { repair_id: ReconciliationId(237), extent_id: 79 }
40512 Sep 22 23:15:08.229 INFO [2] received reconcile message
40513 Sep 22 23:15:08.229 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(237), op: ExtentClose { repair_id: ReconciliationId(237), extent_id: 79 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40514 Sep 22 23:15:08.229 INFO [2] client ExtentClose { repair_id: ReconciliationId(237), extent_id: 79 }
40515 Sep 22 23:15:08.229 DEBG 237 Close extent 79
40516 Sep 22 23:15:08.229 DEBG 237 Close extent 79
40517 Sep 22 23:15:08.230 DEBG 237 Close extent 79
40518 Sep 22 23:15:08.230 DEBG [2] It's time to notify for 237
40519 Sep 22 23:15:08.230 INFO Completion from [2] id:237 status:true
40520 Sep 22 23:15:08.230 INFO [238/752] Repair commands completed
40521 Sep 22 23:15:08.230 INFO Pop front: ReconcileIO { id: ReconciliationId(238), op: ExtentRepair { repair_id: ReconciliationId(238), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40522 Sep 22 23:15:08.230 INFO Sent repair work, now wait for resp
40523 Sep 22 23:15:08.230 INFO [0] received reconcile message
40524 Sep 22 23:15:08.230 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(238), op: ExtentRepair { repair_id: ReconciliationId(238), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40525 Sep 22 23:15:08.230 INFO [0] client ExtentRepair { repair_id: ReconciliationId(238), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40526 Sep 22 23:15:08.230 INFO [0] Sending repair request ReconciliationId(238)
40527 Sep 22 23:15:08.230 INFO [1] received reconcile message
40528 Sep 22 23:15:08.230 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(238), op: ExtentRepair { repair_id: ReconciliationId(238), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40529 Sep 22 23:15:08.230 INFO [1] client ExtentRepair { repair_id: ReconciliationId(238), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40530 Sep 22 23:15:08.230 INFO [1] No action required ReconciliationId(238)
40531 Sep 22 23:15:08.230 INFO [2] received reconcile message
40532 Sep 22 23:15:08.230 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(238), op: ExtentRepair { repair_id: ReconciliationId(238), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40533 Sep 22 23:15:08.230 INFO [2] client ExtentRepair { repair_id: ReconciliationId(238), extent_id: 79, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40534 Sep 22 23:15:08.230 INFO [2] No action required ReconciliationId(238)
40535 Sep 22 23:15:08.230 DEBG 238 Repair extent 79 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
40536 Sep 22 23:15:08.230 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/04F.copy"
40537 Sep 22 23:15:08.294 INFO accepted connection, remote_addr: 127.0.0.1:56211, local_addr: 127.0.0.1:46213, task: repair
40538 Sep 22 23:15:08.294 TRCE incoming request, uri: /extent/79/files, method: GET, req_id: 1a0ce382-95e0-4b8c-8f36-e28d23b9430e, remote_addr: 127.0.0.1:56211, local_addr: 127.0.0.1:46213, task: repair
40539 Sep 22 23:15:08.294 INFO request completed, latency_us: 220, response_code: 200, uri: /extent/79/files, method: GET, req_id: 1a0ce382-95e0-4b8c-8f36-e28d23b9430e, remote_addr: 127.0.0.1:56211, local_addr: 127.0.0.1:46213, task: repair
40540 Sep 22 23:15:08.294 INFO eid:79 Found repair files: ["04F", "04F.db"]
40541 Sep 22 23:15:08.295 TRCE incoming request, uri: /newextent/79/data, method: GET, req_id: da00ece0-ea5a-41fa-9563-12a7df3dccdd, remote_addr: 127.0.0.1:56211, local_addr: 127.0.0.1:46213, task: repair
40542 Sep 22 23:15:08.295 INFO request completed, latency_us: 318, response_code: 200, uri: /newextent/79/data, method: GET, req_id: da00ece0-ea5a-41fa-9563-12a7df3dccdd, remote_addr: 127.0.0.1:56211, local_addr: 127.0.0.1:46213, task: repair
40543 Sep 22 23:15:08.300 TRCE incoming request, uri: /newextent/79/db, method: GET, req_id: abd7909e-0c1d-4c69-918a-fab5412e6c8f, remote_addr: 127.0.0.1:56211, local_addr: 127.0.0.1:46213, task: repair
40544 Sep 22 23:15:08.300 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/79/db, method: GET, req_id: abd7909e-0c1d-4c69-918a-fab5412e6c8f, remote_addr: 127.0.0.1:56211, local_addr: 127.0.0.1:46213, task: repair
40545 Sep 22 23:15:08.301 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/04F.copy" to "/tmp/downstairs-vrx8aK6L/00/000/04F.replace"
40546 Sep 22 23:15:08.301 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40547 Sep 22 23:15:08.302 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/04F.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
40548 Sep 22 23:15:08.303 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/04F"
40549 Sep 22 23:15:08.303 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/04F.db"
40550 Sep 22 23:15:08.303 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40551 Sep 22 23:15:08.303 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/04F.replace" to "/tmp/downstairs-vrx8aK6L/00/000/04F.completed"
40552 Sep 22 23:15:08.303 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40553 Sep 22 23:15:08.303 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40554 Sep 22 23:15:08.303 DEBG [0] It's time to notify for 238
40555 Sep 22 23:15:08.303 INFO Completion from [0] id:238 status:true
40556 Sep 22 23:15:08.303 INFO [239/752] Repair commands completed
40557 Sep 22 23:15:08.303 INFO Pop front: ReconcileIO { id: ReconciliationId(239), op: ExtentReopen { repair_id: ReconciliationId(239), extent_id: 79 }, state: ClientData([New, New, New]) }
40558 Sep 22 23:15:08.303 INFO Sent repair work, now wait for resp
40559 Sep 22 23:15:08.303 INFO [0] received reconcile message
40560 Sep 22 23:15:08.303 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(239), op: ExtentReopen { repair_id: ReconciliationId(239), extent_id: 79 }, state: ClientData([InProgress, New, New]) }, : downstairs
40561 Sep 22 23:15:08.303 INFO [0] client ExtentReopen { repair_id: ReconciliationId(239), extent_id: 79 }
40562 Sep 22 23:15:08.303 INFO [1] received reconcile message
40563 Sep 22 23:15:08.303 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(239), op: ExtentReopen { repair_id: ReconciliationId(239), extent_id: 79 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40564 Sep 22 23:15:08.303 INFO [1] client ExtentReopen { repair_id: ReconciliationId(239), extent_id: 79 }
40565 Sep 22 23:15:08.303 INFO [2] received reconcile message
40566 Sep 22 23:15:08.304 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(239), op: ExtentReopen { repair_id: ReconciliationId(239), extent_id: 79 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40567 Sep 22 23:15:08.304 INFO [2] client ExtentReopen { repair_id: ReconciliationId(239), extent_id: 79 }
40568 Sep 22 23:15:08.304 DEBG 239 Reopen extent 79
40569 Sep 22 23:15:08.304 DEBG 239 Reopen extent 79
40570 Sep 22 23:15:08.305 DEBG 239 Reopen extent 79
40571 Sep 22 23:15:08.305 DEBG [2] It's time to notify for 239
40572 Sep 22 23:15:08.305 INFO Completion from [2] id:239 status:true
40573 Sep 22 23:15:08.305 INFO [240/752] Repair commands completed
40574 Sep 22 23:15:08.306 INFO Pop front: ReconcileIO { id: ReconciliationId(240), op: ExtentFlush { repair_id: ReconciliationId(240), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40575 Sep 22 23:15:08.306 INFO Sent repair work, now wait for resp
40576 Sep 22 23:15:08.306 INFO [0] received reconcile message
40577 Sep 22 23:15:08.306 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(240), op: ExtentFlush { repair_id: ReconciliationId(240), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40578 Sep 22 23:15:08.306 INFO [0] client ExtentFlush { repair_id: ReconciliationId(240), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40579 Sep 22 23:15:08.306 INFO [1] received reconcile message
40580 Sep 22 23:15:08.306 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(240), op: ExtentFlush { repair_id: ReconciliationId(240), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40581 Sep 22 23:15:08.306 INFO [1] client ExtentFlush { repair_id: ReconciliationId(240), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40582 Sep 22 23:15:08.306 INFO [2] received reconcile message
40583 Sep 22 23:15:08.306 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(240), op: ExtentFlush { repair_id: ReconciliationId(240), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40584 Sep 22 23:15:08.306 INFO [2] client ExtentFlush { repair_id: ReconciliationId(240), extent_id: 97, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40585 Sep 22 23:15:08.306 DEBG 240 Flush extent 97 with f:2 g:2
40586 Sep 22 23:15:08.306 DEBG Flush just extent 97 with f:2 and g:2
40587 Sep 22 23:15:08.306 DEBG [1] It's time to notify for 240
40588 Sep 22 23:15:08.306 INFO Completion from [1] id:240 status:true
40589 Sep 22 23:15:08.306 INFO [241/752] Repair commands completed
40590 Sep 22 23:15:08.306 INFO Pop front: ReconcileIO { id: ReconciliationId(241), op: ExtentClose { repair_id: ReconciliationId(241), extent_id: 97 }, state: ClientData([New, New, New]) }
40591 Sep 22 23:15:08.306 INFO Sent repair work, now wait for resp
40592 Sep 22 23:15:08.306 INFO [0] received reconcile message
40593 Sep 22 23:15:08.306 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(241), op: ExtentClose { repair_id: ReconciliationId(241), extent_id: 97 }, state: ClientData([InProgress, New, New]) }, : downstairs
40594 Sep 22 23:15:08.306 INFO [0] client ExtentClose { repair_id: ReconciliationId(241), extent_id: 97 }
40595 Sep 22 23:15:08.306 INFO [1] received reconcile message
40596 Sep 22 23:15:08.306 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(241), op: ExtentClose { repair_id: ReconciliationId(241), extent_id: 97 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40597 Sep 22 23:15:08.306 INFO [1] client ExtentClose { repair_id: ReconciliationId(241), extent_id: 97 }
40598 Sep 22 23:15:08.306 INFO [2] received reconcile message
40599 Sep 22 23:15:08.306 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(241), op: ExtentClose { repair_id: ReconciliationId(241), extent_id: 97 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40600 Sep 22 23:15:08.306 INFO [2] client ExtentClose { repair_id: ReconciliationId(241), extent_id: 97 }
40601 Sep 22 23:15:08.306 DEBG 241 Close extent 97
40602 Sep 22 23:15:08.307 DEBG 241 Close extent 97
40603 Sep 22 23:15:08.307 DEBG 241 Close extent 97
40604 Sep 22 23:15:08.307 DEBG [2] It's time to notify for 241
40605 Sep 22 23:15:08.307 INFO Completion from [2] id:241 status:true
40606 Sep 22 23:15:08.307 INFO [242/752] Repair commands completed
40607 Sep 22 23:15:08.307 INFO Pop front: ReconcileIO { id: ReconciliationId(242), op: ExtentRepair { repair_id: ReconciliationId(242), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40608 Sep 22 23:15:08.308 INFO Sent repair work, now wait for resp
40609 Sep 22 23:15:08.308 INFO [0] received reconcile message
40610 Sep 22 23:15:08.308 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(242), op: ExtentRepair { repair_id: ReconciliationId(242), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40611 Sep 22 23:15:08.308 INFO [0] client ExtentRepair { repair_id: ReconciliationId(242), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40612 Sep 22 23:15:08.308 INFO [0] Sending repair request ReconciliationId(242)
40613 Sep 22 23:15:08.308 INFO [1] received reconcile message
40614 Sep 22 23:15:08.308 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(242), op: ExtentRepair { repair_id: ReconciliationId(242), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40615 Sep 22 23:15:08.308 INFO [1] client ExtentRepair { repair_id: ReconciliationId(242), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40616 Sep 22 23:15:08.308 INFO [1] No action required ReconciliationId(242)
40617 Sep 22 23:15:08.308 INFO [2] received reconcile message
40618 Sep 22 23:15:08.308 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(242), op: ExtentRepair { repair_id: ReconciliationId(242), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40619 Sep 22 23:15:08.308 INFO [2] client ExtentRepair { repair_id: ReconciliationId(242), extent_id: 97, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40620 Sep 22 23:15:08.308 INFO [2] No action required ReconciliationId(242)
40621 Sep 22 23:15:08.308 DEBG 242 Repair extent 97 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
40622 Sep 22 23:15:08.308 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/061.copy"
40623 Sep 22 23:15:08.369 INFO accepted connection, remote_addr: 127.0.0.1:56678, local_addr: 127.0.0.1:46213, task: repair
40624 Sep 22 23:15:08.369 TRCE incoming request, uri: /extent/97/files, method: GET, req_id: 4397840c-4b10-4bee-a068-243448318d36, remote_addr: 127.0.0.1:56678, local_addr: 127.0.0.1:46213, task: repair
40625 Sep 22 23:15:08.370 INFO request completed, latency_us: 241, response_code: 200, uri: /extent/97/files, method: GET, req_id: 4397840c-4b10-4bee-a068-243448318d36, remote_addr: 127.0.0.1:56678, local_addr: 127.0.0.1:46213, task: repair
40626 Sep 22 23:15:08.370 INFO eid:97 Found repair files: ["061", "061.db"]
40627 Sep 22 23:15:08.370 TRCE incoming request, uri: /newextent/97/data, method: GET, req_id: 81bf7306-62b8-41af-838b-be81d4466c8d, remote_addr: 127.0.0.1:56678, local_addr: 127.0.0.1:46213, task: repair
40628 Sep 22 23:15:08.371 INFO request completed, latency_us: 357, response_code: 200, uri: /newextent/97/data, method: GET, req_id: 81bf7306-62b8-41af-838b-be81d4466c8d, remote_addr: 127.0.0.1:56678, local_addr: 127.0.0.1:46213, task: repair
40629 Sep 22 23:15:08.376 TRCE incoming request, uri: /newextent/97/db, method: GET, req_id: c10fc532-7858-4dc6-9ce1-a6ee83ab8287, remote_addr: 127.0.0.1:56678, local_addr: 127.0.0.1:46213, task: repair
40630 Sep 22 23:15:08.376 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/97/db, method: GET, req_id: c10fc532-7858-4dc6-9ce1-a6ee83ab8287, remote_addr: 127.0.0.1:56678, local_addr: 127.0.0.1:46213, task: repair
40631 Sep 22 23:15:08.377 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/061.copy" to "/tmp/downstairs-vrx8aK6L/00/000/061.replace"
40632 Sep 22 23:15:08.377 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40633 Sep 22 23:15:08.378 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/061.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
40634 Sep 22 23:15:08.379 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/061"
40635 Sep 22 23:15:08.379 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/061.db"
40636 Sep 22 23:15:08.379 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40637 Sep 22 23:15:08.379 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/061.replace" to "/tmp/downstairs-vrx8aK6L/00/000/061.completed"
40638 Sep 22 23:15:08.379 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40639 Sep 22 23:15:08.379 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40640 Sep 22 23:15:08.379 DEBG [0] It's time to notify for 242
40641 Sep 22 23:15:08.379 INFO Completion from [0] id:242 status:true
40642 Sep 22 23:15:08.379 INFO [243/752] Repair commands completed
40643 Sep 22 23:15:08.379 INFO Pop front: ReconcileIO { id: ReconciliationId(243), op: ExtentReopen { repair_id: ReconciliationId(243), extent_id: 97 }, state: ClientData([New, New, New]) }
40644 Sep 22 23:15:08.379 INFO Sent repair work, now wait for resp
40645 Sep 22 23:15:08.379 INFO [0] received reconcile message
40646 Sep 22 23:15:08.379 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(243), op: ExtentReopen { repair_id: ReconciliationId(243), extent_id: 97 }, state: ClientData([InProgress, New, New]) }, : downstairs
40647 Sep 22 23:15:08.379 INFO [0] client ExtentReopen { repair_id: ReconciliationId(243), extent_id: 97 }
40648 Sep 22 23:15:08.379 INFO [1] received reconcile message
40649 Sep 22 23:15:08.379 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(243), op: ExtentReopen { repair_id: ReconciliationId(243), extent_id: 97 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40650 Sep 22 23:15:08.379 INFO [1] client ExtentReopen { repair_id: ReconciliationId(243), extent_id: 97 }
40651 Sep 22 23:15:08.379 INFO [2] received reconcile message
40652 Sep 22 23:15:08.379 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(243), op: ExtentReopen { repair_id: ReconciliationId(243), extent_id: 97 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40653 Sep 22 23:15:08.379 INFO [2] client ExtentReopen { repair_id: ReconciliationId(243), extent_id: 97 }
40654 Sep 22 23:15:08.380 DEBG 243 Reopen extent 97
40655 Sep 22 23:15:08.380 DEBG 243 Reopen extent 97
40656 Sep 22 23:15:08.381 DEBG 243 Reopen extent 97
40657 Sep 22 23:15:08.381 DEBG [2] It's time to notify for 243
40658 Sep 22 23:15:08.381 INFO Completion from [2] id:243 status:true
40659 Sep 22 23:15:08.381 INFO [244/752] Repair commands completed
40660 Sep 22 23:15:08.381 INFO Pop front: ReconcileIO { id: ReconciliationId(244), op: ExtentFlush { repair_id: ReconciliationId(244), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40661 Sep 22 23:15:08.382 INFO Sent repair work, now wait for resp
40662 Sep 22 23:15:08.382 INFO [0] received reconcile message
40663 Sep 22 23:15:08.382 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(244), op: ExtentFlush { repair_id: ReconciliationId(244), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40664 Sep 22 23:15:08.382 INFO [0] client ExtentFlush { repair_id: ReconciliationId(244), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40665 Sep 22 23:15:08.382 INFO [1] received reconcile message
40666 Sep 22 23:15:08.382 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(244), op: ExtentFlush { repair_id: ReconciliationId(244), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40667 Sep 22 23:15:08.382 INFO [1] client ExtentFlush { repair_id: ReconciliationId(244), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40668 Sep 22 23:15:08.382 INFO [2] received reconcile message
40669 Sep 22 23:15:08.382 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(244), op: ExtentFlush { repair_id: ReconciliationId(244), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40670 Sep 22 23:15:08.382 INFO [2] client ExtentFlush { repair_id: ReconciliationId(244), extent_id: 112, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40671 Sep 22 23:15:08.382 DEBG 244 Flush extent 112 with f:2 g:2
40672 Sep 22 23:15:08.382 DEBG Flush just extent 112 with f:2 and g:2
40673 Sep 22 23:15:08.382 DEBG [1] It's time to notify for 244
40674 Sep 22 23:15:08.382 INFO Completion from [1] id:244 status:true
40675 Sep 22 23:15:08.382 INFO [245/752] Repair commands completed
40676 Sep 22 23:15:08.382 INFO Pop front: ReconcileIO { id: ReconciliationId(245), op: ExtentClose { repair_id: ReconciliationId(245), extent_id: 112 }, state: ClientData([New, New, New]) }
40677 Sep 22 23:15:08.382 INFO Sent repair work, now wait for resp
40678 Sep 22 23:15:08.382 INFO [0] received reconcile message
40679 Sep 22 23:15:08.382 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(245), op: ExtentClose { repair_id: ReconciliationId(245), extent_id: 112 }, state: ClientData([InProgress, New, New]) }, : downstairs
40680 Sep 22 23:15:08.382 INFO [0] client ExtentClose { repair_id: ReconciliationId(245), extent_id: 112 }
40681 Sep 22 23:15:08.382 INFO [1] received reconcile message
40682 Sep 22 23:15:08.382 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(245), op: ExtentClose { repair_id: ReconciliationId(245), extent_id: 112 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40683 Sep 22 23:15:08.382 INFO [1] client ExtentClose { repair_id: ReconciliationId(245), extent_id: 112 }
40684 Sep 22 23:15:08.382 INFO [2] received reconcile message
40685 Sep 22 23:15:08.382 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(245), op: ExtentClose { repair_id: ReconciliationId(245), extent_id: 112 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40686 Sep 22 23:15:08.382 INFO [2] client ExtentClose { repair_id: ReconciliationId(245), extent_id: 112 }
40687 Sep 22 23:15:08.382 DEBG 245 Close extent 112
40688 Sep 22 23:15:08.383 DEBG 245 Close extent 112
40689 Sep 22 23:15:08.383 DEBG 245 Close extent 112
40690 Sep 22 23:15:08.383 DEBG [2] It's time to notify for 245
40691 Sep 22 23:15:08.383 INFO Completion from [2] id:245 status:true
40692 Sep 22 23:15:08.383 INFO [246/752] Repair commands completed
40693 Sep 22 23:15:08.383 INFO Pop front: ReconcileIO { id: ReconciliationId(246), op: ExtentRepair { repair_id: ReconciliationId(246), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40694 Sep 22 23:15:08.383 INFO Sent repair work, now wait for resp
40695 Sep 22 23:15:08.384 INFO [0] received reconcile message
40696 Sep 22 23:15:08.384 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(246), op: ExtentRepair { repair_id: ReconciliationId(246), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40697 Sep 22 23:15:08.384 INFO [0] client ExtentRepair { repair_id: ReconciliationId(246), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40698 Sep 22 23:15:08.384 INFO [0] Sending repair request ReconciliationId(246)
40699 Sep 22 23:15:08.384 INFO [1] received reconcile message
40700 Sep 22 23:15:08.384 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(246), op: ExtentRepair { repair_id: ReconciliationId(246), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40701 Sep 22 23:15:08.384 INFO [1] client ExtentRepair { repair_id: ReconciliationId(246), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40702 Sep 22 23:15:08.384 INFO [1] No action required ReconciliationId(246)
40703 Sep 22 23:15:08.384 INFO [2] received reconcile message
40704 Sep 22 23:15:08.384 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(246), op: ExtentRepair { repair_id: ReconciliationId(246), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40705 Sep 22 23:15:08.384 INFO [2] client ExtentRepair { repair_id: ReconciliationId(246), extent_id: 112, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40706 Sep 22 23:15:08.384 INFO [2] No action required ReconciliationId(246)
40707 Sep 22 23:15:08.384 DEBG 246 Repair extent 112 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
40708 Sep 22 23:15:08.384 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/070.copy"
40709 Sep 22 23:15:08.448 INFO accepted connection, remote_addr: 127.0.0.1:51815, local_addr: 127.0.0.1:46213, task: repair
40710 Sep 22 23:15:08.449 TRCE incoming request, uri: /extent/112/files, method: GET, req_id: b6c274dc-72ff-454c-aa0d-e0058817458c, remote_addr: 127.0.0.1:51815, local_addr: 127.0.0.1:46213, task: repair
40711 Sep 22 23:15:08.449 INFO request completed, latency_us: 231, response_code: 200, uri: /extent/112/files, method: GET, req_id: b6c274dc-72ff-454c-aa0d-e0058817458c, remote_addr: 127.0.0.1:51815, local_addr: 127.0.0.1:46213, task: repair
40712 Sep 22 23:15:08.449 INFO eid:112 Found repair files: ["070", "070.db"]
40713 Sep 22 23:15:08.449 TRCE incoming request, uri: /newextent/112/data, method: GET, req_id: d423bf0d-e096-4498-854c-cf34bb546773, remote_addr: 127.0.0.1:51815, local_addr: 127.0.0.1:46213, task: repair
40714 Sep 22 23:15:08.450 INFO request completed, latency_us: 343, response_code: 200, uri: /newextent/112/data, method: GET, req_id: d423bf0d-e096-4498-854c-cf34bb546773, remote_addr: 127.0.0.1:51815, local_addr: 127.0.0.1:46213, task: repair
40715 Sep 22 23:15:08.455 TRCE incoming request, uri: /newextent/112/db, method: GET, req_id: 14b047aa-c10f-4f49-9097-4a3c5cdc43e9, remote_addr: 127.0.0.1:51815, local_addr: 127.0.0.1:46213, task: repair
40716 Sep 22 23:15:08.455 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/112/db, method: GET, req_id: 14b047aa-c10f-4f49-9097-4a3c5cdc43e9, remote_addr: 127.0.0.1:51815, local_addr: 127.0.0.1:46213, task: repair
40717 Sep 22 23:15:08.456 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/070.copy" to "/tmp/downstairs-vrx8aK6L/00/000/070.replace"
40718 Sep 22 23:15:08.456 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40719 Sep 22 23:15:08.457 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/070.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
40720 Sep 22 23:15:08.457 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/070"
40721 Sep 22 23:15:08.457 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/070.db"
40722 Sep 22 23:15:08.457 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40723 Sep 22 23:15:08.457 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/070.replace" to "/tmp/downstairs-vrx8aK6L/00/000/070.completed"
40724 Sep 22 23:15:08.457 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40725 Sep 22 23:15:08.457 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40726 Sep 22 23:15:08.458 DEBG [0] It's time to notify for 246
40727 Sep 22 23:15:08.458 INFO Completion from [0] id:246 status:true
40728 Sep 22 23:15:08.458 INFO [247/752] Repair commands completed
40729 Sep 22 23:15:08.458 INFO Pop front: ReconcileIO { id: ReconciliationId(247), op: ExtentReopen { repair_id: ReconciliationId(247), extent_id: 112 }, state: ClientData([New, New, New]) }
40730 Sep 22 23:15:08.458 INFO Sent repair work, now wait for resp
40731 Sep 22 23:15:08.458 INFO [0] received reconcile message
40732 Sep 22 23:15:08.458 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(247), op: ExtentReopen { repair_id: ReconciliationId(247), extent_id: 112 }, state: ClientData([InProgress, New, New]) }, : downstairs
40733 Sep 22 23:15:08.458 INFO [0] client ExtentReopen { repair_id: ReconciliationId(247), extent_id: 112 }
40734 Sep 22 23:15:08.458 INFO [1] received reconcile message
40735 Sep 22 23:15:08.458 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(247), op: ExtentReopen { repair_id: ReconciliationId(247), extent_id: 112 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40736 Sep 22 23:15:08.458 INFO [1] client ExtentReopen { repair_id: ReconciliationId(247), extent_id: 112 }
40737 Sep 22 23:15:08.458 INFO [2] received reconcile message
40738 Sep 22 23:15:08.458 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(247), op: ExtentReopen { repair_id: ReconciliationId(247), extent_id: 112 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40739 Sep 22 23:15:08.458 INFO [2] client ExtentReopen { repair_id: ReconciliationId(247), extent_id: 112 }
40740 Sep 22 23:15:08.458 DEBG 247 Reopen extent 112
40741 Sep 22 23:15:08.459 DEBG 247 Reopen extent 112
40742 Sep 22 23:15:08.459 DEBG 247 Reopen extent 112
40743 Sep 22 23:15:08.460 DEBG [2] It's time to notify for 247
40744 Sep 22 23:15:08.460 INFO Completion from [2] id:247 status:true
40745 Sep 22 23:15:08.460 INFO [248/752] Repair commands completed
40746 Sep 22 23:15:08.460 INFO Pop front: ReconcileIO { id: ReconciliationId(248), op: ExtentFlush { repair_id: ReconciliationId(248), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40747 Sep 22 23:15:08.460 INFO Sent repair work, now wait for resp
40748 Sep 22 23:15:08.460 INFO [0] received reconcile message
40749 Sep 22 23:15:08.460 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(248), op: ExtentFlush { repair_id: ReconciliationId(248), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40750 Sep 22 23:15:08.460 INFO [0] client ExtentFlush { repair_id: ReconciliationId(248), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40751 Sep 22 23:15:08.460 INFO [1] received reconcile message
40752 Sep 22 23:15:08.460 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(248), op: ExtentFlush { repair_id: ReconciliationId(248), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40753 Sep 22 23:15:08.460 INFO [1] client ExtentFlush { repair_id: ReconciliationId(248), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40754 Sep 22 23:15:08.460 INFO [2] received reconcile message
40755 Sep 22 23:15:08.460 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(248), op: ExtentFlush { repair_id: ReconciliationId(248), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40756 Sep 22 23:15:08.460 INFO [2] client ExtentFlush { repair_id: ReconciliationId(248), extent_id: 118, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40757 Sep 22 23:15:08.460 DEBG 248 Flush extent 118 with f:2 g:2
40758 Sep 22 23:15:08.460 DEBG Flush just extent 118 with f:2 and g:2
40759 Sep 22 23:15:08.461 DEBG [1] It's time to notify for 248
40760 Sep 22 23:15:08.461 INFO Completion from [1] id:248 status:true
40761 Sep 22 23:15:08.461 INFO [249/752] Repair commands completed
40762 Sep 22 23:15:08.461 INFO Pop front: ReconcileIO { id: ReconciliationId(249), op: ExtentClose { repair_id: ReconciliationId(249), extent_id: 118 }, state: ClientData([New, New, New]) }
40763 Sep 22 23:15:08.461 INFO Sent repair work, now wait for resp
40764 Sep 22 23:15:08.461 INFO [0] received reconcile message
40765 Sep 22 23:15:08.461 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(249), op: ExtentClose { repair_id: ReconciliationId(249), extent_id: 118 }, state: ClientData([InProgress, New, New]) }, : downstairs
40766 Sep 22 23:15:08.461 INFO [0] client ExtentClose { repair_id: ReconciliationId(249), extent_id: 118 }
40767 Sep 22 23:15:08.461 INFO [1] received reconcile message
40768 Sep 22 23:15:08.461 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(249), op: ExtentClose { repair_id: ReconciliationId(249), extent_id: 118 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40769 Sep 22 23:15:08.461 INFO [1] client ExtentClose { repair_id: ReconciliationId(249), extent_id: 118 }
40770 Sep 22 23:15:08.461 INFO [2] received reconcile message
40771 Sep 22 23:15:08.461 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(249), op: ExtentClose { repair_id: ReconciliationId(249), extent_id: 118 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40772 Sep 22 23:15:08.461 INFO [2] client ExtentClose { repair_id: ReconciliationId(249), extent_id: 118 }
40773 Sep 22 23:15:08.461 DEBG 249 Close extent 118
40774 Sep 22 23:15:08.461 DEBG 249 Close extent 118
40775 Sep 22 23:15:08.462 DEBG 249 Close extent 118
40776 Sep 22 23:15:08.462 DEBG [2] It's time to notify for 249
40777 Sep 22 23:15:08.462 INFO Completion from [2] id:249 status:true
40778 Sep 22 23:15:08.462 INFO [250/752] Repair commands completed
40779 Sep 22 23:15:08.462 INFO Pop front: ReconcileIO { id: ReconciliationId(250), op: ExtentRepair { repair_id: ReconciliationId(250), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40780 Sep 22 23:15:08.462 INFO Sent repair work, now wait for resp
40781 Sep 22 23:15:08.462 INFO [0] received reconcile message
40782 Sep 22 23:15:08.462 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(250), op: ExtentRepair { repair_id: ReconciliationId(250), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40783 Sep 22 23:15:08.462 INFO [0] client ExtentRepair { repair_id: ReconciliationId(250), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40784 Sep 22 23:15:08.462 INFO [0] Sending repair request ReconciliationId(250)
40785 Sep 22 23:15:08.462 INFO [1] received reconcile message
40786 Sep 22 23:15:08.462 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(250), op: ExtentRepair { repair_id: ReconciliationId(250), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40787 Sep 22 23:15:08.462 INFO [1] client ExtentRepair { repair_id: ReconciliationId(250), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40788 Sep 22 23:15:08.462 INFO [1] No action required ReconciliationId(250)
40789 Sep 22 23:15:08.462 INFO [2] received reconcile message
40790 Sep 22 23:15:08.462 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(250), op: ExtentRepair { repair_id: ReconciliationId(250), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40791 Sep 22 23:15:08.462 INFO [2] client ExtentRepair { repair_id: ReconciliationId(250), extent_id: 118, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40792 Sep 22 23:15:08.462 INFO [2] No action required ReconciliationId(250)
40793 Sep 22 23:15:08.462 DEBG 250 Repair extent 118 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
40794 Sep 22 23:15:08.463 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/076.copy"
40795 Sep 22 23:15:08.528 INFO accepted connection, remote_addr: 127.0.0.1:47270, local_addr: 127.0.0.1:46213, task: repair
40796 Sep 22 23:15:08.528 TRCE incoming request, uri: /extent/118/files, method: GET, req_id: 9411d776-20a5-423f-b71c-f79048f07767, remote_addr: 127.0.0.1:47270, local_addr: 127.0.0.1:46213, task: repair
40797 Sep 22 23:15:08.528 INFO request completed, latency_us: 248, response_code: 200, uri: /extent/118/files, method: GET, req_id: 9411d776-20a5-423f-b71c-f79048f07767, remote_addr: 127.0.0.1:47270, local_addr: 127.0.0.1:46213, task: repair
40798 Sep 22 23:15:08.528 INFO eid:118 Found repair files: ["076", "076.db"]
40799 Sep 22 23:15:08.529 TRCE incoming request, uri: /newextent/118/data, method: GET, req_id: 35eeb2c3-5a88-45f0-a70d-04eac0d5cbec, remote_addr: 127.0.0.1:47270, local_addr: 127.0.0.1:46213, task: repair
40800 Sep 22 23:15:08.529 INFO request completed, latency_us: 351, response_code: 200, uri: /newextent/118/data, method: GET, req_id: 35eeb2c3-5a88-45f0-a70d-04eac0d5cbec, remote_addr: 127.0.0.1:47270, local_addr: 127.0.0.1:46213, task: repair
40801 Sep 22 23:15:08.534 TRCE incoming request, uri: /newextent/118/db, method: GET, req_id: c3f6d948-0e61-4734-9fe0-5a243de6ff08, remote_addr: 127.0.0.1:47270, local_addr: 127.0.0.1:46213, task: repair
40802 Sep 22 23:15:08.534 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/118/db, method: GET, req_id: c3f6d948-0e61-4734-9fe0-5a243de6ff08, remote_addr: 127.0.0.1:47270, local_addr: 127.0.0.1:46213, task: repair
40803 Sep 22 23:15:08.536 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/076.copy" to "/tmp/downstairs-vrx8aK6L/00/000/076.replace"
40804 Sep 22 23:15:08.536 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40805 Sep 22 23:15:08.537 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/076.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
40806 Sep 22 23:15:08.537 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/076"
40807 Sep 22 23:15:08.537 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/076.db"
40808 Sep 22 23:15:08.537 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40809 Sep 22 23:15:08.537 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/076.replace" to "/tmp/downstairs-vrx8aK6L/00/000/076.completed"
40810 Sep 22 23:15:08.537 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40811 Sep 22 23:15:08.537 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40812 Sep 22 23:15:08.537 DEBG [0] It's time to notify for 250
40813 Sep 22 23:15:08.538 INFO Completion from [0] id:250 status:true
40814 Sep 22 23:15:08.538 INFO [251/752] Repair commands completed
40815 Sep 22 23:15:08.538 INFO Pop front: ReconcileIO { id: ReconciliationId(251), op: ExtentReopen { repair_id: ReconciliationId(251), extent_id: 118 }, state: ClientData([New, New, New]) }
40816 Sep 22 23:15:08.538 INFO Sent repair work, now wait for resp
40817 Sep 22 23:15:08.538 INFO [0] received reconcile message
40818 Sep 22 23:15:08.538 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(251), op: ExtentReopen { repair_id: ReconciliationId(251), extent_id: 118 }, state: ClientData([InProgress, New, New]) }, : downstairs
40819 Sep 22 23:15:08.538 INFO [0] client ExtentReopen { repair_id: ReconciliationId(251), extent_id: 118 }
40820 Sep 22 23:15:08.538 INFO [1] received reconcile message
40821 Sep 22 23:15:08.538 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(251), op: ExtentReopen { repair_id: ReconciliationId(251), extent_id: 118 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40822 Sep 22 23:15:08.538 INFO [1] client ExtentReopen { repair_id: ReconciliationId(251), extent_id: 118 }
40823 Sep 22 23:15:08.538 INFO [2] received reconcile message
40824 Sep 22 23:15:08.538 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(251), op: ExtentReopen { repair_id: ReconciliationId(251), extent_id: 118 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40825 Sep 22 23:15:08.538 INFO [2] client ExtentReopen { repair_id: ReconciliationId(251), extent_id: 118 }
40826 Sep 22 23:15:08.538 DEBG 251 Reopen extent 118
40827 Sep 22 23:15:08.539 DEBG 251 Reopen extent 118
40828 Sep 22 23:15:08.539 DEBG 251 Reopen extent 118
40829 Sep 22 23:15:08.540 DEBG [2] It's time to notify for 251
40830 Sep 22 23:15:08.540 INFO Completion from [2] id:251 status:true
40831 Sep 22 23:15:08.540 INFO [252/752] Repair commands completed
40832 Sep 22 23:15:08.540 INFO Pop front: ReconcileIO { id: ReconciliationId(252), op: ExtentFlush { repair_id: ReconciliationId(252), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40833 Sep 22 23:15:08.540 INFO Sent repair work, now wait for resp
40834 Sep 22 23:15:08.540 INFO [0] received reconcile message
40835 Sep 22 23:15:08.540 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(252), op: ExtentFlush { repair_id: ReconciliationId(252), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40836 Sep 22 23:15:08.540 INFO [0] client ExtentFlush { repair_id: ReconciliationId(252), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40837 Sep 22 23:15:08.540 INFO [1] received reconcile message
40838 Sep 22 23:15:08.540 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(252), op: ExtentFlush { repair_id: ReconciliationId(252), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40839 Sep 22 23:15:08.540 INFO [1] client ExtentFlush { repair_id: ReconciliationId(252), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40840 Sep 22 23:15:08.540 INFO [2] received reconcile message
40841 Sep 22 23:15:08.540 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(252), op: ExtentFlush { repair_id: ReconciliationId(252), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40842 Sep 22 23:15:08.540 INFO [2] client ExtentFlush { repair_id: ReconciliationId(252), extent_id: 130, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40843 Sep 22 23:15:08.540 DEBG 252 Flush extent 130 with f:2 g:2
40844 Sep 22 23:15:08.540 DEBG Flush just extent 130 with f:2 and g:2
40845 Sep 22 23:15:08.541 DEBG [1] It's time to notify for 252
40846 Sep 22 23:15:08.541 INFO Completion from [1] id:252 status:true
40847 Sep 22 23:15:08.541 INFO [253/752] Repair commands completed
40848 Sep 22 23:15:08.541 INFO Pop front: ReconcileIO { id: ReconciliationId(253), op: ExtentClose { repair_id: ReconciliationId(253), extent_id: 130 }, state: ClientData([New, New, New]) }
40849 Sep 22 23:15:08.541 INFO Sent repair work, now wait for resp
40850 Sep 22 23:15:08.541 INFO [0] received reconcile message
40851 Sep 22 23:15:08.541 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(253), op: ExtentClose { repair_id: ReconciliationId(253), extent_id: 130 }, state: ClientData([InProgress, New, New]) }, : downstairs
40852 Sep 22 23:15:08.541 INFO [0] client ExtentClose { repair_id: ReconciliationId(253), extent_id: 130 }
40853 Sep 22 23:15:08.541 INFO [1] received reconcile message
40854 Sep 22 23:15:08.541 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(253), op: ExtentClose { repair_id: ReconciliationId(253), extent_id: 130 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40855 Sep 22 23:15:08.541 INFO [1] client ExtentClose { repair_id: ReconciliationId(253), extent_id: 130 }
40856 Sep 22 23:15:08.541 INFO [2] received reconcile message
40857 Sep 22 23:15:08.541 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(253), op: ExtentClose { repair_id: ReconciliationId(253), extent_id: 130 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40858 Sep 22 23:15:08.541 INFO [2] client ExtentClose { repair_id: ReconciliationId(253), extent_id: 130 }
40859 Sep 22 23:15:08.541 DEBG 253 Close extent 130
40860 Sep 22 23:15:08.541 DEBG 253 Close extent 130
40861 Sep 22 23:15:08.542 DEBG 253 Close extent 130
40862 Sep 22 23:15:08.542 DEBG [2] It's time to notify for 253
40863 Sep 22 23:15:08.542 INFO Completion from [2] id:253 status:true
40864 Sep 22 23:15:08.542 INFO [254/752] Repair commands completed
40865 Sep 22 23:15:08.542 INFO Pop front: ReconcileIO { id: ReconciliationId(254), op: ExtentRepair { repair_id: ReconciliationId(254), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40866 Sep 22 23:15:08.542 INFO Sent repair work, now wait for resp
40867 Sep 22 23:15:08.542 INFO [0] received reconcile message
40868 Sep 22 23:15:08.542 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(254), op: ExtentRepair { repair_id: ReconciliationId(254), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40869 Sep 22 23:15:08.542 INFO [0] client ExtentRepair { repair_id: ReconciliationId(254), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40870 Sep 22 23:15:08.542 INFO [0] Sending repair request ReconciliationId(254)
40871 Sep 22 23:15:08.542 INFO [1] received reconcile message
40872 Sep 22 23:15:08.542 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(254), op: ExtentRepair { repair_id: ReconciliationId(254), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40873 Sep 22 23:15:08.542 INFO [1] client ExtentRepair { repair_id: ReconciliationId(254), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40874 Sep 22 23:15:08.542 INFO [1] No action required ReconciliationId(254)
40875 Sep 22 23:15:08.542 INFO [2] received reconcile message
40876 Sep 22 23:15:08.542 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(254), op: ExtentRepair { repair_id: ReconciliationId(254), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40877 Sep 22 23:15:08.542 INFO [2] client ExtentRepair { repair_id: ReconciliationId(254), extent_id: 130, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40878 Sep 22 23:15:08.542 INFO [2] No action required ReconciliationId(254)
40879 Sep 22 23:15:08.542 DEBG 254 Repair extent 130 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
40880 Sep 22 23:15:08.543 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/082.copy"
40881 Sep 22 23:15:08.550 DEBG up_ds_listen was notified
40882 Sep 22 23:15:08.550 DEBG up_ds_listen process 1079
40883 Sep 22 23:15:08.550 DEBG [A] ack job 1079:80, : downstairs
40884 Sep 22 23:15:08.551 DEBG up_ds_listen checked 1 jobs, back to waiting
40885 Sep 22 23:15:08.551 WARN returning error on read!
40886 Sep 22 23:15:08.551 DEBG Read :1078 deps:[JobId(1077)] res:false
40887 Sep 22 23:15:08.551 INFO [lossy] skipping 1079
40888 Sep 22 23:15:08.551 WARN returning error on read!
40889 Sep 22 23:15:08.551 DEBG Read :1078 deps:[JobId(1077)] res:false
40890 Sep 22 23:15:08.557 DEBG Read :1078 deps:[JobId(1077)] res:true
40891 Sep 22 23:15:08.578 DEBG IO Read 1080 has deps [JobId(1079)]
40892 Sep 22 23:15:08.591 ERRO [2] job id 1078 saw error GenericError("test error")
40893 Sep 22 23:15:08.591 ERRO [2] job id 1078 saw error GenericError("test error")
40894 Sep 22 23:15:08.593 DEBG Flush :1079 extent_limit None deps:[JobId(1078)] res:true f:29 g:1
40895 Sep 22 23:15:08.599 DEBG Read :1080 deps:[JobId(1079)] res:true
40896 Sep 22 23:15:08.607 INFO accepted connection, remote_addr: 127.0.0.1:46306, local_addr: 127.0.0.1:46213, task: repair
40897 Sep 22 23:15:08.607 TRCE incoming request, uri: /extent/130/files, method: GET, req_id: 91a25e6a-d277-4ba5-83d7-8195af20ef10, remote_addr: 127.0.0.1:46306, local_addr: 127.0.0.1:46213, task: repair
40898 Sep 22 23:15:08.608 INFO request completed, latency_us: 251, response_code: 200, uri: /extent/130/files, method: GET, req_id: 91a25e6a-d277-4ba5-83d7-8195af20ef10, remote_addr: 127.0.0.1:46306, local_addr: 127.0.0.1:46213, task: repair
40899 Sep 22 23:15:08.608 INFO eid:130 Found repair files: ["082", "082.db"]
40900 Sep 22 23:15:08.608 TRCE incoming request, uri: /newextent/130/data, method: GET, req_id: c83db128-2efd-4eec-868a-4a8d7a4432b8, remote_addr: 127.0.0.1:46306, local_addr: 127.0.0.1:46213, task: repair
40901 Sep 22 23:15:08.609 INFO request completed, latency_us: 353, response_code: 200, uri: /newextent/130/data, method: GET, req_id: c83db128-2efd-4eec-868a-4a8d7a4432b8, remote_addr: 127.0.0.1:46306, local_addr: 127.0.0.1:46213, task: repair
40902 Sep 22 23:15:08.614 TRCE incoming request, uri: /newextent/130/db, method: GET, req_id: 408c7289-c6fa-4c56-901e-14c0cc0d69f3, remote_addr: 127.0.0.1:46306, local_addr: 127.0.0.1:46213, task: repair
40903 Sep 22 23:15:08.614 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/130/db, method: GET, req_id: 408c7289-c6fa-4c56-901e-14c0cc0d69f3, remote_addr: 127.0.0.1:46306, local_addr: 127.0.0.1:46213, task: repair
40904 Sep 22 23:15:08.615 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/082.copy" to "/tmp/downstairs-vrx8aK6L/00/000/082.replace"
40905 Sep 22 23:15:08.615 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40906 Sep 22 23:15:08.616 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/082.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
40907 Sep 22 23:15:08.616 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/082"
40908 Sep 22 23:15:08.617 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/082.db"
40909 Sep 22 23:15:08.617 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40910 Sep 22 23:15:08.617 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/082.replace" to "/tmp/downstairs-vrx8aK6L/00/000/082.completed"
40911 Sep 22 23:15:08.617 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40912 Sep 22 23:15:08.617 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40913 Sep 22 23:15:08.617 DEBG [0] It's time to notify for 254
40914 Sep 22 23:15:08.617 INFO Completion from [0] id:254 status:true
40915 Sep 22 23:15:08.617 INFO [255/752] Repair commands completed
40916 Sep 22 23:15:08.617 INFO Pop front: ReconcileIO { id: ReconciliationId(255), op: ExtentReopen { repair_id: ReconciliationId(255), extent_id: 130 }, state: ClientData([New, New, New]) }
40917 Sep 22 23:15:08.617 INFO Sent repair work, now wait for resp
40918 Sep 22 23:15:08.617 INFO [0] received reconcile message
40919 Sep 22 23:15:08.617 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(255), op: ExtentReopen { repair_id: ReconciliationId(255), extent_id: 130 }, state: ClientData([InProgress, New, New]) }, : downstairs
40920 Sep 22 23:15:08.617 INFO [0] client ExtentReopen { repair_id: ReconciliationId(255), extent_id: 130 }
40921 Sep 22 23:15:08.617 INFO [1] received reconcile message
40922 Sep 22 23:15:08.617 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(255), op: ExtentReopen { repair_id: ReconciliationId(255), extent_id: 130 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40923 Sep 22 23:15:08.617 INFO [1] client ExtentReopen { repair_id: ReconciliationId(255), extent_id: 130 }
40924 Sep 22 23:15:08.617 INFO [2] received reconcile message
40925 Sep 22 23:15:08.617 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(255), op: ExtentReopen { repair_id: ReconciliationId(255), extent_id: 130 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40926 Sep 22 23:15:08.617 INFO [2] client ExtentReopen { repair_id: ReconciliationId(255), extent_id: 130 }
40927 Sep 22 23:15:08.618 DEBG 255 Reopen extent 130
40928 Sep 22 23:15:08.618 DEBG 255 Reopen extent 130
40929 Sep 22 23:15:08.619 DEBG 255 Reopen extent 130
40930 Sep 22 23:15:08.619 DEBG [2] It's time to notify for 255
40931 Sep 22 23:15:08.620 INFO Completion from [2] id:255 status:true
40932 Sep 22 23:15:08.620 INFO [256/752] Repair commands completed
40933 Sep 22 23:15:08.620 INFO Pop front: ReconcileIO { id: ReconciliationId(256), op: ExtentFlush { repair_id: ReconciliationId(256), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
40934 Sep 22 23:15:08.620 INFO Sent repair work, now wait for resp
40935 Sep 22 23:15:08.620 INFO [0] received reconcile message
40936 Sep 22 23:15:08.620 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(256), op: ExtentFlush { repair_id: ReconciliationId(256), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
40937 Sep 22 23:15:08.620 INFO [0] client ExtentFlush { repair_id: ReconciliationId(256), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40938 Sep 22 23:15:08.620 INFO [1] received reconcile message
40939 Sep 22 23:15:08.620 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(256), op: ExtentFlush { repair_id: ReconciliationId(256), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
40940 Sep 22 23:15:08.620 INFO [1] client ExtentFlush { repair_id: ReconciliationId(256), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40941 Sep 22 23:15:08.620 INFO [2] received reconcile message
40942 Sep 22 23:15:08.620 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(256), op: ExtentFlush { repair_id: ReconciliationId(256), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
40943 Sep 22 23:15:08.620 INFO [2] client ExtentFlush { repair_id: ReconciliationId(256), extent_id: 25, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
40944 Sep 22 23:15:08.620 DEBG 256 Flush extent 25 with f:2 g:2
40945 Sep 22 23:15:08.620 DEBG Flush just extent 25 with f:2 and g:2
40946 Sep 22 23:15:08.620 DEBG [1] It's time to notify for 256
40947 Sep 22 23:15:08.620 INFO Completion from [1] id:256 status:true
40948 Sep 22 23:15:08.620 INFO [257/752] Repair commands completed
40949 Sep 22 23:15:08.620 INFO Pop front: ReconcileIO { id: ReconciliationId(257), op: ExtentClose { repair_id: ReconciliationId(257), extent_id: 25 }, state: ClientData([New, New, New]) }
40950 Sep 22 23:15:08.620 INFO Sent repair work, now wait for resp
40951 Sep 22 23:15:08.620 INFO [0] received reconcile message
40952 Sep 22 23:15:08.620 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(257), op: ExtentClose { repair_id: ReconciliationId(257), extent_id: 25 }, state: ClientData([InProgress, New, New]) }, : downstairs
40953 Sep 22 23:15:08.620 INFO [0] client ExtentClose { repair_id: ReconciliationId(257), extent_id: 25 }
40954 Sep 22 23:15:08.620 INFO [1] received reconcile message
40955 Sep 22 23:15:08.620 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(257), op: ExtentClose { repair_id: ReconciliationId(257), extent_id: 25 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40956 Sep 22 23:15:08.620 INFO [1] client ExtentClose { repair_id: ReconciliationId(257), extent_id: 25 }
40957 Sep 22 23:15:08.620 INFO [2] received reconcile message
40958 Sep 22 23:15:08.620 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(257), op: ExtentClose { repair_id: ReconciliationId(257), extent_id: 25 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
40959 Sep 22 23:15:08.620 INFO [2] client ExtentClose { repair_id: ReconciliationId(257), extent_id: 25 }
40960 Sep 22 23:15:08.621 DEBG 257 Close extent 25
40961 Sep 22 23:15:08.621 DEBG 257 Close extent 25
40962 Sep 22 23:15:08.621 DEBG 257 Close extent 25
40963 Sep 22 23:15:08.621 DEBG [2] It's time to notify for 257
40964 Sep 22 23:15:08.621 INFO Completion from [2] id:257 status:true
40965 Sep 22 23:15:08.622 INFO [258/752] Repair commands completed
40966 Sep 22 23:15:08.622 INFO Pop front: ReconcileIO { id: ReconciliationId(258), op: ExtentRepair { repair_id: ReconciliationId(258), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
40967 Sep 22 23:15:08.622 INFO Sent repair work, now wait for resp
40968 Sep 22 23:15:08.622 INFO [0] received reconcile message
40969 Sep 22 23:15:08.622 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(258), op: ExtentRepair { repair_id: ReconciliationId(258), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
40970 Sep 22 23:15:08.622 INFO [0] client ExtentRepair { repair_id: ReconciliationId(258), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40971 Sep 22 23:15:08.622 INFO [0] Sending repair request ReconciliationId(258)
40972 Sep 22 23:15:08.622 INFO [1] received reconcile message
40973 Sep 22 23:15:08.622 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(258), op: ExtentRepair { repair_id: ReconciliationId(258), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
40974 Sep 22 23:15:08.622 INFO [1] client ExtentRepair { repair_id: ReconciliationId(258), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40975 Sep 22 23:15:08.622 INFO [1] No action required ReconciliationId(258)
40976 Sep 22 23:15:08.622 INFO [2] received reconcile message
40977 Sep 22 23:15:08.622 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(258), op: ExtentRepair { repair_id: ReconciliationId(258), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
40978 Sep 22 23:15:08.622 INFO [2] client ExtentRepair { repair_id: ReconciliationId(258), extent_id: 25, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
40979 Sep 22 23:15:08.622 INFO [2] No action required ReconciliationId(258)
40980 Sep 22 23:15:08.622 DEBG 258 Repair extent 25 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
40981 Sep 22 23:15:08.622 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/019.copy"
40982 Sep 22 23:15:08.687 INFO accepted connection, remote_addr: 127.0.0.1:65372, local_addr: 127.0.0.1:46213, task: repair
40983 Sep 22 23:15:08.687 TRCE incoming request, uri: /extent/25/files, method: GET, req_id: b94cbafe-84ee-4da2-8176-11127bfcdf56, remote_addr: 127.0.0.1:65372, local_addr: 127.0.0.1:46213, task: repair
40984 Sep 22 23:15:08.687 INFO request completed, latency_us: 269, response_code: 200, uri: /extent/25/files, method: GET, req_id: b94cbafe-84ee-4da2-8176-11127bfcdf56, remote_addr: 127.0.0.1:65372, local_addr: 127.0.0.1:46213, task: repair
40985 Sep 22 23:15:08.688 INFO eid:25 Found repair files: ["019", "019.db"]
40986 Sep 22 23:15:08.688 TRCE incoming request, uri: /newextent/25/data, method: GET, req_id: 48787ab5-a370-470e-a736-bf561e9d7a94, remote_addr: 127.0.0.1:65372, local_addr: 127.0.0.1:46213, task: repair
40987 Sep 22 23:15:08.688 INFO request completed, latency_us: 401, response_code: 200, uri: /newextent/25/data, method: GET, req_id: 48787ab5-a370-470e-a736-bf561e9d7a94, remote_addr: 127.0.0.1:65372, local_addr: 127.0.0.1:46213, task: repair
40988 Sep 22 23:15:08.694 TRCE incoming request, uri: /newextent/25/db, method: GET, req_id: 2467fff0-11fc-43ea-8bd5-7b7cee2c28a3, remote_addr: 127.0.0.1:65372, local_addr: 127.0.0.1:46213, task: repair
40989 Sep 22 23:15:08.694 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/25/db, method: GET, req_id: 2467fff0-11fc-43ea-8bd5-7b7cee2c28a3, remote_addr: 127.0.0.1:65372, local_addr: 127.0.0.1:46213, task: repair
40990 Sep 22 23:15:08.695 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/019.copy" to "/tmp/downstairs-vrx8aK6L/00/000/019.replace"
40991 Sep 22 23:15:08.695 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40992 Sep 22 23:15:08.697 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/019.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
40993 Sep 22 23:15:08.697 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/019"
40994 Sep 22 23:15:08.697 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/019.db"
40995 Sep 22 23:15:08.697 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40996 Sep 22 23:15:08.697 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/019.replace" to "/tmp/downstairs-vrx8aK6L/00/000/019.completed"
40997 Sep 22 23:15:08.697 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40998 Sep 22 23:15:08.697 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
40999 Sep 22 23:15:08.697 DEBG [0] It's time to notify for 258
41000 Sep 22 23:15:08.697 INFO Completion from [0] id:258 status:true
41001 Sep 22 23:15:08.697 INFO [259/752] Repair commands completed
41002 Sep 22 23:15:08.697 INFO Pop front: ReconcileIO { id: ReconciliationId(259), op: ExtentReopen { repair_id: ReconciliationId(259), extent_id: 25 }, state: ClientData([New, New, New]) }
41003 Sep 22 23:15:08.698 INFO Sent repair work, now wait for resp
41004 Sep 22 23:15:08.698 INFO [0] received reconcile message
41005 Sep 22 23:15:08.698 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(259), op: ExtentReopen { repair_id: ReconciliationId(259), extent_id: 25 }, state: ClientData([InProgress, New, New]) }, : downstairs
41006 Sep 22 23:15:08.698 INFO [0] client ExtentReopen { repair_id: ReconciliationId(259), extent_id: 25 }
41007 Sep 22 23:15:08.698 INFO [1] received reconcile message
41008 Sep 22 23:15:08.698 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(259), op: ExtentReopen { repair_id: ReconciliationId(259), extent_id: 25 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41009 Sep 22 23:15:08.698 INFO [1] client ExtentReopen { repair_id: ReconciliationId(259), extent_id: 25 }
41010 Sep 22 23:15:08.698 INFO [2] received reconcile message
41011 Sep 22 23:15:08.698 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(259), op: ExtentReopen { repair_id: ReconciliationId(259), extent_id: 25 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41012 Sep 22 23:15:08.698 INFO [2] client ExtentReopen { repair_id: ReconciliationId(259), extent_id: 25 }
41013 Sep 22 23:15:08.698 DEBG 259 Reopen extent 25
41014 Sep 22 23:15:08.699 DEBG 259 Reopen extent 25
41015 Sep 22 23:15:08.699 DEBG 259 Reopen extent 25
41016 Sep 22 23:15:08.700 DEBG [2] It's time to notify for 259
41017 Sep 22 23:15:08.700 INFO Completion from [2] id:259 status:true
41018 Sep 22 23:15:08.700 INFO [260/752] Repair commands completed
41019 Sep 22 23:15:08.700 INFO Pop front: ReconcileIO { id: ReconciliationId(260), op: ExtentFlush { repair_id: ReconciliationId(260), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41020 Sep 22 23:15:08.700 INFO Sent repair work, now wait for resp
41021 Sep 22 23:15:08.700 INFO [0] received reconcile message
41022 Sep 22 23:15:08.700 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(260), op: ExtentFlush { repair_id: ReconciliationId(260), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41023 Sep 22 23:15:08.700 INFO [0] client ExtentFlush { repair_id: ReconciliationId(260), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41024 Sep 22 23:15:08.700 INFO [1] received reconcile message
41025 Sep 22 23:15:08.700 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(260), op: ExtentFlush { repair_id: ReconciliationId(260), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41026 Sep 22 23:15:08.700 INFO [1] client ExtentFlush { repair_id: ReconciliationId(260), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41027 Sep 22 23:15:08.700 INFO [2] received reconcile message
41028 Sep 22 23:15:08.700 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(260), op: ExtentFlush { repair_id: ReconciliationId(260), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41029 Sep 22 23:15:08.700 INFO [2] client ExtentFlush { repair_id: ReconciliationId(260), extent_id: 47, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41030 Sep 22 23:15:08.700 DEBG 260 Flush extent 47 with f:2 g:2
41031 Sep 22 23:15:08.700 DEBG Flush just extent 47 with f:2 and g:2
41032 Sep 22 23:15:08.700 DEBG [1] It's time to notify for 260
41033 Sep 22 23:15:08.701 INFO Completion from [1] id:260 status:true
41034 Sep 22 23:15:08.701 INFO [261/752] Repair commands completed
41035 Sep 22 23:15:08.701 INFO Pop front: ReconcileIO { id: ReconciliationId(261), op: ExtentClose { repair_id: ReconciliationId(261), extent_id: 47 }, state: ClientData([New, New, New]) }
41036 Sep 22 23:15:08.701 INFO Sent repair work, now wait for resp
41037 Sep 22 23:15:08.701 INFO [0] received reconcile message
41038 Sep 22 23:15:08.701 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(261), op: ExtentClose { repair_id: ReconciliationId(261), extent_id: 47 }, state: ClientData([InProgress, New, New]) }, : downstairs
41039 Sep 22 23:15:08.701 INFO [0] client ExtentClose { repair_id: ReconciliationId(261), extent_id: 47 }
41040 Sep 22 23:15:08.701 INFO [1] received reconcile message
41041 Sep 22 23:15:08.701 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(261), op: ExtentClose { repair_id: ReconciliationId(261), extent_id: 47 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41042 Sep 22 23:15:08.701 INFO [1] client ExtentClose { repair_id: ReconciliationId(261), extent_id: 47 }
41043 Sep 22 23:15:08.701 INFO [2] received reconcile message
41044 Sep 22 23:15:08.701 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(261), op: ExtentClose { repair_id: ReconciliationId(261), extent_id: 47 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41045 Sep 22 23:15:08.701 INFO [2] client ExtentClose { repair_id: ReconciliationId(261), extent_id: 47 }
41046 Sep 22 23:15:08.701 DEBG 261 Close extent 47
41047 Sep 22 23:15:08.701 DEBG 261 Close extent 47
41048 Sep 22 23:15:08.702 DEBG 261 Close extent 47
41049 Sep 22 23:15:08.702 DEBG [2] It's time to notify for 261
41050 Sep 22 23:15:08.702 INFO Completion from [2] id:261 status:true
41051 Sep 22 23:15:08.702 INFO [262/752] Repair commands completed
41052 Sep 22 23:15:08.702 INFO Pop front: ReconcileIO { id: ReconciliationId(262), op: ExtentRepair { repair_id: ReconciliationId(262), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41053 Sep 22 23:15:08.702 INFO Sent repair work, now wait for resp
41054 Sep 22 23:15:08.702 INFO [0] received reconcile message
41055 Sep 22 23:15:08.702 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(262), op: ExtentRepair { repair_id: ReconciliationId(262), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41056 Sep 22 23:15:08.702 INFO [0] client ExtentRepair { repair_id: ReconciliationId(262), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41057 Sep 22 23:15:08.702 INFO [0] Sending repair request ReconciliationId(262)
41058 Sep 22 23:15:08.702 INFO [1] received reconcile message
41059 Sep 22 23:15:08.702 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(262), op: ExtentRepair { repair_id: ReconciliationId(262), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41060 Sep 22 23:15:08.702 INFO [1] client ExtentRepair { repair_id: ReconciliationId(262), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41061 Sep 22 23:15:08.702 INFO [1] No action required ReconciliationId(262)
41062 Sep 22 23:15:08.702 INFO [2] received reconcile message
41063 Sep 22 23:15:08.702 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(262), op: ExtentRepair { repair_id: ReconciliationId(262), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41064 Sep 22 23:15:08.702 INFO [2] client ExtentRepair { repair_id: ReconciliationId(262), extent_id: 47, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41065 Sep 22 23:15:08.702 INFO [2] No action required ReconciliationId(262)
41066 Sep 22 23:15:08.702 DEBG 262 Repair extent 47 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
41067 Sep 22 23:15:08.702 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/02F.copy"
41068 Sep 22 23:15:08.768 INFO accepted connection, remote_addr: 127.0.0.1:49772, local_addr: 127.0.0.1:46213, task: repair
41069 Sep 22 23:15:08.768 TRCE incoming request, uri: /extent/47/files, method: GET, req_id: 023b9695-3b92-4745-805d-6fefdfdf4477, remote_addr: 127.0.0.1:49772, local_addr: 127.0.0.1:46213, task: repair
41070 Sep 22 23:15:08.768 INFO request completed, latency_us: 274, response_code: 200, uri: /extent/47/files, method: GET, req_id: 023b9695-3b92-4745-805d-6fefdfdf4477, remote_addr: 127.0.0.1:49772, local_addr: 127.0.0.1:46213, task: repair
41071 Sep 22 23:15:08.768 INFO eid:47 Found repair files: ["02F", "02F.db"]
41072 Sep 22 23:15:08.769 TRCE incoming request, uri: /newextent/47/data, method: GET, req_id: 14a9116b-7d54-4529-aec3-f21d7501bde9, remote_addr: 127.0.0.1:49772, local_addr: 127.0.0.1:46213, task: repair
41073 Sep 22 23:15:08.769 INFO request completed, latency_us: 371, response_code: 200, uri: /newextent/47/data, method: GET, req_id: 14a9116b-7d54-4529-aec3-f21d7501bde9, remote_addr: 127.0.0.1:49772, local_addr: 127.0.0.1:46213, task: repair
41074 Sep 22 23:15:08.774 TRCE incoming request, uri: /newextent/47/db, method: GET, req_id: 4040ce8a-a082-4191-ae39-1d1c108c24b4, remote_addr: 127.0.0.1:49772, local_addr: 127.0.0.1:46213, task: repair
41075 Sep 22 23:15:08.775 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/47/db, method: GET, req_id: 4040ce8a-a082-4191-ae39-1d1c108c24b4, remote_addr: 127.0.0.1:49772, local_addr: 127.0.0.1:46213, task: repair
41076 Sep 22 23:15:08.776 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/02F.copy" to "/tmp/downstairs-vrx8aK6L/00/000/02F.replace"
41077 Sep 22 23:15:08.776 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41078 Sep 22 23:15:08.777 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/02F.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
41079 Sep 22 23:15:08.777 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/02F"
41080 Sep 22 23:15:08.777 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/02F.db"
41081 Sep 22 23:15:08.777 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41082 Sep 22 23:15:08.777 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/02F.replace" to "/tmp/downstairs-vrx8aK6L/00/000/02F.completed"
41083 Sep 22 23:15:08.777 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41084 Sep 22 23:15:08.777 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41085 Sep 22 23:15:08.778 DEBG [0] It's time to notify for 262
41086 Sep 22 23:15:08.778 INFO Completion from [0] id:262 status:true
41087 Sep 22 23:15:08.778 INFO [263/752] Repair commands completed
41088 Sep 22 23:15:08.778 INFO Pop front: ReconcileIO { id: ReconciliationId(263), op: ExtentReopen { repair_id: ReconciliationId(263), extent_id: 47 }, state: ClientData([New, New, New]) }
41089 Sep 22 23:15:08.778 INFO Sent repair work, now wait for resp
41090 Sep 22 23:15:08.778 INFO [0] received reconcile message
41091 Sep 22 23:15:08.778 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(263), op: ExtentReopen { repair_id: ReconciliationId(263), extent_id: 47 }, state: ClientData([InProgress, New, New]) }, : downstairs
41092 Sep 22 23:15:08.778 INFO [0] client ExtentReopen { repair_id: ReconciliationId(263), extent_id: 47 }
41093 Sep 22 23:15:08.778 INFO [1] received reconcile message
41094 Sep 22 23:15:08.778 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(263), op: ExtentReopen { repair_id: ReconciliationId(263), extent_id: 47 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41095 Sep 22 23:15:08.778 INFO [1] client ExtentReopen { repair_id: ReconciliationId(263), extent_id: 47 }
41096 Sep 22 23:15:08.778 INFO [2] received reconcile message
41097 Sep 22 23:15:08.778 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(263), op: ExtentReopen { repair_id: ReconciliationId(263), extent_id: 47 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41098 Sep 22 23:15:08.778 INFO [2] client ExtentReopen { repair_id: ReconciliationId(263), extent_id: 47 }
41099 Sep 22 23:15:08.778 DEBG 263 Reopen extent 47
41100 Sep 22 23:15:08.779 DEBG 263 Reopen extent 47
41101 Sep 22 23:15:08.780 DEBG 263 Reopen extent 47
41102 Sep 22 23:15:08.780 DEBG [2] It's time to notify for 263
41103 Sep 22 23:15:08.780 INFO Completion from [2] id:263 status:true
41104 Sep 22 23:15:08.780 INFO [264/752] Repair commands completed
41105 Sep 22 23:15:08.780 INFO Pop front: ReconcileIO { id: ReconciliationId(264), op: ExtentFlush { repair_id: ReconciliationId(264), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41106 Sep 22 23:15:08.780 INFO Sent repair work, now wait for resp
41107 Sep 22 23:15:08.780 INFO [0] received reconcile message
41108 Sep 22 23:15:08.780 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(264), op: ExtentFlush { repair_id: ReconciliationId(264), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41109 Sep 22 23:15:08.780 INFO [0] client ExtentFlush { repair_id: ReconciliationId(264), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41110 Sep 22 23:15:08.780 INFO [1] received reconcile message
41111 Sep 22 23:15:08.780 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(264), op: ExtentFlush { repair_id: ReconciliationId(264), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41112 Sep 22 23:15:08.780 INFO [1] client ExtentFlush { repair_id: ReconciliationId(264), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41113 Sep 22 23:15:08.781 INFO [2] received reconcile message
41114 Sep 22 23:15:08.781 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(264), op: ExtentFlush { repair_id: ReconciliationId(264), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41115 Sep 22 23:15:08.781 INFO [2] client ExtentFlush { repair_id: ReconciliationId(264), extent_id: 3, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41116 Sep 22 23:15:08.781 DEBG 264 Flush extent 3 with f:2 g:2
41117 Sep 22 23:15:08.781 DEBG Flush just extent 3 with f:2 and g:2
41118 Sep 22 23:15:08.781 DEBG [1] It's time to notify for 264
41119 Sep 22 23:15:08.781 INFO Completion from [1] id:264 status:true
41120 Sep 22 23:15:08.781 INFO [265/752] Repair commands completed
41121 Sep 22 23:15:08.781 INFO Pop front: ReconcileIO { id: ReconciliationId(265), op: ExtentClose { repair_id: ReconciliationId(265), extent_id: 3 }, state: ClientData([New, New, New]) }
41122 Sep 22 23:15:08.781 INFO Sent repair work, now wait for resp
41123 Sep 22 23:15:08.781 INFO [0] received reconcile message
41124 Sep 22 23:15:08.781 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(265), op: ExtentClose { repair_id: ReconciliationId(265), extent_id: 3 }, state: ClientData([InProgress, New, New]) }, : downstairs
41125 Sep 22 23:15:08.781 INFO [0] client ExtentClose { repair_id: ReconciliationId(265), extent_id: 3 }
41126 Sep 22 23:15:08.781 INFO [1] received reconcile message
41127 Sep 22 23:15:08.781 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(265), op: ExtentClose { repair_id: ReconciliationId(265), extent_id: 3 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41128 Sep 22 23:15:08.781 INFO [1] client ExtentClose { repair_id: ReconciliationId(265), extent_id: 3 }
41129 Sep 22 23:15:08.781 INFO [2] received reconcile message
41130 Sep 22 23:15:08.781 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(265), op: ExtentClose { repair_id: ReconciliationId(265), extent_id: 3 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41131 Sep 22 23:15:08.781 INFO [2] client ExtentClose { repair_id: ReconciliationId(265), extent_id: 3 }
41132 Sep 22 23:15:08.781 DEBG 265 Close extent 3
41133 Sep 22 23:15:08.782 DEBG 265 Close extent 3
41134 Sep 22 23:15:08.782 DEBG 265 Close extent 3
41135 Sep 22 23:15:08.782 DEBG [2] It's time to notify for 265
41136 Sep 22 23:15:08.782 INFO Completion from [2] id:265 status:true
41137 Sep 22 23:15:08.782 INFO [266/752] Repair commands completed
41138 Sep 22 23:15:08.782 INFO Pop front: ReconcileIO { id: ReconciliationId(266), op: ExtentRepair { repair_id: ReconciliationId(266), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41139 Sep 22 23:15:08.782 INFO Sent repair work, now wait for resp
41140 Sep 22 23:15:08.782 INFO [0] received reconcile message
41141 Sep 22 23:15:08.782 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(266), op: ExtentRepair { repair_id: ReconciliationId(266), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41142 Sep 22 23:15:08.782 INFO [0] client ExtentRepair { repair_id: ReconciliationId(266), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41143 Sep 22 23:15:08.782 INFO [0] Sending repair request ReconciliationId(266)
41144 Sep 22 23:15:08.783 INFO [1] received reconcile message
41145 Sep 22 23:15:08.783 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(266), op: ExtentRepair { repair_id: ReconciliationId(266), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41146 Sep 22 23:15:08.783 INFO [1] client ExtentRepair { repair_id: ReconciliationId(266), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41147 Sep 22 23:15:08.783 INFO [1] No action required ReconciliationId(266)
41148 Sep 22 23:15:08.783 INFO [2] received reconcile message
41149 Sep 22 23:15:08.783 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(266), op: ExtentRepair { repair_id: ReconciliationId(266), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41150 Sep 22 23:15:08.783 INFO [2] client ExtentRepair { repair_id: ReconciliationId(266), extent_id: 3, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41151 Sep 22 23:15:08.783 INFO [2] No action required ReconciliationId(266)
41152 Sep 22 23:15:08.783 DEBG 266 Repair extent 3 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
41153 Sep 22 23:15:08.783 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/003.copy"
41154 Sep 22 23:15:08.847 INFO accepted connection, remote_addr: 127.0.0.1:44396, local_addr: 127.0.0.1:46213, task: repair
41155 Sep 22 23:15:08.847 TRCE incoming request, uri: /extent/3/files, method: GET, req_id: 425a1a92-8976-4cdb-9dd8-fd3fac6fe0d0, remote_addr: 127.0.0.1:44396, local_addr: 127.0.0.1:46213, task: repair
41156 Sep 22 23:15:08.847 INFO request completed, latency_us: 271, response_code: 200, uri: /extent/3/files, method: GET, req_id: 425a1a92-8976-4cdb-9dd8-fd3fac6fe0d0, remote_addr: 127.0.0.1:44396, local_addr: 127.0.0.1:46213, task: repair
41157 Sep 22 23:15:08.848 INFO eid:3 Found repair files: ["003", "003.db"]
41158 Sep 22 23:15:08.848 TRCE incoming request, uri: /newextent/3/data, method: GET, req_id: c9fe7f27-3e91-4497-ae33-9a0f28068798, remote_addr: 127.0.0.1:44396, local_addr: 127.0.0.1:46213, task: repair
41159 Sep 22 23:15:08.848 INFO request completed, latency_us: 317, response_code: 200, uri: /newextent/3/data, method: GET, req_id: c9fe7f27-3e91-4497-ae33-9a0f28068798, remote_addr: 127.0.0.1:44396, local_addr: 127.0.0.1:46213, task: repair
41160 Sep 22 23:15:08.853 TRCE incoming request, uri: /newextent/3/db, method: GET, req_id: d89aaba0-20b5-49b1-b550-0ec478d8eee1, remote_addr: 127.0.0.1:44396, local_addr: 127.0.0.1:46213, task: repair
41161 Sep 22 23:15:08.854 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/3/db, method: GET, req_id: d89aaba0-20b5-49b1-b550-0ec478d8eee1, remote_addr: 127.0.0.1:44396, local_addr: 127.0.0.1:46213, task: repair
41162 Sep 22 23:15:08.855 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/003.copy" to "/tmp/downstairs-vrx8aK6L/00/000/003.replace"
41163 Sep 22 23:15:08.855 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41164 Sep 22 23:15:08.856 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/003.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
41165 Sep 22 23:15:08.857 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/003"
41166 Sep 22 23:15:08.857 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/003.db"
41167 Sep 22 23:15:08.857 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41168 Sep 22 23:15:08.857 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/003.replace" to "/tmp/downstairs-vrx8aK6L/00/000/003.completed"
41169 Sep 22 23:15:08.857 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41170 Sep 22 23:15:08.857 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41171 Sep 22 23:15:08.857 DEBG [0] It's time to notify for 266
41172 Sep 22 23:15:08.857 INFO Completion from [0] id:266 status:true
41173 Sep 22 23:15:08.857 INFO [267/752] Repair commands completed
41174 Sep 22 23:15:08.857 INFO Pop front: ReconcileIO { id: ReconciliationId(267), op: ExtentReopen { repair_id: ReconciliationId(267), extent_id: 3 }, state: ClientData([New, New, New]) }
41175 Sep 22 23:15:08.857 INFO Sent repair work, now wait for resp
41176 Sep 22 23:15:08.857 INFO [0] received reconcile message
41177 Sep 22 23:15:08.857 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(267), op: ExtentReopen { repair_id: ReconciliationId(267), extent_id: 3 }, state: ClientData([InProgress, New, New]) }, : downstairs
41178 Sep 22 23:15:08.857 INFO [0] client ExtentReopen { repair_id: ReconciliationId(267), extent_id: 3 }
41179 Sep 22 23:15:08.857 INFO [1] received reconcile message
41180 Sep 22 23:15:08.857 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(267), op: ExtentReopen { repair_id: ReconciliationId(267), extent_id: 3 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41181 Sep 22 23:15:08.857 INFO [1] client ExtentReopen { repair_id: ReconciliationId(267), extent_id: 3 }
41182 Sep 22 23:15:08.857 INFO [2] received reconcile message
41183 Sep 22 23:15:08.857 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(267), op: ExtentReopen { repair_id: ReconciliationId(267), extent_id: 3 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41184 Sep 22 23:15:08.858 INFO [2] client ExtentReopen { repair_id: ReconciliationId(267), extent_id: 3 }
41185 Sep 22 23:15:08.858 DEBG 267 Reopen extent 3
41186 Sep 22 23:15:08.858 DEBG 267 Reopen extent 3
41187 Sep 22 23:15:08.859 DEBG 267 Reopen extent 3
41188 Sep 22 23:15:08.860 DEBG [2] It's time to notify for 267
41189 Sep 22 23:15:08.860 INFO Completion from [2] id:267 status:true
41190 Sep 22 23:15:08.860 INFO [268/752] Repair commands completed
41191 Sep 22 23:15:08.860 INFO Pop front: ReconcileIO { id: ReconciliationId(268), op: ExtentFlush { repair_id: ReconciliationId(268), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41192 Sep 22 23:15:08.860 INFO Sent repair work, now wait for resp
41193 Sep 22 23:15:08.860 INFO [0] received reconcile message
41194 Sep 22 23:15:08.860 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(268), op: ExtentFlush { repair_id: ReconciliationId(268), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41195 Sep 22 23:15:08.860 INFO [0] client ExtentFlush { repair_id: ReconciliationId(268), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41196 Sep 22 23:15:08.860 INFO [1] received reconcile message
41197 Sep 22 23:15:08.860 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(268), op: ExtentFlush { repair_id: ReconciliationId(268), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41198 Sep 22 23:15:08.860 INFO [1] client ExtentFlush { repair_id: ReconciliationId(268), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41199 Sep 22 23:15:08.860 INFO [2] received reconcile message
41200 Sep 22 23:15:08.860 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(268), op: ExtentFlush { repair_id: ReconciliationId(268), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41201 Sep 22 23:15:08.860 INFO [2] client ExtentFlush { repair_id: ReconciliationId(268), extent_id: 54, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41202 Sep 22 23:15:08.860 DEBG 268 Flush extent 54 with f:2 g:2
41203 Sep 22 23:15:08.860 DEBG Flush just extent 54 with f:2 and g:2
41204 Sep 22 23:15:08.860 DEBG [1] It's time to notify for 268
41205 Sep 22 23:15:08.860 INFO Completion from [1] id:268 status:true
41206 Sep 22 23:15:08.860 INFO [269/752] Repair commands completed
41207 Sep 22 23:15:08.860 INFO Pop front: ReconcileIO { id: ReconciliationId(269), op: ExtentClose { repair_id: ReconciliationId(269), extent_id: 54 }, state: ClientData([New, New, New]) }
41208 Sep 22 23:15:08.860 INFO Sent repair work, now wait for resp
41209 Sep 22 23:15:08.860 INFO [0] received reconcile message
41210 Sep 22 23:15:08.860 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(269), op: ExtentClose { repair_id: ReconciliationId(269), extent_id: 54 }, state: ClientData([InProgress, New, New]) }, : downstairs
41211 Sep 22 23:15:08.860 INFO [0] client ExtentClose { repair_id: ReconciliationId(269), extent_id: 54 }
41212 Sep 22 23:15:08.860 INFO [1] received reconcile message
41213 Sep 22 23:15:08.860 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(269), op: ExtentClose { repair_id: ReconciliationId(269), extent_id: 54 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41214 Sep 22 23:15:08.860 INFO [1] client ExtentClose { repair_id: ReconciliationId(269), extent_id: 54 }
41215 Sep 22 23:15:08.860 INFO [2] received reconcile message
41216 Sep 22 23:15:08.860 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(269), op: ExtentClose { repair_id: ReconciliationId(269), extent_id: 54 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41217 Sep 22 23:15:08.861 INFO [2] client ExtentClose { repair_id: ReconciliationId(269), extent_id: 54 }
41218 Sep 22 23:15:08.861 DEBG 269 Close extent 54
41219 Sep 22 23:15:08.861 DEBG 269 Close extent 54
41220 Sep 22 23:15:08.861 DEBG 269 Close extent 54
41221 Sep 22 23:15:08.862 DEBG [2] It's time to notify for 269
41222 Sep 22 23:15:08.862 INFO Completion from [2] id:269 status:true
41223 Sep 22 23:15:08.862 INFO [270/752] Repair commands completed
41224 Sep 22 23:15:08.862 INFO Pop front: ReconcileIO { id: ReconciliationId(270), op: ExtentRepair { repair_id: ReconciliationId(270), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41225 Sep 22 23:15:08.862 INFO Sent repair work, now wait for resp
41226 Sep 22 23:15:08.862 INFO [0] received reconcile message
41227 Sep 22 23:15:08.862 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(270), op: ExtentRepair { repair_id: ReconciliationId(270), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41228 Sep 22 23:15:08.862 INFO [0] client ExtentRepair { repair_id: ReconciliationId(270), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41229 Sep 22 23:15:08.862 INFO [0] Sending repair request ReconciliationId(270)
41230 Sep 22 23:15:08.862 INFO [1] received reconcile message
41231 Sep 22 23:15:08.862 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(270), op: ExtentRepair { repair_id: ReconciliationId(270), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41232 Sep 22 23:15:08.862 INFO [1] client ExtentRepair { repair_id: ReconciliationId(270), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41233 Sep 22 23:15:08.862 INFO [1] No action required ReconciliationId(270)
41234 Sep 22 23:15:08.862 INFO [2] received reconcile message
41235 Sep 22 23:15:08.862 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(270), op: ExtentRepair { repair_id: ReconciliationId(270), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41236 Sep 22 23:15:08.862 INFO [2] client ExtentRepair { repair_id: ReconciliationId(270), extent_id: 54, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41237 Sep 22 23:15:08.862 INFO [2] No action required ReconciliationId(270)
41238 Sep 22 23:15:08.862 DEBG 270 Repair extent 54 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
41239 Sep 22 23:15:08.862 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/036.copy"
41240 Sep 22 23:15:08.926 INFO accepted connection, remote_addr: 127.0.0.1:61574, local_addr: 127.0.0.1:46213, task: repair
41241 Sep 22 23:15:08.926 TRCE incoming request, uri: /extent/54/files, method: GET, req_id: 928d57ed-1600-4df4-9eaa-2ca91051d98c, remote_addr: 127.0.0.1:61574, local_addr: 127.0.0.1:46213, task: repair
41242 Sep 22 23:15:08.927 INFO request completed, latency_us: 277, response_code: 200, uri: /extent/54/files, method: GET, req_id: 928d57ed-1600-4df4-9eaa-2ca91051d98c, remote_addr: 127.0.0.1:61574, local_addr: 127.0.0.1:46213, task: repair
41243 Sep 22 23:15:08.927 INFO eid:54 Found repair files: ["036", "036.db"]
41244 Sep 22 23:15:08.927 TRCE incoming request, uri: /newextent/54/data, method: GET, req_id: 5cc2d47e-6f21-495e-9ed3-9fed6fbc16be, remote_addr: 127.0.0.1:61574, local_addr: 127.0.0.1:46213, task: repair
41245 Sep 22 23:15:08.928 INFO request completed, latency_us: 369, response_code: 200, uri: /newextent/54/data, method: GET, req_id: 5cc2d47e-6f21-495e-9ed3-9fed6fbc16be, remote_addr: 127.0.0.1:61574, local_addr: 127.0.0.1:46213, task: repair
41246 Sep 22 23:15:08.933 TRCE incoming request, uri: /newextent/54/db, method: GET, req_id: fc21011c-17ac-4efc-a91f-d4faaa8d624f, remote_addr: 127.0.0.1:61574, local_addr: 127.0.0.1:46213, task: repair
41247 Sep 22 23:15:08.933 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/54/db, method: GET, req_id: fc21011c-17ac-4efc-a91f-d4faaa8d624f, remote_addr: 127.0.0.1:61574, local_addr: 127.0.0.1:46213, task: repair
41248 Sep 22 23:15:08.934 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/036.copy" to "/tmp/downstairs-vrx8aK6L/00/000/036.replace"
41249 Sep 22 23:15:08.934 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41250 Sep 22 23:15:08.936 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/036.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
41251 Sep 22 23:15:08.936 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/036"
41252 Sep 22 23:15:08.936 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/036.db"
41253 Sep 22 23:15:08.936 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41254 Sep 22 23:15:08.936 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/036.replace" to "/tmp/downstairs-vrx8aK6L/00/000/036.completed"
41255 Sep 22 23:15:08.936 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41256 Sep 22 23:15:08.936 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41257 Sep 22 23:15:08.937 DEBG [0] It's time to notify for 270
41258 Sep 22 23:15:08.937 INFO Completion from [0] id:270 status:true
41259 Sep 22 23:15:08.937 INFO [271/752] Repair commands completed
41260 Sep 22 23:15:08.937 INFO Pop front: ReconcileIO { id: ReconciliationId(271), op: ExtentReopen { repair_id: ReconciliationId(271), extent_id: 54 }, state: ClientData([New, New, New]) }
41261 Sep 22 23:15:08.937 INFO Sent repair work, now wait for resp
41262 Sep 22 23:15:08.937 INFO [0] received reconcile message
41263 Sep 22 23:15:08.937 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(271), op: ExtentReopen { repair_id: ReconciliationId(271), extent_id: 54 }, state: ClientData([InProgress, New, New]) }, : downstairs
41264 Sep 22 23:15:08.937 INFO [0] client ExtentReopen { repair_id: ReconciliationId(271), extent_id: 54 }
41265 Sep 22 23:15:08.937 INFO [1] received reconcile message
41266 Sep 22 23:15:08.937 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(271), op: ExtentReopen { repair_id: ReconciliationId(271), extent_id: 54 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41267 Sep 22 23:15:08.937 INFO [1] client ExtentReopen { repair_id: ReconciliationId(271), extent_id: 54 }
41268 Sep 22 23:15:08.937 INFO [2] received reconcile message
41269 Sep 22 23:15:08.937 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(271), op: ExtentReopen { repair_id: ReconciliationId(271), extent_id: 54 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41270 Sep 22 23:15:08.937 INFO [2] client ExtentReopen { repair_id: ReconciliationId(271), extent_id: 54 }
41271 Sep 22 23:15:08.937 DEBG 271 Reopen extent 54
41272 Sep 22 23:15:08.938 DEBG 271 Reopen extent 54
41273 Sep 22 23:15:08.939 DEBG 271 Reopen extent 54
41274 Sep 22 23:15:08.939 DEBG [2] It's time to notify for 271
41275 Sep 22 23:15:08.939 INFO Completion from [2] id:271 status:true
41276 Sep 22 23:15:08.939 INFO [272/752] Repair commands completed
41277 Sep 22 23:15:08.939 INFO Pop front: ReconcileIO { id: ReconciliationId(272), op: ExtentFlush { repair_id: ReconciliationId(272), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41278 Sep 22 23:15:08.939 INFO Sent repair work, now wait for resp
41279 Sep 22 23:15:08.939 INFO [0] received reconcile message
41280 Sep 22 23:15:08.939 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(272), op: ExtentFlush { repair_id: ReconciliationId(272), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41281 Sep 22 23:15:08.939 INFO [0] client ExtentFlush { repair_id: ReconciliationId(272), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41282 Sep 22 23:15:08.939 INFO [1] received reconcile message
41283 Sep 22 23:15:08.940 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(272), op: ExtentFlush { repair_id: ReconciliationId(272), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41284 Sep 22 23:15:08.940 INFO [1] client ExtentFlush { repair_id: ReconciliationId(272), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41285 Sep 22 23:15:08.940 INFO [2] received reconcile message
41286 Sep 22 23:15:08.940 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(272), op: ExtentFlush { repair_id: ReconciliationId(272), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41287 Sep 22 23:15:08.940 INFO [2] client ExtentFlush { repair_id: ReconciliationId(272), extent_id: 58, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41288 Sep 22 23:15:08.940 DEBG 272 Flush extent 58 with f:2 g:2
41289 Sep 22 23:15:08.940 DEBG Flush just extent 58 with f:2 and g:2
41290 Sep 22 23:15:08.940 DEBG [1] It's time to notify for 272
41291 Sep 22 23:15:08.940 INFO Completion from [1] id:272 status:true
41292 Sep 22 23:15:08.940 INFO [273/752] Repair commands completed
41293 Sep 22 23:15:08.940 INFO Pop front: ReconcileIO { id: ReconciliationId(273), op: ExtentClose { repair_id: ReconciliationId(273), extent_id: 58 }, state: ClientData([New, New, New]) }
41294 Sep 22 23:15:08.940 INFO Sent repair work, now wait for resp
41295 Sep 22 23:15:08.940 INFO [0] received reconcile message
41296 Sep 22 23:15:08.940 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(273), op: ExtentClose { repair_id: ReconciliationId(273), extent_id: 58 }, state: ClientData([InProgress, New, New]) }, : downstairs
41297 Sep 22 23:15:08.940 INFO [0] client ExtentClose { repair_id: ReconciliationId(273), extent_id: 58 }
41298 Sep 22 23:15:08.940 INFO [1] received reconcile message
41299 Sep 22 23:15:08.940 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(273), op: ExtentClose { repair_id: ReconciliationId(273), extent_id: 58 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41300 Sep 22 23:15:08.940 INFO [1] client ExtentClose { repair_id: ReconciliationId(273), extent_id: 58 }
41301 Sep 22 23:15:08.940 INFO [2] received reconcile message
41302 Sep 22 23:15:08.940 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(273), op: ExtentClose { repair_id: ReconciliationId(273), extent_id: 58 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41303 Sep 22 23:15:08.940 INFO [2] client ExtentClose { repair_id: ReconciliationId(273), extent_id: 58 }
41304 Sep 22 23:15:08.940 DEBG 273 Close extent 58
41305 Sep 22 23:15:08.941 DEBG 273 Close extent 58
41306 Sep 22 23:15:08.941 DEBG 273 Close extent 58
41307 Sep 22 23:15:08.941 DEBG [2] It's time to notify for 273
41308 Sep 22 23:15:08.941 INFO Completion from [2] id:273 status:true
41309 Sep 22 23:15:08.941 INFO [274/752] Repair commands completed
41310 Sep 22 23:15:08.941 INFO Pop front: ReconcileIO { id: ReconciliationId(274), op: ExtentRepair { repair_id: ReconciliationId(274), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41311 Sep 22 23:15:08.941 INFO Sent repair work, now wait for resp
41312 Sep 22 23:15:08.941 INFO [0] received reconcile message
41313 Sep 22 23:15:08.941 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(274), op: ExtentRepair { repair_id: ReconciliationId(274), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41314 Sep 22 23:15:08.941 INFO [0] client ExtentRepair { repair_id: ReconciliationId(274), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41315 Sep 22 23:15:08.942 INFO [0] Sending repair request ReconciliationId(274)
41316 Sep 22 23:15:08.942 INFO [1] received reconcile message
41317 Sep 22 23:15:08.942 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(274), op: ExtentRepair { repair_id: ReconciliationId(274), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41318 Sep 22 23:15:08.942 INFO [1] client ExtentRepair { repair_id: ReconciliationId(274), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41319 Sep 22 23:15:08.942 INFO [1] No action required ReconciliationId(274)
41320 Sep 22 23:15:08.942 INFO [2] received reconcile message
41321 Sep 22 23:15:08.942 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(274), op: ExtentRepair { repair_id: ReconciliationId(274), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41322 Sep 22 23:15:08.942 INFO [2] client ExtentRepair { repair_id: ReconciliationId(274), extent_id: 58, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41323 Sep 22 23:15:08.942 INFO [2] No action required ReconciliationId(274)
41324 Sep 22 23:15:08.942 DEBG 274 Repair extent 58 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
41325 Sep 22 23:15:08.942 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/03A.copy"
41326 Sep 22 23:15:09.006 DEBG [rc] retire 1079 clears [JobId(1078), JobId(1079)], : downstairs
41327 Sep 22 23:15:09.007 DEBG IO Flush 1081 has deps [JobId(1080)]
41328 Sep 22 23:15:09.007 INFO [lossy] skipping 1080
41329 Sep 22 23:15:09.007 INFO [lossy] skipping 1080
41330 Sep 22 23:15:09.007 INFO [lossy] skipping 1080
41331 Sep 22 23:15:09.007 INFO accepted connection, remote_addr: 127.0.0.1:59871, local_addr: 127.0.0.1:46213, task: repair
41332 Sep 22 23:15:09.007 TRCE incoming request, uri: /extent/58/files, method: GET, req_id: 18219db5-28e5-4163-ae6f-aa8ec777c36e, remote_addr: 127.0.0.1:59871, local_addr: 127.0.0.1:46213, task: repair
41333 Sep 22 23:15:09.008 INFO request completed, latency_us: 260, response_code: 200, uri: /extent/58/files, method: GET, req_id: 18219db5-28e5-4163-ae6f-aa8ec777c36e, remote_addr: 127.0.0.1:59871, local_addr: 127.0.0.1:46213, task: repair
41334 Sep 22 23:15:09.008 INFO eid:58 Found repair files: ["03A", "03A.db"]
41335 Sep 22 23:15:09.008 TRCE incoming request, uri: /newextent/58/data, method: GET, req_id: ba6329a9-77a3-4181-b2a6-8658cff93bf9, remote_addr: 127.0.0.1:59871, local_addr: 127.0.0.1:46213, task: repair
41336 Sep 22 23:15:09.009 INFO request completed, latency_us: 360, response_code: 200, uri: /newextent/58/data, method: GET, req_id: ba6329a9-77a3-4181-b2a6-8658cff93bf9, remote_addr: 127.0.0.1:59871, local_addr: 127.0.0.1:46213, task: repair
41337 Sep 22 23:15:09.013 DEBG Read :1080 deps:[JobId(1079)] res:true
41338 Sep 22 23:15:09.014 TRCE incoming request, uri: /newextent/58/db, method: GET, req_id: 889bebd9-31e2-4d74-8651-714e3d49581e, remote_addr: 127.0.0.1:59871, local_addr: 127.0.0.1:46213, task: repair
41339 Sep 22 23:15:09.014 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/58/db, method: GET, req_id: 889bebd9-31e2-4d74-8651-714e3d49581e, remote_addr: 127.0.0.1:59871, local_addr: 127.0.0.1:46213, task: repair
41340 Sep 22 23:15:09.015 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/03A.copy" to "/tmp/downstairs-vrx8aK6L/00/000/03A.replace"
41341 Sep 22 23:15:09.015 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41342 Sep 22 23:15:09.016 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/03A.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
41343 Sep 22 23:15:09.017 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/03A"
41344 Sep 22 23:15:09.017 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/03A.db"
41345 Sep 22 23:15:09.017 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41346 Sep 22 23:15:09.017 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/03A.replace" to "/tmp/downstairs-vrx8aK6L/00/000/03A.completed"
41347 Sep 22 23:15:09.017 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41348 Sep 22 23:15:09.017 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41349 Sep 22 23:15:09.017 DEBG [0] It's time to notify for 274
41350 Sep 22 23:15:09.017 INFO Completion from [0] id:274 status:true
41351 Sep 22 23:15:09.017 INFO [275/752] Repair commands completed
41352 Sep 22 23:15:09.017 INFO Pop front: ReconcileIO { id: ReconciliationId(275), op: ExtentReopen { repair_id: ReconciliationId(275), extent_id: 58 }, state: ClientData([New, New, New]) }
41353 Sep 22 23:15:09.017 INFO Sent repair work, now wait for resp
41354 Sep 22 23:15:09.017 INFO [0] received reconcile message
41355 Sep 22 23:15:09.017 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(275), op: ExtentReopen { repair_id: ReconciliationId(275), extent_id: 58 }, state: ClientData([InProgress, New, New]) }, : downstairs
41356 Sep 22 23:15:09.017 INFO [0] client ExtentReopen { repair_id: ReconciliationId(275), extent_id: 58 }
41357 Sep 22 23:15:09.017 INFO [1] received reconcile message
41358 Sep 22 23:15:09.017 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(275), op: ExtentReopen { repair_id: ReconciliationId(275), extent_id: 58 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41359 Sep 22 23:15:09.017 INFO [1] client ExtentReopen { repair_id: ReconciliationId(275), extent_id: 58 }
41360 Sep 22 23:15:09.018 INFO [2] received reconcile message
41361 Sep 22 23:15:09.018 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(275), op: ExtentReopen { repair_id: ReconciliationId(275), extent_id: 58 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41362 Sep 22 23:15:09.018 INFO [2] client ExtentReopen { repair_id: ReconciliationId(275), extent_id: 58 }
41363 Sep 22 23:15:09.018 DEBG 275 Reopen extent 58
41364 Sep 22 23:15:09.018 DEBG 275 Reopen extent 58
41365 Sep 22 23:15:09.019 DEBG 275 Reopen extent 58
41366 Sep 22 23:15:09.020 DEBG [2] It's time to notify for 275
41367 Sep 22 23:15:09.020 INFO Completion from [2] id:275 status:true
41368 Sep 22 23:15:09.020 INFO [276/752] Repair commands completed
41369 Sep 22 23:15:09.020 INFO Pop front: ReconcileIO { id: ReconciliationId(276), op: ExtentFlush { repair_id: ReconciliationId(276), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41370 Sep 22 23:15:09.020 INFO Sent repair work, now wait for resp
41371 Sep 22 23:15:09.020 INFO [0] received reconcile message
41372 Sep 22 23:15:09.020 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(276), op: ExtentFlush { repair_id: ReconciliationId(276), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41373 Sep 22 23:15:09.020 INFO [0] client ExtentFlush { repair_id: ReconciliationId(276), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41374 Sep 22 23:15:09.020 INFO [1] received reconcile message
41375 Sep 22 23:15:09.020 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(276), op: ExtentFlush { repair_id: ReconciliationId(276), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41376 Sep 22 23:15:09.020 INFO [1] client ExtentFlush { repair_id: ReconciliationId(276), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41377 Sep 22 23:15:09.020 INFO [2] received reconcile message
41378 Sep 22 23:15:09.020 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(276), op: ExtentFlush { repair_id: ReconciliationId(276), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41379 Sep 22 23:15:09.020 INFO [2] client ExtentFlush { repair_id: ReconciliationId(276), extent_id: 18, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41380 Sep 22 23:15:09.020 DEBG 276 Flush extent 18 with f:2 g:2
41381 Sep 22 23:15:09.020 DEBG Flush just extent 18 with f:2 and g:2
41382 Sep 22 23:15:09.020 DEBG [1] It's time to notify for 276
41383 Sep 22 23:15:09.020 INFO Completion from [1] id:276 status:true
41384 Sep 22 23:15:09.020 INFO [277/752] Repair commands completed
41385 Sep 22 23:15:09.020 INFO Pop front: ReconcileIO { id: ReconciliationId(277), op: ExtentClose { repair_id: ReconciliationId(277), extent_id: 18 }, state: ClientData([New, New, New]) }
41386 Sep 22 23:15:09.020 INFO Sent repair work, now wait for resp
41387 Sep 22 23:15:09.020 INFO [0] received reconcile message
41388 Sep 22 23:15:09.020 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(277), op: ExtentClose { repair_id: ReconciliationId(277), extent_id: 18 }, state: ClientData([InProgress, New, New]) }, : downstairs
41389 Sep 22 23:15:09.020 INFO [0] client ExtentClose { repair_id: ReconciliationId(277), extent_id: 18 }
41390 Sep 22 23:15:09.020 INFO [1] received reconcile message
41391 Sep 22 23:15:09.020 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(277), op: ExtentClose { repair_id: ReconciliationId(277), extent_id: 18 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41392 Sep 22 23:15:09.020 INFO [1] client ExtentClose { repair_id: ReconciliationId(277), extent_id: 18 }
41393 Sep 22 23:15:09.020 INFO [2] received reconcile message
41394 Sep 22 23:15:09.020 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(277), op: ExtentClose { repair_id: ReconciliationId(277), extent_id: 18 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41395 Sep 22 23:15:09.020 INFO [2] client ExtentClose { repair_id: ReconciliationId(277), extent_id: 18 }
41396 Sep 22 23:15:09.021 DEBG 277 Close extent 18
41397 Sep 22 23:15:09.021 DEBG 277 Close extent 18
41398 Sep 22 23:15:09.021 DEBG 277 Close extent 18
41399 Sep 22 23:15:09.022 DEBG [2] It's time to notify for 277
41400 Sep 22 23:15:09.022 INFO Completion from [2] id:277 status:true
41401 Sep 22 23:15:09.022 INFO [278/752] Repair commands completed
41402 Sep 22 23:15:09.022 INFO Pop front: ReconcileIO { id: ReconciliationId(278), op: ExtentRepair { repair_id: ReconciliationId(278), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41403 Sep 22 23:15:09.022 INFO Sent repair work, now wait for resp
41404 Sep 22 23:15:09.022 INFO [0] received reconcile message
41405 Sep 22 23:15:09.022 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(278), op: ExtentRepair { repair_id: ReconciliationId(278), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41406 Sep 22 23:15:09.022 INFO [0] client ExtentRepair { repair_id: ReconciliationId(278), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41407 Sep 22 23:15:09.022 INFO [0] Sending repair request ReconciliationId(278)
41408 Sep 22 23:15:09.022 INFO [1] received reconcile message
41409 Sep 22 23:15:09.022 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(278), op: ExtentRepair { repair_id: ReconciliationId(278), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41410 Sep 22 23:15:09.022 INFO [1] client ExtentRepair { repair_id: ReconciliationId(278), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41411 Sep 22 23:15:09.022 INFO [1] No action required ReconciliationId(278)
41412 Sep 22 23:15:09.022 INFO [2] received reconcile message
41413 Sep 22 23:15:09.022 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(278), op: ExtentRepair { repair_id: ReconciliationId(278), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41414 Sep 22 23:15:09.022 INFO [2] client ExtentRepair { repair_id: ReconciliationId(278), extent_id: 18, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41415 Sep 22 23:15:09.022 INFO [2] No action required ReconciliationId(278)
41416 Sep 22 23:15:09.022 DEBG 278 Repair extent 18 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
41417 Sep 22 23:15:09.022 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/012.copy"
41418 Sep 22 23:15:09.041 DEBG Read :1080 deps:[JobId(1079)] res:true
41419 Sep 22 23:15:09.065 DEBG Flush :1081 extent_limit None deps:[JobId(1080)] res:true f:30 g:1
41420 Sep 22 23:15:09.066 INFO [lossy] sleeping 1 second
41421 Sep 22 23:15:09.086 INFO accepted connection, remote_addr: 127.0.0.1:63513, local_addr: 127.0.0.1:46213, task: repair
41422 Sep 22 23:15:09.086 TRCE incoming request, uri: /extent/18/files, method: GET, req_id: 35996c0e-b820-4ceb-9a8c-30ee8c71fd40, remote_addr: 127.0.0.1:63513, local_addr: 127.0.0.1:46213, task: repair
41423 Sep 22 23:15:09.086 INFO request completed, latency_us: 258, response_code: 200, uri: /extent/18/files, method: GET, req_id: 35996c0e-b820-4ceb-9a8c-30ee8c71fd40, remote_addr: 127.0.0.1:63513, local_addr: 127.0.0.1:46213, task: repair
41424 Sep 22 23:15:09.087 INFO eid:18 Found repair files: ["012", "012.db"]
41425 Sep 22 23:15:09.087 TRCE incoming request, uri: /newextent/18/data, method: GET, req_id: 6d31bf7d-4b15-4e1f-a8c9-1513d1dbc2b2, remote_addr: 127.0.0.1:63513, local_addr: 127.0.0.1:46213, task: repair
41426 Sep 22 23:15:09.087 INFO request completed, latency_us: 325, response_code: 200, uri: /newextent/18/data, method: GET, req_id: 6d31bf7d-4b15-4e1f-a8c9-1513d1dbc2b2, remote_addr: 127.0.0.1:63513, local_addr: 127.0.0.1:46213, task: repair
41427 Sep 22 23:15:09.092 TRCE incoming request, uri: /newextent/18/db, method: GET, req_id: 6351641e-f6ab-4547-838b-56b8e37a6d52, remote_addr: 127.0.0.1:63513, local_addr: 127.0.0.1:46213, task: repair
41428 Sep 22 23:15:09.093 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/18/db, method: GET, req_id: 6351641e-f6ab-4547-838b-56b8e37a6d52, remote_addr: 127.0.0.1:63513, local_addr: 127.0.0.1:46213, task: repair
41429 Sep 22 23:15:09.094 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/012.copy" to "/tmp/downstairs-vrx8aK6L/00/000/012.replace"
41430 Sep 22 23:15:09.094 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41431 Sep 22 23:15:09.095 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/012.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
41432 Sep 22 23:15:09.095 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/012"
41433 Sep 22 23:15:09.095 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/012.db"
41434 Sep 22 23:15:09.095 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41435 Sep 22 23:15:09.095 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/012.replace" to "/tmp/downstairs-vrx8aK6L/00/000/012.completed"
41436 Sep 22 23:15:09.095 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41437 Sep 22 23:15:09.095 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41438 Sep 22 23:15:09.096 DEBG [0] It's time to notify for 278
41439 Sep 22 23:15:09.096 INFO Completion from [0] id:278 status:true
41440 Sep 22 23:15:09.096 INFO [279/752] Repair commands completed
41441 Sep 22 23:15:09.096 INFO Pop front: ReconcileIO { id: ReconciliationId(279), op: ExtentReopen { repair_id: ReconciliationId(279), extent_id: 18 }, state: ClientData([New, New, New]) }
41442 Sep 22 23:15:09.096 INFO Sent repair work, now wait for resp
41443 Sep 22 23:15:09.096 INFO [0] received reconcile message
41444 Sep 22 23:15:09.096 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(279), op: ExtentReopen { repair_id: ReconciliationId(279), extent_id: 18 }, state: ClientData([InProgress, New, New]) }, : downstairs
41445 Sep 22 23:15:09.096 INFO [0] client ExtentReopen { repair_id: ReconciliationId(279), extent_id: 18 }
41446 Sep 22 23:15:09.096 INFO [1] received reconcile message
41447 Sep 22 23:15:09.096 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(279), op: ExtentReopen { repair_id: ReconciliationId(279), extent_id: 18 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41448 Sep 22 23:15:09.096 INFO [1] client ExtentReopen { repair_id: ReconciliationId(279), extent_id: 18 }
41449 Sep 22 23:15:09.096 INFO [2] received reconcile message
41450 Sep 22 23:15:09.096 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(279), op: ExtentReopen { repair_id: ReconciliationId(279), extent_id: 18 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41451 Sep 22 23:15:09.096 INFO [2] client ExtentReopen { repair_id: ReconciliationId(279), extent_id: 18 }
41452 Sep 22 23:15:09.096 DEBG 279 Reopen extent 18
41453 Sep 22 23:15:09.097 DEBG 279 Reopen extent 18
41454 Sep 22 23:15:09.097 DEBG 279 Reopen extent 18
41455 Sep 22 23:15:09.098 DEBG [2] It's time to notify for 279
41456 Sep 22 23:15:09.098 INFO Completion from [2] id:279 status:true
41457 Sep 22 23:15:09.098 INFO [280/752] Repair commands completed
41458 Sep 22 23:15:09.098 INFO Pop front: ReconcileIO { id: ReconciliationId(280), op: ExtentFlush { repair_id: ReconciliationId(280), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41459 Sep 22 23:15:09.098 INFO Sent repair work, now wait for resp
41460 Sep 22 23:15:09.098 INFO [0] received reconcile message
41461 Sep 22 23:15:09.098 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(280), op: ExtentFlush { repair_id: ReconciliationId(280), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41462 Sep 22 23:15:09.098 INFO [0] client ExtentFlush { repair_id: ReconciliationId(280), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41463 Sep 22 23:15:09.098 INFO [1] received reconcile message
41464 Sep 22 23:15:09.098 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(280), op: ExtentFlush { repair_id: ReconciliationId(280), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41465 Sep 22 23:15:09.098 INFO [1] client ExtentFlush { repair_id: ReconciliationId(280), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41466 Sep 22 23:15:09.098 INFO [2] received reconcile message
41467 Sep 22 23:15:09.098 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(280), op: ExtentFlush { repair_id: ReconciliationId(280), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41468 Sep 22 23:15:09.098 INFO [2] client ExtentFlush { repair_id: ReconciliationId(280), extent_id: 138, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41469 Sep 22 23:15:09.098 DEBG 280 Flush extent 138 with f:2 g:2
41470 Sep 22 23:15:09.098 DEBG Flush just extent 138 with f:2 and g:2
41471 Sep 22 23:15:09.099 DEBG [1] It's time to notify for 280
41472 Sep 22 23:15:09.099 INFO Completion from [1] id:280 status:true
41473 Sep 22 23:15:09.099 INFO [281/752] Repair commands completed
41474 Sep 22 23:15:09.099 INFO Pop front: ReconcileIO { id: ReconciliationId(281), op: ExtentClose { repair_id: ReconciliationId(281), extent_id: 138 }, state: ClientData([New, New, New]) }
41475 Sep 22 23:15:09.099 INFO Sent repair work, now wait for resp
41476 Sep 22 23:15:09.099 INFO [0] received reconcile message
41477 Sep 22 23:15:09.099 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(281), op: ExtentClose { repair_id: ReconciliationId(281), extent_id: 138 }, state: ClientData([InProgress, New, New]) }, : downstairs
41478 Sep 22 23:15:09.099 INFO [0] client ExtentClose { repair_id: ReconciliationId(281), extent_id: 138 }
41479 Sep 22 23:15:09.099 INFO [1] received reconcile message
41480 Sep 22 23:15:09.099 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(281), op: ExtentClose { repair_id: ReconciliationId(281), extent_id: 138 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41481 Sep 22 23:15:09.099 INFO [1] client ExtentClose { repair_id: ReconciliationId(281), extent_id: 138 }
41482 Sep 22 23:15:09.099 INFO [2] received reconcile message
41483 Sep 22 23:15:09.099 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(281), op: ExtentClose { repair_id: ReconciliationId(281), extent_id: 138 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41484 Sep 22 23:15:09.099 INFO [2] client ExtentClose { repair_id: ReconciliationId(281), extent_id: 138 }
41485 Sep 22 23:15:09.099 DEBG 281 Close extent 138
41486 Sep 22 23:15:09.099 DEBG 281 Close extent 138
41487 Sep 22 23:15:09.100 DEBG 281 Close extent 138
41488 Sep 22 23:15:09.100 DEBG [2] It's time to notify for 281
41489 Sep 22 23:15:09.100 INFO Completion from [2] id:281 status:true
41490 Sep 22 23:15:09.100 INFO [282/752] Repair commands completed
41491 Sep 22 23:15:09.100 INFO Pop front: ReconcileIO { id: ReconciliationId(282), op: ExtentRepair { repair_id: ReconciliationId(282), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41492 Sep 22 23:15:09.100 INFO Sent repair work, now wait for resp
41493 Sep 22 23:15:09.100 INFO [0] received reconcile message
41494 Sep 22 23:15:09.100 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(282), op: ExtentRepair { repair_id: ReconciliationId(282), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41495 Sep 22 23:15:09.100 INFO [0] client ExtentRepair { repair_id: ReconciliationId(282), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41496 Sep 22 23:15:09.100 INFO [0] Sending repair request ReconciliationId(282)
41497 Sep 22 23:15:09.100 INFO [1] received reconcile message
41498 Sep 22 23:15:09.100 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(282), op: ExtentRepair { repair_id: ReconciliationId(282), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41499 Sep 22 23:15:09.100 INFO [1] client ExtentRepair { repair_id: ReconciliationId(282), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41500 Sep 22 23:15:09.100 INFO [1] No action required ReconciliationId(282)
41501 Sep 22 23:15:09.100 INFO [2] received reconcile message
41502 Sep 22 23:15:09.100 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(282), op: ExtentRepair { repair_id: ReconciliationId(282), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41503 Sep 22 23:15:09.100 INFO [2] client ExtentRepair { repair_id: ReconciliationId(282), extent_id: 138, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41504 Sep 22 23:15:09.100 INFO [2] No action required ReconciliationId(282)
41505 Sep 22 23:15:09.100 DEBG 282 Repair extent 138 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
41506 Sep 22 23:15:09.101 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/08A.copy"
41507 Sep 22 23:15:09.162 INFO accepted connection, remote_addr: 127.0.0.1:39632, local_addr: 127.0.0.1:46213, task: repair
41508 Sep 22 23:15:09.163 TRCE incoming request, uri: /extent/138/files, method: GET, req_id: 78543559-ffd9-4cf2-9a57-38d01d59d88a, remote_addr: 127.0.0.1:39632, local_addr: 127.0.0.1:46213, task: repair
41509 Sep 22 23:15:09.163 INFO request completed, latency_us: 243, response_code: 200, uri: /extent/138/files, method: GET, req_id: 78543559-ffd9-4cf2-9a57-38d01d59d88a, remote_addr: 127.0.0.1:39632, local_addr: 127.0.0.1:46213, task: repair
41510 Sep 22 23:15:09.163 INFO eid:138 Found repair files: ["08A", "08A.db"]
41511 Sep 22 23:15:09.164 TRCE incoming request, uri: /newextent/138/data, method: GET, req_id: fcc43585-6ab5-403c-b2ae-0fa763745671, remote_addr: 127.0.0.1:39632, local_addr: 127.0.0.1:46213, task: repair
41512 Sep 22 23:15:09.164 INFO request completed, latency_us: 329, response_code: 200, uri: /newextent/138/data, method: GET, req_id: fcc43585-6ab5-403c-b2ae-0fa763745671, remote_addr: 127.0.0.1:39632, local_addr: 127.0.0.1:46213, task: repair
41513 Sep 22 23:15:09.169 TRCE incoming request, uri: /newextent/138/db, method: GET, req_id: 0e7e9c0f-6b76-423f-84f4-bb8939bf22b9, remote_addr: 127.0.0.1:39632, local_addr: 127.0.0.1:46213, task: repair
41514 Sep 22 23:15:09.169 INFO request completed, latency_us: 290, response_code: 200, uri: /newextent/138/db, method: GET, req_id: 0e7e9c0f-6b76-423f-84f4-bb8939bf22b9, remote_addr: 127.0.0.1:39632, local_addr: 127.0.0.1:46213, task: repair
41515 Sep 22 23:15:09.171 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/08A.copy" to "/tmp/downstairs-vrx8aK6L/00/000/08A.replace"
41516 Sep 22 23:15:09.171 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41517 Sep 22 23:15:09.171 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/08A.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
41518 Sep 22 23:15:09.172 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/08A"
41519 Sep 22 23:15:09.172 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/08A.db"
41520 Sep 22 23:15:09.172 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41521 Sep 22 23:15:09.172 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/08A.replace" to "/tmp/downstairs-vrx8aK6L/00/000/08A.completed"
41522 Sep 22 23:15:09.172 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41523 Sep 22 23:15:09.172 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41524 Sep 22 23:15:09.172 DEBG [0] It's time to notify for 282
41525 Sep 22 23:15:09.172 INFO Completion from [0] id:282 status:true
41526 Sep 22 23:15:09.172 INFO [283/752] Repair commands completed
41527 Sep 22 23:15:09.172 INFO Pop front: ReconcileIO { id: ReconciliationId(283), op: ExtentReopen { repair_id: ReconciliationId(283), extent_id: 138 }, state: ClientData([New, New, New]) }
41528 Sep 22 23:15:09.172 INFO Sent repair work, now wait for resp
41529 Sep 22 23:15:09.172 INFO [0] received reconcile message
41530 Sep 22 23:15:09.172 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(283), op: ExtentReopen { repair_id: ReconciliationId(283), extent_id: 138 }, state: ClientData([InProgress, New, New]) }, : downstairs
41531 Sep 22 23:15:09.172 INFO [0] client ExtentReopen { repair_id: ReconciliationId(283), extent_id: 138 }
41532 Sep 22 23:15:09.172 INFO [1] received reconcile message
41533 Sep 22 23:15:09.172 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(283), op: ExtentReopen { repair_id: ReconciliationId(283), extent_id: 138 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41534 Sep 22 23:15:09.172 INFO [1] client ExtentReopen { repair_id: ReconciliationId(283), extent_id: 138 }
41535 Sep 22 23:15:09.173 INFO [2] received reconcile message
41536 Sep 22 23:15:09.173 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(283), op: ExtentReopen { repair_id: ReconciliationId(283), extent_id: 138 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41537 Sep 22 23:15:09.173 INFO [2] client ExtentReopen { repair_id: ReconciliationId(283), extent_id: 138 }
41538 Sep 22 23:15:09.173 DEBG 283 Reopen extent 138
41539 Sep 22 23:15:09.173 DEBG 283 Reopen extent 138
41540 Sep 22 23:15:09.174 DEBG 283 Reopen extent 138
41541 Sep 22 23:15:09.175 DEBG [2] It's time to notify for 283
41542 Sep 22 23:15:09.175 INFO Completion from [2] id:283 status:true
41543 Sep 22 23:15:09.175 INFO [284/752] Repair commands completed
41544 Sep 22 23:15:09.175 INFO Pop front: ReconcileIO { id: ReconciliationId(284), op: ExtentFlush { repair_id: ReconciliationId(284), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41545 Sep 22 23:15:09.175 INFO Sent repair work, now wait for resp
41546 Sep 22 23:15:09.175 INFO [0] received reconcile message
41547 Sep 22 23:15:09.175 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(284), op: ExtentFlush { repair_id: ReconciliationId(284), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41548 Sep 22 23:15:09.175 INFO [0] client ExtentFlush { repair_id: ReconciliationId(284), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41549 Sep 22 23:15:09.175 INFO [1] received reconcile message
41550 Sep 22 23:15:09.175 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(284), op: ExtentFlush { repair_id: ReconciliationId(284), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41551 Sep 22 23:15:09.175 INFO [1] client ExtentFlush { repair_id: ReconciliationId(284), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41552 Sep 22 23:15:09.175 INFO [2] received reconcile message
41553 Sep 22 23:15:09.175 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(284), op: ExtentFlush { repair_id: ReconciliationId(284), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41554 Sep 22 23:15:09.175 INFO [2] client ExtentFlush { repair_id: ReconciliationId(284), extent_id: 100, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41555 Sep 22 23:15:09.175 DEBG 284 Flush extent 100 with f:2 g:2
41556 Sep 22 23:15:09.175 DEBG Flush just extent 100 with f:2 and g:2
41557 Sep 22 23:15:09.175 DEBG [1] It's time to notify for 284
41558 Sep 22 23:15:09.175 INFO Completion from [1] id:284 status:true
41559 Sep 22 23:15:09.175 INFO [285/752] Repair commands completed
41560 Sep 22 23:15:09.175 INFO Pop front: ReconcileIO { id: ReconciliationId(285), op: ExtentClose { repair_id: ReconciliationId(285), extent_id: 100 }, state: ClientData([New, New, New]) }
41561 Sep 22 23:15:09.175 INFO Sent repair work, now wait for resp
41562 Sep 22 23:15:09.175 INFO [0] received reconcile message
41563 Sep 22 23:15:09.175 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(285), op: ExtentClose { repair_id: ReconciliationId(285), extent_id: 100 }, state: ClientData([InProgress, New, New]) }, : downstairs
41564 Sep 22 23:15:09.175 INFO [0] client ExtentClose { repair_id: ReconciliationId(285), extent_id: 100 }
41565 Sep 22 23:15:09.175 INFO [1] received reconcile message
41566 Sep 22 23:15:09.175 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(285), op: ExtentClose { repair_id: ReconciliationId(285), extent_id: 100 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41567 Sep 22 23:15:09.175 INFO [1] client ExtentClose { repair_id: ReconciliationId(285), extent_id: 100 }
41568 Sep 22 23:15:09.175 INFO [2] received reconcile message
41569 Sep 22 23:15:09.175 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(285), op: ExtentClose { repair_id: ReconciliationId(285), extent_id: 100 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41570 Sep 22 23:15:09.175 INFO [2] client ExtentClose { repair_id: ReconciliationId(285), extent_id: 100 }
41571 Sep 22 23:15:09.176 DEBG 285 Close extent 100
41572 Sep 22 23:15:09.176 DEBG 285 Close extent 100
41573 Sep 22 23:15:09.176 DEBG 285 Close extent 100
41574 Sep 22 23:15:09.177 DEBG [2] It's time to notify for 285
41575 Sep 22 23:15:09.177 INFO Completion from [2] id:285 status:true
41576 Sep 22 23:15:09.177 INFO [286/752] Repair commands completed
41577 Sep 22 23:15:09.177 INFO Pop front: ReconcileIO { id: ReconciliationId(286), op: ExtentRepair { repair_id: ReconciliationId(286), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41578 Sep 22 23:15:09.177 INFO Sent repair work, now wait for resp
41579 Sep 22 23:15:09.177 INFO [0] received reconcile message
41580 Sep 22 23:15:09.177 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(286), op: ExtentRepair { repair_id: ReconciliationId(286), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41581 Sep 22 23:15:09.177 INFO [0] client ExtentRepair { repair_id: ReconciliationId(286), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41582 Sep 22 23:15:09.177 INFO [0] Sending repair request ReconciliationId(286)
41583 Sep 22 23:15:09.177 INFO [1] received reconcile message
41584 Sep 22 23:15:09.177 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(286), op: ExtentRepair { repair_id: ReconciliationId(286), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41585 Sep 22 23:15:09.177 INFO [1] client ExtentRepair { repair_id: ReconciliationId(286), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41586 Sep 22 23:15:09.177 INFO [1] No action required ReconciliationId(286)
41587 Sep 22 23:15:09.177 INFO [2] received reconcile message
41588 Sep 22 23:15:09.177 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(286), op: ExtentRepair { repair_id: ReconciliationId(286), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41589 Sep 22 23:15:09.177 INFO [2] client ExtentRepair { repair_id: ReconciliationId(286), extent_id: 100, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41590 Sep 22 23:15:09.177 INFO [2] No action required ReconciliationId(286)
41591 Sep 22 23:15:09.177 DEBG 286 Repair extent 100 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
41592 Sep 22 23:15:09.177 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/064.copy"
41593 Sep 22 23:15:09.241 INFO accepted connection, remote_addr: 127.0.0.1:43266, local_addr: 127.0.0.1:46213, task: repair
41594 Sep 22 23:15:09.241 TRCE incoming request, uri: /extent/100/files, method: GET, req_id: 40a0c71e-9925-402b-b559-c95aef17a84e, remote_addr: 127.0.0.1:43266, local_addr: 127.0.0.1:46213, task: repair
41595 Sep 22 23:15:09.241 INFO request completed, latency_us: 225, response_code: 200, uri: /extent/100/files, method: GET, req_id: 40a0c71e-9925-402b-b559-c95aef17a84e, remote_addr: 127.0.0.1:43266, local_addr: 127.0.0.1:46213, task: repair
41596 Sep 22 23:15:09.241 INFO eid:100 Found repair files: ["064", "064.db"]
41597 Sep 22 23:15:09.242 TRCE incoming request, uri: /newextent/100/data, method: GET, req_id: 624548a2-bdae-4018-b3d0-846abe0e89ec, remote_addr: 127.0.0.1:43266, local_addr: 127.0.0.1:46213, task: repair
41598 Sep 22 23:15:09.242 INFO request completed, latency_us: 342, response_code: 200, uri: /newextent/100/data, method: GET, req_id: 624548a2-bdae-4018-b3d0-846abe0e89ec, remote_addr: 127.0.0.1:43266, local_addr: 127.0.0.1:46213, task: repair
41599 Sep 22 23:15:09.247 TRCE incoming request, uri: /newextent/100/db, method: GET, req_id: 1e277602-6ad6-49af-9002-c37040960090, remote_addr: 127.0.0.1:43266, local_addr: 127.0.0.1:46213, task: repair
41600 Sep 22 23:15:09.248 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/100/db, method: GET, req_id: 1e277602-6ad6-49af-9002-c37040960090, remote_addr: 127.0.0.1:43266, local_addr: 127.0.0.1:46213, task: repair
41601 Sep 22 23:15:09.249 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/064.copy" to "/tmp/downstairs-vrx8aK6L/00/000/064.replace"
41602 Sep 22 23:15:09.249 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41603 Sep 22 23:15:09.250 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/064.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
41604 Sep 22 23:15:09.250 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/064"
41605 Sep 22 23:15:09.250 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/064.db"
41606 Sep 22 23:15:09.250 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41607 Sep 22 23:15:09.250 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/064.replace" to "/tmp/downstairs-vrx8aK6L/00/000/064.completed"
41608 Sep 22 23:15:09.250 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41609 Sep 22 23:15:09.250 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41610 Sep 22 23:15:09.250 DEBG [0] It's time to notify for 286
41611 Sep 22 23:15:09.250 INFO Completion from [0] id:286 status:true
41612 Sep 22 23:15:09.250 INFO [287/752] Repair commands completed
41613 Sep 22 23:15:09.250 INFO Pop front: ReconcileIO { id: ReconciliationId(287), op: ExtentReopen { repair_id: ReconciliationId(287), extent_id: 100 }, state: ClientData([New, New, New]) }
41614 Sep 22 23:15:09.251 INFO Sent repair work, now wait for resp
41615 Sep 22 23:15:09.251 INFO [0] received reconcile message
41616 Sep 22 23:15:09.251 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(287), op: ExtentReopen { repair_id: ReconciliationId(287), extent_id: 100 }, state: ClientData([InProgress, New, New]) }, : downstairs
41617 Sep 22 23:15:09.251 INFO [0] client ExtentReopen { repair_id: ReconciliationId(287), extent_id: 100 }
41618 Sep 22 23:15:09.251 INFO [1] received reconcile message
41619 Sep 22 23:15:09.251 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(287), op: ExtentReopen { repair_id: ReconciliationId(287), extent_id: 100 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41620 Sep 22 23:15:09.251 INFO [1] client ExtentReopen { repair_id: ReconciliationId(287), extent_id: 100 }
41621 Sep 22 23:15:09.251 INFO [2] received reconcile message
41622 Sep 22 23:15:09.251 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(287), op: ExtentReopen { repair_id: ReconciliationId(287), extent_id: 100 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41623 Sep 22 23:15:09.251 INFO [2] client ExtentReopen { repair_id: ReconciliationId(287), extent_id: 100 }
41624 Sep 22 23:15:09.251 DEBG 287 Reopen extent 100
41625 Sep 22 23:15:09.252 DEBG 287 Reopen extent 100
41626 Sep 22 23:15:09.252 DEBG 287 Reopen extent 100
41627 Sep 22 23:15:09.253 DEBG [2] It's time to notify for 287
41628 Sep 22 23:15:09.253 INFO Completion from [2] id:287 status:true
41629 Sep 22 23:15:09.253 INFO [288/752] Repair commands completed
41630 Sep 22 23:15:09.253 INFO Pop front: ReconcileIO { id: ReconciliationId(288), op: ExtentFlush { repair_id: ReconciliationId(288), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41631 Sep 22 23:15:09.253 INFO Sent repair work, now wait for resp
41632 Sep 22 23:15:09.253 INFO [0] received reconcile message
41633 Sep 22 23:15:09.253 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(288), op: ExtentFlush { repair_id: ReconciliationId(288), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41634 Sep 22 23:15:09.253 INFO [0] client ExtentFlush { repair_id: ReconciliationId(288), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41635 Sep 22 23:15:09.253 INFO [1] received reconcile message
41636 Sep 22 23:15:09.253 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(288), op: ExtentFlush { repair_id: ReconciliationId(288), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41637 Sep 22 23:15:09.253 INFO [1] client ExtentFlush { repair_id: ReconciliationId(288), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41638 Sep 22 23:15:09.253 INFO [2] received reconcile message
41639 Sep 22 23:15:09.253 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(288), op: ExtentFlush { repair_id: ReconciliationId(288), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41640 Sep 22 23:15:09.253 INFO [2] client ExtentFlush { repair_id: ReconciliationId(288), extent_id: 123, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41641 Sep 22 23:15:09.253 DEBG 288 Flush extent 123 with f:2 g:2
41642 Sep 22 23:15:09.253 DEBG Flush just extent 123 with f:2 and g:2
41643 Sep 22 23:15:09.253 DEBG [1] It's time to notify for 288
41644 Sep 22 23:15:09.253 INFO Completion from [1] id:288 status:true
41645 Sep 22 23:15:09.253 INFO [289/752] Repair commands completed
41646 Sep 22 23:15:09.253 INFO Pop front: ReconcileIO { id: ReconciliationId(289), op: ExtentClose { repair_id: ReconciliationId(289), extent_id: 123 }, state: ClientData([New, New, New]) }
41647 Sep 22 23:15:09.253 INFO Sent repair work, now wait for resp
41648 Sep 22 23:15:09.253 INFO [0] received reconcile message
41649 Sep 22 23:15:09.253 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(289), op: ExtentClose { repair_id: ReconciliationId(289), extent_id: 123 }, state: ClientData([InProgress, New, New]) }, : downstairs
41650 Sep 22 23:15:09.253 INFO [0] client ExtentClose { repair_id: ReconciliationId(289), extent_id: 123 }
41651 Sep 22 23:15:09.253 INFO [1] received reconcile message
41652 Sep 22 23:15:09.254 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(289), op: ExtentClose { repair_id: ReconciliationId(289), extent_id: 123 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41653 Sep 22 23:15:09.254 INFO [1] client ExtentClose { repair_id: ReconciliationId(289), extent_id: 123 }
41654 Sep 22 23:15:09.254 INFO [2] received reconcile message
41655 Sep 22 23:15:09.254 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(289), op: ExtentClose { repair_id: ReconciliationId(289), extent_id: 123 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41656 Sep 22 23:15:09.254 INFO [2] client ExtentClose { repair_id: ReconciliationId(289), extent_id: 123 }
41657 Sep 22 23:15:09.254 DEBG 289 Close extent 123
41658 Sep 22 23:15:09.254 DEBG 289 Close extent 123
41659 Sep 22 23:15:09.254 DEBG 289 Close extent 123
41660 Sep 22 23:15:09.255 DEBG [2] It's time to notify for 289
41661 Sep 22 23:15:09.255 INFO Completion from [2] id:289 status:true
41662 Sep 22 23:15:09.255 INFO [290/752] Repair commands completed
41663 Sep 22 23:15:09.255 INFO Pop front: ReconcileIO { id: ReconciliationId(290), op: ExtentRepair { repair_id: ReconciliationId(290), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41664 Sep 22 23:15:09.255 INFO Sent repair work, now wait for resp
41665 Sep 22 23:15:09.255 INFO [0] received reconcile message
41666 Sep 22 23:15:09.255 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(290), op: ExtentRepair { repair_id: ReconciliationId(290), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41667 Sep 22 23:15:09.255 INFO [0] client ExtentRepair { repair_id: ReconciliationId(290), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41668 Sep 22 23:15:09.255 INFO [0] Sending repair request ReconciliationId(290)
41669 Sep 22 23:15:09.255 INFO [1] received reconcile message
41670 Sep 22 23:15:09.255 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(290), op: ExtentRepair { repair_id: ReconciliationId(290), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41671 Sep 22 23:15:09.255 INFO [1] client ExtentRepair { repair_id: ReconciliationId(290), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41672 Sep 22 23:15:09.255 INFO [1] No action required ReconciliationId(290)
41673 Sep 22 23:15:09.255 INFO [2] received reconcile message
41674 Sep 22 23:15:09.255 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(290), op: ExtentRepair { repair_id: ReconciliationId(290), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41675 Sep 22 23:15:09.255 INFO [2] client ExtentRepair { repair_id: ReconciliationId(290), extent_id: 123, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41676 Sep 22 23:15:09.255 INFO [2] No action required ReconciliationId(290)
41677 Sep 22 23:15:09.255 DEBG 290 Repair extent 123 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
41678 Sep 22 23:15:09.255 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/07B.copy"
41679 Sep 22 23:15:09.319 INFO accepted connection, remote_addr: 127.0.0.1:57416, local_addr: 127.0.0.1:46213, task: repair
41680 Sep 22 23:15:09.319 TRCE incoming request, uri: /extent/123/files, method: GET, req_id: 2ca770b5-e4b4-44f4-a8f4-0935e3cca897, remote_addr: 127.0.0.1:57416, local_addr: 127.0.0.1:46213, task: repair
41681 Sep 22 23:15:09.319 INFO request completed, latency_us: 204, response_code: 200, uri: /extent/123/files, method: GET, req_id: 2ca770b5-e4b4-44f4-a8f4-0935e3cca897, remote_addr: 127.0.0.1:57416, local_addr: 127.0.0.1:46213, task: repair
41682 Sep 22 23:15:09.319 INFO eid:123 Found repair files: ["07B", "07B.db"]
41683 Sep 22 23:15:09.320 TRCE incoming request, uri: /newextent/123/data, method: GET, req_id: 219edfbc-7f84-4c0f-b1ec-9607a7ae1d20, remote_addr: 127.0.0.1:57416, local_addr: 127.0.0.1:46213, task: repair
41684 Sep 22 23:15:09.320 INFO request completed, latency_us: 306, response_code: 200, uri: /newextent/123/data, method: GET, req_id: 219edfbc-7f84-4c0f-b1ec-9607a7ae1d20, remote_addr: 127.0.0.1:57416, local_addr: 127.0.0.1:46213, task: repair
41685 Sep 22 23:15:09.325 TRCE incoming request, uri: /newextent/123/db, method: GET, req_id: 1478200b-e0ae-486b-ace8-26204d330287, remote_addr: 127.0.0.1:57416, local_addr: 127.0.0.1:46213, task: repair
41686 Sep 22 23:15:09.325 INFO request completed, latency_us: 314, response_code: 200, uri: /newextent/123/db, method: GET, req_id: 1478200b-e0ae-486b-ace8-26204d330287, remote_addr: 127.0.0.1:57416, local_addr: 127.0.0.1:46213, task: repair
41687 Sep 22 23:15:09.327 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/07B.copy" to "/tmp/downstairs-vrx8aK6L/00/000/07B.replace"
41688 Sep 22 23:15:09.327 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41689 Sep 22 23:15:09.327 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/07B.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
41690 Sep 22 23:15:09.328 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/07B"
41691 Sep 22 23:15:09.328 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/07B.db"
41692 Sep 22 23:15:09.328 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41693 Sep 22 23:15:09.328 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/07B.replace" to "/tmp/downstairs-vrx8aK6L/00/000/07B.completed"
41694 Sep 22 23:15:09.328 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41695 Sep 22 23:15:09.328 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41696 Sep 22 23:15:09.328 DEBG [0] It's time to notify for 290
41697 Sep 22 23:15:09.328 INFO Completion from [0] id:290 status:true
41698 Sep 22 23:15:09.328 INFO [291/752] Repair commands completed
41699 Sep 22 23:15:09.328 INFO Pop front: ReconcileIO { id: ReconciliationId(291), op: ExtentReopen { repair_id: ReconciliationId(291), extent_id: 123 }, state: ClientData([New, New, New]) }
41700 Sep 22 23:15:09.328 INFO Sent repair work, now wait for resp
41701 Sep 22 23:15:09.328 INFO [0] received reconcile message
41702 Sep 22 23:15:09.328 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(291), op: ExtentReopen { repair_id: ReconciliationId(291), extent_id: 123 }, state: ClientData([InProgress, New, New]) }, : downstairs
41703 Sep 22 23:15:09.328 INFO [0] client ExtentReopen { repair_id: ReconciliationId(291), extent_id: 123 }
41704 Sep 22 23:15:09.328 INFO [1] received reconcile message
41705 Sep 22 23:15:09.328 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(291), op: ExtentReopen { repair_id: ReconciliationId(291), extent_id: 123 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41706 Sep 22 23:15:09.328 INFO [1] client ExtentReopen { repair_id: ReconciliationId(291), extent_id: 123 }
41707 Sep 22 23:15:09.328 INFO [2] received reconcile message
41708 Sep 22 23:15:09.329 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(291), op: ExtentReopen { repair_id: ReconciliationId(291), extent_id: 123 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41709 Sep 22 23:15:09.329 INFO [2] client ExtentReopen { repair_id: ReconciliationId(291), extent_id: 123 }
41710 Sep 22 23:15:09.329 DEBG 291 Reopen extent 123
41711 Sep 22 23:15:09.329 DEBG 291 Reopen extent 123
41712 Sep 22 23:15:09.330 DEBG 291 Reopen extent 123
41713 Sep 22 23:15:09.330 DEBG [2] It's time to notify for 291
41714 Sep 22 23:15:09.330 INFO Completion from [2] id:291 status:true
41715 Sep 22 23:15:09.330 INFO [292/752] Repair commands completed
41716 Sep 22 23:15:09.330 INFO Pop front: ReconcileIO { id: ReconciliationId(292), op: ExtentFlush { repair_id: ReconciliationId(292), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41717 Sep 22 23:15:09.330 INFO Sent repair work, now wait for resp
41718 Sep 22 23:15:09.330 INFO [0] received reconcile message
41719 Sep 22 23:15:09.330 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(292), op: ExtentFlush { repair_id: ReconciliationId(292), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41720 Sep 22 23:15:09.331 INFO [0] client ExtentFlush { repair_id: ReconciliationId(292), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41721 Sep 22 23:15:09.331 INFO [1] received reconcile message
41722 Sep 22 23:15:09.331 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(292), op: ExtentFlush { repair_id: ReconciliationId(292), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41723 Sep 22 23:15:09.331 INFO [1] client ExtentFlush { repair_id: ReconciliationId(292), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41724 Sep 22 23:15:09.331 INFO [2] received reconcile message
41725 Sep 22 23:15:09.331 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(292), op: ExtentFlush { repair_id: ReconciliationId(292), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41726 Sep 22 23:15:09.331 INFO [2] client ExtentFlush { repair_id: ReconciliationId(292), extent_id: 96, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41727 Sep 22 23:15:09.331 DEBG 292 Flush extent 96 with f:2 g:2
41728 Sep 22 23:15:09.331 DEBG Flush just extent 96 with f:2 and g:2
41729 Sep 22 23:15:09.331 DEBG [1] It's time to notify for 292
41730 Sep 22 23:15:09.331 INFO Completion from [1] id:292 status:true
41731 Sep 22 23:15:09.331 INFO [293/752] Repair commands completed
41732 Sep 22 23:15:09.331 INFO Pop front: ReconcileIO { id: ReconciliationId(293), op: ExtentClose { repair_id: ReconciliationId(293), extent_id: 96 }, state: ClientData([New, New, New]) }
41733 Sep 22 23:15:09.331 INFO Sent repair work, now wait for resp
41734 Sep 22 23:15:09.331 INFO [0] received reconcile message
41735 Sep 22 23:15:09.331 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(293), op: ExtentClose { repair_id: ReconciliationId(293), extent_id: 96 }, state: ClientData([InProgress, New, New]) }, : downstairs
41736 Sep 22 23:15:09.331 INFO [0] client ExtentClose { repair_id: ReconciliationId(293), extent_id: 96 }
41737 Sep 22 23:15:09.331 INFO [1] received reconcile message
41738 Sep 22 23:15:09.331 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(293), op: ExtentClose { repair_id: ReconciliationId(293), extent_id: 96 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41739 Sep 22 23:15:09.331 INFO [1] client ExtentClose { repair_id: ReconciliationId(293), extent_id: 96 }
41740 Sep 22 23:15:09.331 INFO [2] received reconcile message
41741 Sep 22 23:15:09.331 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(293), op: ExtentClose { repair_id: ReconciliationId(293), extent_id: 96 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41742 Sep 22 23:15:09.331 INFO [2] client ExtentClose { repair_id: ReconciliationId(293), extent_id: 96 }
41743 Sep 22 23:15:09.331 DEBG 293 Close extent 96
41744 Sep 22 23:15:09.332 DEBG 293 Close extent 96
41745 Sep 22 23:15:09.332 DEBG 293 Close extent 96
41746 Sep 22 23:15:09.332 DEBG [2] It's time to notify for 293
41747 Sep 22 23:15:09.332 INFO Completion from [2] id:293 status:true
41748 Sep 22 23:15:09.332 INFO [294/752] Repair commands completed
41749 Sep 22 23:15:09.332 INFO Pop front: ReconcileIO { id: ReconciliationId(294), op: ExtentRepair { repair_id: ReconciliationId(294), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41750 Sep 22 23:15:09.332 INFO Sent repair work, now wait for resp
41751 Sep 22 23:15:09.332 INFO [0] received reconcile message
41752 Sep 22 23:15:09.332 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(294), op: ExtentRepair { repair_id: ReconciliationId(294), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41753 Sep 22 23:15:09.332 INFO [0] client ExtentRepair { repair_id: ReconciliationId(294), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41754 Sep 22 23:15:09.332 INFO [0] Sending repair request ReconciliationId(294)
41755 Sep 22 23:15:09.333 INFO [1] received reconcile message
41756 Sep 22 23:15:09.333 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(294), op: ExtentRepair { repair_id: ReconciliationId(294), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41757 Sep 22 23:15:09.333 INFO [1] client ExtentRepair { repair_id: ReconciliationId(294), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41758 Sep 22 23:15:09.333 INFO [1] No action required ReconciliationId(294)
41759 Sep 22 23:15:09.333 INFO [2] received reconcile message
41760 Sep 22 23:15:09.333 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(294), op: ExtentRepair { repair_id: ReconciliationId(294), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41761 Sep 22 23:15:09.333 INFO [2] client ExtentRepair { repair_id: ReconciliationId(294), extent_id: 96, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41762 Sep 22 23:15:09.333 INFO [2] No action required ReconciliationId(294)
41763 Sep 22 23:15:09.333 DEBG 294 Repair extent 96 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
41764 Sep 22 23:15:09.333 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/060.copy"
41765 Sep 22 23:15:09.397 INFO accepted connection, remote_addr: 127.0.0.1:51861, local_addr: 127.0.0.1:46213, task: repair
41766 Sep 22 23:15:09.397 TRCE incoming request, uri: /extent/96/files, method: GET, req_id: 3df62914-d664-4563-aebc-d0143a732ff2, remote_addr: 127.0.0.1:51861, local_addr: 127.0.0.1:46213, task: repair
41767 Sep 22 23:15:09.397 INFO request completed, latency_us: 224, response_code: 200, uri: /extent/96/files, method: GET, req_id: 3df62914-d664-4563-aebc-d0143a732ff2, remote_addr: 127.0.0.1:51861, local_addr: 127.0.0.1:46213, task: repair
41768 Sep 22 23:15:09.398 INFO eid:96 Found repair files: ["060", "060.db"]
41769 Sep 22 23:15:09.398 TRCE incoming request, uri: /newextent/96/data, method: GET, req_id: 813b681e-29bf-42eb-a2f2-cc8f654f415f, remote_addr: 127.0.0.1:51861, local_addr: 127.0.0.1:46213, task: repair
41770 Sep 22 23:15:09.398 INFO request completed, latency_us: 335, response_code: 200, uri: /newextent/96/data, method: GET, req_id: 813b681e-29bf-42eb-a2f2-cc8f654f415f, remote_addr: 127.0.0.1:51861, local_addr: 127.0.0.1:46213, task: repair
41771 Sep 22 23:15:09.403 TRCE incoming request, uri: /newextent/96/db, method: GET, req_id: 5d13749e-d6c4-4a53-a5e7-46209e88de2c, remote_addr: 127.0.0.1:51861, local_addr: 127.0.0.1:46213, task: repair
41772 Sep 22 23:15:09.404 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/96/db, method: GET, req_id: 5d13749e-d6c4-4a53-a5e7-46209e88de2c, remote_addr: 127.0.0.1:51861, local_addr: 127.0.0.1:46213, task: repair
41773 Sep 22 23:15:09.405 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/060.copy" to "/tmp/downstairs-vrx8aK6L/00/000/060.replace"
41774 Sep 22 23:15:09.405 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41775 Sep 22 23:15:09.406 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/060.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
41776 Sep 22 23:15:09.406 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/060"
41777 Sep 22 23:15:09.406 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/060.db"
41778 Sep 22 23:15:09.406 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41779 Sep 22 23:15:09.406 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/060.replace" to "/tmp/downstairs-vrx8aK6L/00/000/060.completed"
41780 Sep 22 23:15:09.406 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41781 Sep 22 23:15:09.406 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41782 Sep 22 23:15:09.406 DEBG [0] It's time to notify for 294
41783 Sep 22 23:15:09.407 INFO Completion from [0] id:294 status:true
41784 Sep 22 23:15:09.407 INFO [295/752] Repair commands completed
41785 Sep 22 23:15:09.407 INFO Pop front: ReconcileIO { id: ReconciliationId(295), op: ExtentReopen { repair_id: ReconciliationId(295), extent_id: 96 }, state: ClientData([New, New, New]) }
41786 Sep 22 23:15:09.407 INFO Sent repair work, now wait for resp
41787 Sep 22 23:15:09.407 INFO [0] received reconcile message
41788 Sep 22 23:15:09.407 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(295), op: ExtentReopen { repair_id: ReconciliationId(295), extent_id: 96 }, state: ClientData([InProgress, New, New]) }, : downstairs
41789 Sep 22 23:15:09.407 INFO [0] client ExtentReopen { repair_id: ReconciliationId(295), extent_id: 96 }
41790 Sep 22 23:15:09.407 INFO [1] received reconcile message
41791 Sep 22 23:15:09.407 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(295), op: ExtentReopen { repair_id: ReconciliationId(295), extent_id: 96 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41792 Sep 22 23:15:09.407 INFO [1] client ExtentReopen { repair_id: ReconciliationId(295), extent_id: 96 }
41793 Sep 22 23:15:09.407 INFO [2] received reconcile message
41794 Sep 22 23:15:09.407 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(295), op: ExtentReopen { repair_id: ReconciliationId(295), extent_id: 96 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41795 Sep 22 23:15:09.407 INFO [2] client ExtentReopen { repair_id: ReconciliationId(295), extent_id: 96 }
41796 Sep 22 23:15:09.407 DEBG 295 Reopen extent 96
41797 Sep 22 23:15:09.408 DEBG 295 Reopen extent 96
41798 Sep 22 23:15:09.408 DEBG 295 Reopen extent 96
41799 Sep 22 23:15:09.409 DEBG [2] It's time to notify for 295
41800 Sep 22 23:15:09.409 INFO Completion from [2] id:295 status:true
41801 Sep 22 23:15:09.409 INFO [296/752] Repair commands completed
41802 Sep 22 23:15:09.409 INFO Pop front: ReconcileIO { id: ReconciliationId(296), op: ExtentFlush { repair_id: ReconciliationId(296), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41803 Sep 22 23:15:09.409 INFO Sent repair work, now wait for resp
41804 Sep 22 23:15:09.409 INFO [0] received reconcile message
41805 Sep 22 23:15:09.409 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(296), op: ExtentFlush { repair_id: ReconciliationId(296), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41806 Sep 22 23:15:09.409 INFO [0] client ExtentFlush { repair_id: ReconciliationId(296), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41807 Sep 22 23:15:09.409 INFO [1] received reconcile message
41808 Sep 22 23:15:09.409 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(296), op: ExtentFlush { repair_id: ReconciliationId(296), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41809 Sep 22 23:15:09.409 INFO [1] client ExtentFlush { repair_id: ReconciliationId(296), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41810 Sep 22 23:15:09.409 INFO [2] received reconcile message
41811 Sep 22 23:15:09.409 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(296), op: ExtentFlush { repair_id: ReconciliationId(296), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41812 Sep 22 23:15:09.409 INFO [2] client ExtentFlush { repair_id: ReconciliationId(296), extent_id: 28, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41813 Sep 22 23:15:09.409 DEBG 296 Flush extent 28 with f:2 g:2
41814 Sep 22 23:15:09.409 DEBG Flush just extent 28 with f:2 and g:2
41815 Sep 22 23:15:09.409 DEBG [1] It's time to notify for 296
41816 Sep 22 23:15:09.409 INFO Completion from [1] id:296 status:true
41817 Sep 22 23:15:09.409 INFO [297/752] Repair commands completed
41818 Sep 22 23:15:09.409 INFO Pop front: ReconcileIO { id: ReconciliationId(297), op: ExtentClose { repair_id: ReconciliationId(297), extent_id: 28 }, state: ClientData([New, New, New]) }
41819 Sep 22 23:15:09.409 INFO Sent repair work, now wait for resp
41820 Sep 22 23:15:09.409 INFO [0] received reconcile message
41821 Sep 22 23:15:09.409 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(297), op: ExtentClose { repair_id: ReconciliationId(297), extent_id: 28 }, state: ClientData([InProgress, New, New]) }, : downstairs
41822 Sep 22 23:15:09.409 INFO [0] client ExtentClose { repair_id: ReconciliationId(297), extent_id: 28 }
41823 Sep 22 23:15:09.410 INFO [1] received reconcile message
41824 Sep 22 23:15:09.410 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(297), op: ExtentClose { repair_id: ReconciliationId(297), extent_id: 28 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41825 Sep 22 23:15:09.410 INFO [1] client ExtentClose { repair_id: ReconciliationId(297), extent_id: 28 }
41826 Sep 22 23:15:09.410 INFO [2] received reconcile message
41827 Sep 22 23:15:09.410 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(297), op: ExtentClose { repair_id: ReconciliationId(297), extent_id: 28 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41828 Sep 22 23:15:09.410 INFO [2] client ExtentClose { repair_id: ReconciliationId(297), extent_id: 28 }
41829 Sep 22 23:15:09.410 DEBG 297 Close extent 28
41830 Sep 22 23:15:09.410 DEBG 297 Close extent 28
41831 Sep 22 23:15:09.410 DEBG 297 Close extent 28
41832 Sep 22 23:15:09.411 DEBG [2] It's time to notify for 297
41833 Sep 22 23:15:09.411 INFO Completion from [2] id:297 status:true
41834 Sep 22 23:15:09.411 INFO [298/752] Repair commands completed
41835 Sep 22 23:15:09.411 INFO Pop front: ReconcileIO { id: ReconciliationId(298), op: ExtentRepair { repair_id: ReconciliationId(298), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41836 Sep 22 23:15:09.411 INFO Sent repair work, now wait for resp
41837 Sep 22 23:15:09.411 INFO [0] received reconcile message
41838 Sep 22 23:15:09.411 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(298), op: ExtentRepair { repair_id: ReconciliationId(298), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41839 Sep 22 23:15:09.411 INFO [0] client ExtentRepair { repair_id: ReconciliationId(298), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41840 Sep 22 23:15:09.411 INFO [0] Sending repair request ReconciliationId(298)
41841 Sep 22 23:15:09.411 INFO [1] received reconcile message
41842 Sep 22 23:15:09.411 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(298), op: ExtentRepair { repair_id: ReconciliationId(298), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41843 Sep 22 23:15:09.411 INFO [1] client ExtentRepair { repair_id: ReconciliationId(298), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41844 Sep 22 23:15:09.411 INFO [1] No action required ReconciliationId(298)
41845 Sep 22 23:15:09.411 INFO [2] received reconcile message
41846 Sep 22 23:15:09.411 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(298), op: ExtentRepair { repair_id: ReconciliationId(298), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41847 Sep 22 23:15:09.411 INFO [2] client ExtentRepair { repair_id: ReconciliationId(298), extent_id: 28, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41848 Sep 22 23:15:09.411 INFO [2] No action required ReconciliationId(298)
41849 Sep 22 23:15:09.411 DEBG 298 Repair extent 28 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
41850 Sep 22 23:15:09.411 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/01C.copy"
41851 Sep 22 23:15:09.444 DEBG [2] Read AckReady 1080, : downstairs
41852 Sep 22 23:15:09.445 DEBG up_ds_listen was notified
41853 Sep 22 23:15:09.445 DEBG up_ds_listen process 1080
41854 Sep 22 23:15:09.446 DEBG [A] ack job 1080:81, : downstairs
41855 Sep 22 23:15:09.472 INFO accepted connection, remote_addr: 127.0.0.1:42693, local_addr: 127.0.0.1:46213, task: repair
41856 Sep 22 23:15:09.472 TRCE incoming request, uri: /extent/28/files, method: GET, req_id: f0428ef2-b9de-4ea5-b101-407f4782d0bf, remote_addr: 127.0.0.1:42693, local_addr: 127.0.0.1:46213, task: repair
41857 Sep 22 23:15:09.472 INFO request completed, latency_us: 203, response_code: 200, uri: /extent/28/files, method: GET, req_id: f0428ef2-b9de-4ea5-b101-407f4782d0bf, remote_addr: 127.0.0.1:42693, local_addr: 127.0.0.1:46213, task: repair
41858 Sep 22 23:15:09.473 INFO eid:28 Found repair files: ["01C", "01C.db"]
41859 Sep 22 23:15:09.473 TRCE incoming request, uri: /newextent/28/data, method: GET, req_id: 25382c69-63cb-4f78-a41a-0770f9aec17b, remote_addr: 127.0.0.1:42693, local_addr: 127.0.0.1:46213, task: repair
41860 Sep 22 23:15:09.473 INFO request completed, latency_us: 350, response_code: 200, uri: /newextent/28/data, method: GET, req_id: 25382c69-63cb-4f78-a41a-0770f9aec17b, remote_addr: 127.0.0.1:42693, local_addr: 127.0.0.1:46213, task: repair
41861 Sep 22 23:15:09.479 TRCE incoming request, uri: /newextent/28/db, method: GET, req_id: 99d5edb5-a9ba-4c7b-b3c1-5cddc9077c00, remote_addr: 127.0.0.1:42693, local_addr: 127.0.0.1:46213, task: repair
41862 Sep 22 23:15:09.479 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/28/db, method: GET, req_id: 99d5edb5-a9ba-4c7b-b3c1-5cddc9077c00, remote_addr: 127.0.0.1:42693, local_addr: 127.0.0.1:46213, task: repair
41863 Sep 22 23:15:09.480 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/01C.copy" to "/tmp/downstairs-vrx8aK6L/00/000/01C.replace"
41864 Sep 22 23:15:09.480 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41865 Sep 22 23:15:09.481 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/01C.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
41866 Sep 22 23:15:09.481 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/01C"
41867 Sep 22 23:15:09.481 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/01C.db"
41868 Sep 22 23:15:09.481 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41869 Sep 22 23:15:09.481 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/01C.replace" to "/tmp/downstairs-vrx8aK6L/00/000/01C.completed"
41870 Sep 22 23:15:09.481 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41871 Sep 22 23:15:09.481 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41872 Sep 22 23:15:09.481 DEBG [0] It's time to notify for 298
41873 Sep 22 23:15:09.482 INFO Completion from [0] id:298 status:true
41874 Sep 22 23:15:09.482 INFO [299/752] Repair commands completed
41875 Sep 22 23:15:09.482 INFO Pop front: ReconcileIO { id: ReconciliationId(299), op: ExtentReopen { repair_id: ReconciliationId(299), extent_id: 28 }, state: ClientData([New, New, New]) }
41876 Sep 22 23:15:09.482 INFO Sent repair work, now wait for resp
41877 Sep 22 23:15:09.482 INFO [0] received reconcile message
41878 Sep 22 23:15:09.482 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(299), op: ExtentReopen { repair_id: ReconciliationId(299), extent_id: 28 }, state: ClientData([InProgress, New, New]) }, : downstairs
41879 Sep 22 23:15:09.482 INFO [0] client ExtentReopen { repair_id: ReconciliationId(299), extent_id: 28 }
41880 Sep 22 23:15:09.482 INFO [1] received reconcile message
41881 Sep 22 23:15:09.482 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(299), op: ExtentReopen { repair_id: ReconciliationId(299), extent_id: 28 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41882 Sep 22 23:15:09.482 INFO [1] client ExtentReopen { repair_id: ReconciliationId(299), extent_id: 28 }
41883 Sep 22 23:15:09.482 INFO [2] received reconcile message
41884 Sep 22 23:15:09.482 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(299), op: ExtentReopen { repair_id: ReconciliationId(299), extent_id: 28 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41885 Sep 22 23:15:09.482 INFO [2] client ExtentReopen { repair_id: ReconciliationId(299), extent_id: 28 }
41886 Sep 22 23:15:09.482 DEBG 299 Reopen extent 28
41887 Sep 22 23:15:09.483 DEBG 299 Reopen extent 28
41888 Sep 22 23:15:09.483 DEBG 299 Reopen extent 28
41889 Sep 22 23:15:09.484 DEBG [2] It's time to notify for 299
41890 Sep 22 23:15:09.484 INFO Completion from [2] id:299 status:true
41891 Sep 22 23:15:09.484 INFO [300/752] Repair commands completed
41892 Sep 22 23:15:09.484 INFO Pop front: ReconcileIO { id: ReconciliationId(300), op: ExtentFlush { repair_id: ReconciliationId(300), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41893 Sep 22 23:15:09.484 INFO Sent repair work, now wait for resp
41894 Sep 22 23:15:09.484 INFO [0] received reconcile message
41895 Sep 22 23:15:09.484 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(300), op: ExtentFlush { repair_id: ReconciliationId(300), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41896 Sep 22 23:15:09.484 INFO [0] client ExtentFlush { repair_id: ReconciliationId(300), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41897 Sep 22 23:15:09.484 INFO [1] received reconcile message
41898 Sep 22 23:15:09.484 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(300), op: ExtentFlush { repair_id: ReconciliationId(300), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41899 Sep 22 23:15:09.484 INFO [1] client ExtentFlush { repair_id: ReconciliationId(300), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41900 Sep 22 23:15:09.484 INFO [2] received reconcile message
41901 Sep 22 23:15:09.484 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(300), op: ExtentFlush { repair_id: ReconciliationId(300), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41902 Sep 22 23:15:09.484 INFO [2] client ExtentFlush { repair_id: ReconciliationId(300), extent_id: 178, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41903 Sep 22 23:15:09.484 DEBG 300 Flush extent 178 with f:2 g:2
41904 Sep 22 23:15:09.484 DEBG Flush just extent 178 with f:2 and g:2
41905 Sep 22 23:15:09.484 DEBG [1] It's time to notify for 300
41906 Sep 22 23:15:09.484 INFO Completion from [1] id:300 status:true
41907 Sep 22 23:15:09.484 INFO [301/752] Repair commands completed
41908 Sep 22 23:15:09.484 INFO Pop front: ReconcileIO { id: ReconciliationId(301), op: ExtentClose { repair_id: ReconciliationId(301), extent_id: 178 }, state: ClientData([New, New, New]) }
41909 Sep 22 23:15:09.484 INFO Sent repair work, now wait for resp
41910 Sep 22 23:15:09.484 INFO [0] received reconcile message
41911 Sep 22 23:15:09.484 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(301), op: ExtentClose { repair_id: ReconciliationId(301), extent_id: 178 }, state: ClientData([InProgress, New, New]) }, : downstairs
41912 Sep 22 23:15:09.484 INFO [0] client ExtentClose { repair_id: ReconciliationId(301), extent_id: 178 }
41913 Sep 22 23:15:09.485 INFO [1] received reconcile message
41914 Sep 22 23:15:09.485 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(301), op: ExtentClose { repair_id: ReconciliationId(301), extent_id: 178 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41915 Sep 22 23:15:09.485 INFO [1] client ExtentClose { repair_id: ReconciliationId(301), extent_id: 178 }
41916 Sep 22 23:15:09.485 INFO [2] received reconcile message
41917 Sep 22 23:15:09.485 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(301), op: ExtentClose { repair_id: ReconciliationId(301), extent_id: 178 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41918 Sep 22 23:15:09.485 INFO [2] client ExtentClose { repair_id: ReconciliationId(301), extent_id: 178 }
41919 Sep 22 23:15:09.485 DEBG 301 Close extent 178
41920 Sep 22 23:15:09.485 DEBG 301 Close extent 178
41921 Sep 22 23:15:09.485 DEBG 301 Close extent 178
41922 Sep 22 23:15:09.486 DEBG [2] It's time to notify for 301
41923 Sep 22 23:15:09.486 INFO Completion from [2] id:301 status:true
41924 Sep 22 23:15:09.486 INFO [302/752] Repair commands completed
41925 Sep 22 23:15:09.486 INFO Pop front: ReconcileIO { id: ReconciliationId(302), op: ExtentRepair { repair_id: ReconciliationId(302), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
41926 Sep 22 23:15:09.486 INFO Sent repair work, now wait for resp
41927 Sep 22 23:15:09.486 INFO [0] received reconcile message
41928 Sep 22 23:15:09.486 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(302), op: ExtentRepair { repair_id: ReconciliationId(302), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
41929 Sep 22 23:15:09.486 INFO [0] client ExtentRepair { repair_id: ReconciliationId(302), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41930 Sep 22 23:15:09.486 INFO [0] Sending repair request ReconciliationId(302)
41931 Sep 22 23:15:09.486 INFO [1] received reconcile message
41932 Sep 22 23:15:09.486 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(302), op: ExtentRepair { repair_id: ReconciliationId(302), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41933 Sep 22 23:15:09.486 INFO [1] client ExtentRepair { repair_id: ReconciliationId(302), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41934 Sep 22 23:15:09.486 INFO [1] No action required ReconciliationId(302)
41935 Sep 22 23:15:09.486 INFO [2] received reconcile message
41936 Sep 22 23:15:09.486 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(302), op: ExtentRepair { repair_id: ReconciliationId(302), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
41937 Sep 22 23:15:09.486 INFO [2] client ExtentRepair { repair_id: ReconciliationId(302), extent_id: 178, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
41938 Sep 22 23:15:09.486 INFO [2] No action required ReconciliationId(302)
41939 Sep 22 23:15:09.486 DEBG 302 Repair extent 178 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
41940 Sep 22 23:15:09.486 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0B2.copy"
41941 Sep 22 23:15:09.499 DEBG up_ds_listen checked 1 jobs, back to waiting
41942 Sep 22 23:15:09.501 INFO [lossy] skipping 1081
41943 Sep 22 23:15:09.501 DEBG Flush :1081 extent_limit None deps:[JobId(1080)] res:true f:30 g:1
41944 Sep 22 23:15:09.501 INFO [lossy] sleeping 1 second
41945 Sep 22 23:15:09.503 INFO [lossy] sleeping 1 second
41946 Sep 22 23:15:09.551 INFO accepted connection, remote_addr: 127.0.0.1:35760, local_addr: 127.0.0.1:46213, task: repair
41947 Sep 22 23:15:09.551 TRCE incoming request, uri: /extent/178/files, method: GET, req_id: 3a56285e-9698-4cd7-8e9a-3b380d3b2b2d, remote_addr: 127.0.0.1:35760, local_addr: 127.0.0.1:46213, task: repair
41948 Sep 22 23:15:09.552 INFO request completed, latency_us: 266, response_code: 200, uri: /extent/178/files, method: GET, req_id: 3a56285e-9698-4cd7-8e9a-3b380d3b2b2d, remote_addr: 127.0.0.1:35760, local_addr: 127.0.0.1:46213, task: repair
41949 Sep 22 23:15:09.552 INFO eid:178 Found repair files: ["0B2", "0B2.db"]
41950 Sep 22 23:15:09.552 TRCE incoming request, uri: /newextent/178/data, method: GET, req_id: ed27d62b-a2db-4ae2-bb0a-5ebc27f272ea, remote_addr: 127.0.0.1:35760, local_addr: 127.0.0.1:46213, task: repair
41951 Sep 22 23:15:09.553 INFO request completed, latency_us: 343, response_code: 200, uri: /newextent/178/data, method: GET, req_id: ed27d62b-a2db-4ae2-bb0a-5ebc27f272ea, remote_addr: 127.0.0.1:35760, local_addr: 127.0.0.1:46213, task: repair
41952 Sep 22 23:15:09.558 TRCE incoming request, uri: /newextent/178/db, method: GET, req_id: 141be047-48c7-40b0-9e94-3edb96ee702a, remote_addr: 127.0.0.1:35760, local_addr: 127.0.0.1:46213, task: repair
41953 Sep 22 23:15:09.558 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/178/db, method: GET, req_id: 141be047-48c7-40b0-9e94-3edb96ee702a, remote_addr: 127.0.0.1:35760, local_addr: 127.0.0.1:46213, task: repair
41954 Sep 22 23:15:09.559 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0B2.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0B2.replace"
41955 Sep 22 23:15:09.559 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41956 Sep 22 23:15:09.560 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0B2.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
41957 Sep 22 23:15:09.561 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B2"
41958 Sep 22 23:15:09.561 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B2.db"
41959 Sep 22 23:15:09.561 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41960 Sep 22 23:15:09.561 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0B2.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0B2.completed"
41961 Sep 22 23:15:09.561 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41962 Sep 22 23:15:09.561 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
41963 Sep 22 23:15:09.561 DEBG [0] It's time to notify for 302
41964 Sep 22 23:15:09.561 INFO Completion from [0] id:302 status:true
41965 Sep 22 23:15:09.561 INFO [303/752] Repair commands completed
41966 Sep 22 23:15:09.561 INFO Pop front: ReconcileIO { id: ReconciliationId(303), op: ExtentReopen { repair_id: ReconciliationId(303), extent_id: 178 }, state: ClientData([New, New, New]) }
41967 Sep 22 23:15:09.561 INFO Sent repair work, now wait for resp
41968 Sep 22 23:15:09.561 INFO [0] received reconcile message
41969 Sep 22 23:15:09.561 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(303), op: ExtentReopen { repair_id: ReconciliationId(303), extent_id: 178 }, state: ClientData([InProgress, New, New]) }, : downstairs
41970 Sep 22 23:15:09.561 INFO [0] client ExtentReopen { repair_id: ReconciliationId(303), extent_id: 178 }
41971 Sep 22 23:15:09.561 INFO [1] received reconcile message
41972 Sep 22 23:15:09.561 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(303), op: ExtentReopen { repair_id: ReconciliationId(303), extent_id: 178 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
41973 Sep 22 23:15:09.562 INFO [1] client ExtentReopen { repair_id: ReconciliationId(303), extent_id: 178 }
41974 Sep 22 23:15:09.562 INFO [2] received reconcile message
41975 Sep 22 23:15:09.562 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(303), op: ExtentReopen { repair_id: ReconciliationId(303), extent_id: 178 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
41976 Sep 22 23:15:09.562 INFO [2] client ExtentReopen { repair_id: ReconciliationId(303), extent_id: 178 }
41977 Sep 22 23:15:09.562 DEBG 303 Reopen extent 178
41978 Sep 22 23:15:09.563 DEBG 303 Reopen extent 178
41979 Sep 22 23:15:09.563 DEBG 303 Reopen extent 178
41980 Sep 22 23:15:09.564 DEBG [2] It's time to notify for 303
41981 Sep 22 23:15:09.564 INFO Completion from [2] id:303 status:true
41982 Sep 22 23:15:09.564 INFO [304/752] Repair commands completed
41983 Sep 22 23:15:09.564 INFO Pop front: ReconcileIO { id: ReconciliationId(304), op: ExtentFlush { repair_id: ReconciliationId(304), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
41984 Sep 22 23:15:09.564 INFO Sent repair work, now wait for resp
41985 Sep 22 23:15:09.564 INFO [0] received reconcile message
41986 Sep 22 23:15:09.564 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(304), op: ExtentFlush { repair_id: ReconciliationId(304), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
41987 Sep 22 23:15:09.564 INFO [0] client ExtentFlush { repair_id: ReconciliationId(304), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41988 Sep 22 23:15:09.564 INFO [1] received reconcile message
41989 Sep 22 23:15:09.564 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(304), op: ExtentFlush { repair_id: ReconciliationId(304), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
41990 Sep 22 23:15:09.564 INFO [1] client ExtentFlush { repair_id: ReconciliationId(304), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41991 Sep 22 23:15:09.564 INFO [2] received reconcile message
41992 Sep 22 23:15:09.564 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(304), op: ExtentFlush { repair_id: ReconciliationId(304), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
41993 Sep 22 23:15:09.564 INFO [2] client ExtentFlush { repair_id: ReconciliationId(304), extent_id: 36, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
41994 Sep 22 23:15:09.564 DEBG 304 Flush extent 36 with f:2 g:2
41995 Sep 22 23:15:09.564 DEBG Flush just extent 36 with f:2 and g:2
41996 Sep 22 23:15:09.564 DEBG [1] It's time to notify for 304
41997 Sep 22 23:15:09.564 INFO Completion from [1] id:304 status:true
41998 Sep 22 23:15:09.564 INFO [305/752] Repair commands completed
41999 Sep 22 23:15:09.564 INFO Pop front: ReconcileIO { id: ReconciliationId(305), op: ExtentClose { repair_id: ReconciliationId(305), extent_id: 36 }, state: ClientData([New, New, New]) }
42000 Sep 22 23:15:09.564 INFO Sent repair work, now wait for resp
42001 Sep 22 23:15:09.564 INFO [0] received reconcile message
42002 Sep 22 23:15:09.564 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(305), op: ExtentClose { repair_id: ReconciliationId(305), extent_id: 36 }, state: ClientData([InProgress, New, New]) }, : downstairs
42003 Sep 22 23:15:09.564 INFO [0] client ExtentClose { repair_id: ReconciliationId(305), extent_id: 36 }
42004 Sep 22 23:15:09.564 INFO [1] received reconcile message
42005 Sep 22 23:15:09.564 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(305), op: ExtentClose { repair_id: ReconciliationId(305), extent_id: 36 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42006 Sep 22 23:15:09.564 INFO [1] client ExtentClose { repair_id: ReconciliationId(305), extent_id: 36 }
42007 Sep 22 23:15:09.564 INFO [2] received reconcile message
42008 Sep 22 23:15:09.565 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(305), op: ExtentClose { repair_id: ReconciliationId(305), extent_id: 36 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42009 Sep 22 23:15:09.565 INFO [2] client ExtentClose { repair_id: ReconciliationId(305), extent_id: 36 }
42010 Sep 22 23:15:09.565 DEBG 305 Close extent 36
42011 Sep 22 23:15:09.565 DEBG 305 Close extent 36
42012 Sep 22 23:15:09.565 DEBG 305 Close extent 36
42013 Sep 22 23:15:09.566 DEBG [2] It's time to notify for 305
42014 Sep 22 23:15:09.566 INFO Completion from [2] id:305 status:true
42015 Sep 22 23:15:09.566 INFO [306/752] Repair commands completed
42016 Sep 22 23:15:09.566 INFO Pop front: ReconcileIO { id: ReconciliationId(306), op: ExtentRepair { repair_id: ReconciliationId(306), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42017 Sep 22 23:15:09.566 INFO Sent repair work, now wait for resp
42018 Sep 22 23:15:09.566 INFO [0] received reconcile message
42019 Sep 22 23:15:09.566 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(306), op: ExtentRepair { repair_id: ReconciliationId(306), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42020 Sep 22 23:15:09.566 INFO [0] client ExtentRepair { repair_id: ReconciliationId(306), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42021 Sep 22 23:15:09.566 INFO [0] Sending repair request ReconciliationId(306)
42022 Sep 22 23:15:09.566 INFO [1] received reconcile message
42023 Sep 22 23:15:09.566 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(306), op: ExtentRepair { repair_id: ReconciliationId(306), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42024 Sep 22 23:15:09.566 INFO [1] client ExtentRepair { repair_id: ReconciliationId(306), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42025 Sep 22 23:15:09.566 INFO [1] No action required ReconciliationId(306)
42026 Sep 22 23:15:09.566 INFO [2] received reconcile message
42027 Sep 22 23:15:09.566 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(306), op: ExtentRepair { repair_id: ReconciliationId(306), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42028 Sep 22 23:15:09.566 INFO [2] client ExtentRepair { repair_id: ReconciliationId(306), extent_id: 36, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42029 Sep 22 23:15:09.566 INFO [2] No action required ReconciliationId(306)
42030 Sep 22 23:15:09.566 DEBG 306 Repair extent 36 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
42031 Sep 22 23:15:09.566 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/024.copy"
42032 Sep 22 23:15:09.600 DEBG IO Read 1082 has deps [JobId(1081)]
42033 Sep 22 23:15:09.629 INFO accepted connection, remote_addr: 127.0.0.1:64703, local_addr: 127.0.0.1:46213, task: repair
42034 Sep 22 23:15:09.629 TRCE incoming request, uri: /extent/36/files, method: GET, req_id: 0836efef-0c30-49a6-8f85-dca2ff77050a, remote_addr: 127.0.0.1:64703, local_addr: 127.0.0.1:46213, task: repair
42035 Sep 22 23:15:09.629 INFO request completed, latency_us: 256, response_code: 200, uri: /extent/36/files, method: GET, req_id: 0836efef-0c30-49a6-8f85-dca2ff77050a, remote_addr: 127.0.0.1:64703, local_addr: 127.0.0.1:46213, task: repair
42036 Sep 22 23:15:09.629 INFO eid:36 Found repair files: ["024", "024.db"]
42037 Sep 22 23:15:09.630 TRCE incoming request, uri: /newextent/36/data, method: GET, req_id: fd1a5aea-80c3-49e6-9429-bfc5a777ffa4, remote_addr: 127.0.0.1:64703, local_addr: 127.0.0.1:46213, task: repair
42038 Sep 22 23:15:09.630 INFO request completed, latency_us: 363, response_code: 200, uri: /newextent/36/data, method: GET, req_id: fd1a5aea-80c3-49e6-9429-bfc5a777ffa4, remote_addr: 127.0.0.1:64703, local_addr: 127.0.0.1:46213, task: repair
42039 Sep 22 23:15:09.635 TRCE incoming request, uri: /newextent/36/db, method: GET, req_id: 95e13cbb-984c-4297-9708-690b4bcdf696, remote_addr: 127.0.0.1:64703, local_addr: 127.0.0.1:46213, task: repair
42040 Sep 22 23:15:09.636 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/36/db, method: GET, req_id: 95e13cbb-984c-4297-9708-690b4bcdf696, remote_addr: 127.0.0.1:64703, local_addr: 127.0.0.1:46213, task: repair
42041 Sep 22 23:15:09.637 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/024.copy" to "/tmp/downstairs-vrx8aK6L/00/000/024.replace"
42042 Sep 22 23:15:09.637 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42043 Sep 22 23:15:09.638 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/024.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
42044 Sep 22 23:15:09.638 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/024"
42045 Sep 22 23:15:09.638 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/024.db"
42046 Sep 22 23:15:09.638 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42047 Sep 22 23:15:09.638 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/024.replace" to "/tmp/downstairs-vrx8aK6L/00/000/024.completed"
42048 Sep 22 23:15:09.638 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42049 Sep 22 23:15:09.638 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42050 Sep 22 23:15:09.638 DEBG [0] It's time to notify for 306
42051 Sep 22 23:15:09.639 INFO Completion from [0] id:306 status:true
42052 Sep 22 23:15:09.639 INFO [307/752] Repair commands completed
42053 Sep 22 23:15:09.639 INFO Pop front: ReconcileIO { id: ReconciliationId(307), op: ExtentReopen { repair_id: ReconciliationId(307), extent_id: 36 }, state: ClientData([New, New, New]) }
42054 Sep 22 23:15:09.639 INFO Sent repair work, now wait for resp
42055 Sep 22 23:15:09.639 INFO [0] received reconcile message
42056 Sep 22 23:15:09.639 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(307), op: ExtentReopen { repair_id: ReconciliationId(307), extent_id: 36 }, state: ClientData([InProgress, New, New]) }, : downstairs
42057 Sep 22 23:15:09.639 INFO [0] client ExtentReopen { repair_id: ReconciliationId(307), extent_id: 36 }
42058 Sep 22 23:15:09.639 INFO [1] received reconcile message
42059 Sep 22 23:15:09.639 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(307), op: ExtentReopen { repair_id: ReconciliationId(307), extent_id: 36 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42060 Sep 22 23:15:09.639 INFO [1] client ExtentReopen { repair_id: ReconciliationId(307), extent_id: 36 }
42061 Sep 22 23:15:09.639 INFO [2] received reconcile message
42062 Sep 22 23:15:09.639 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(307), op: ExtentReopen { repair_id: ReconciliationId(307), extent_id: 36 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42063 Sep 22 23:15:09.639 INFO [2] client ExtentReopen { repair_id: ReconciliationId(307), extent_id: 36 }
42064 Sep 22 23:15:09.639 DEBG 307 Reopen extent 36
42065 Sep 22 23:15:09.640 DEBG 307 Reopen extent 36
42066 Sep 22 23:15:09.640 DEBG 307 Reopen extent 36
42067 Sep 22 23:15:09.641 DEBG [2] It's time to notify for 307
42068 Sep 22 23:15:09.641 INFO Completion from [2] id:307 status:true
42069 Sep 22 23:15:09.641 INFO [308/752] Repair commands completed
42070 Sep 22 23:15:09.641 INFO Pop front: ReconcileIO { id: ReconciliationId(308), op: ExtentFlush { repair_id: ReconciliationId(308), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42071 Sep 22 23:15:09.641 INFO Sent repair work, now wait for resp
42072 Sep 22 23:15:09.641 INFO [0] received reconcile message
42073 Sep 22 23:15:09.641 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(308), op: ExtentFlush { repair_id: ReconciliationId(308), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42074 Sep 22 23:15:09.641 INFO [0] client ExtentFlush { repair_id: ReconciliationId(308), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42075 Sep 22 23:15:09.641 INFO [1] received reconcile message
42076 Sep 22 23:15:09.641 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(308), op: ExtentFlush { repair_id: ReconciliationId(308), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42077 Sep 22 23:15:09.641 INFO [1] client ExtentFlush { repair_id: ReconciliationId(308), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42078 Sep 22 23:15:09.641 INFO [2] received reconcile message
42079 Sep 22 23:15:09.641 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(308), op: ExtentFlush { repair_id: ReconciliationId(308), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42080 Sep 22 23:15:09.641 INFO [2] client ExtentFlush { repair_id: ReconciliationId(308), extent_id: 92, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42081 Sep 22 23:15:09.641 DEBG 308 Flush extent 92 with f:2 g:2
42082 Sep 22 23:15:09.641 DEBG Flush just extent 92 with f:2 and g:2
42083 Sep 22 23:15:09.641 DEBG [1] It's time to notify for 308
42084 Sep 22 23:15:09.642 INFO Completion from [1] id:308 status:true
42085 Sep 22 23:15:09.642 INFO [309/752] Repair commands completed
42086 Sep 22 23:15:09.642 INFO Pop front: ReconcileIO { id: ReconciliationId(309), op: ExtentClose { repair_id: ReconciliationId(309), extent_id: 92 }, state: ClientData([New, New, New]) }
42087 Sep 22 23:15:09.642 INFO Sent repair work, now wait for resp
42088 Sep 22 23:15:09.642 INFO [0] received reconcile message
42089 Sep 22 23:15:09.642 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(309), op: ExtentClose { repair_id: ReconciliationId(309), extent_id: 92 }, state: ClientData([InProgress, New, New]) }, : downstairs
42090 Sep 22 23:15:09.642 INFO [0] client ExtentClose { repair_id: ReconciliationId(309), extent_id: 92 }
42091 Sep 22 23:15:09.642 INFO [1] received reconcile message
42092 Sep 22 23:15:09.642 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(309), op: ExtentClose { repair_id: ReconciliationId(309), extent_id: 92 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42093 Sep 22 23:15:09.642 INFO [1] client ExtentClose { repair_id: ReconciliationId(309), extent_id: 92 }
42094 Sep 22 23:15:09.642 INFO [2] received reconcile message
42095 Sep 22 23:15:09.642 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(309), op: ExtentClose { repair_id: ReconciliationId(309), extent_id: 92 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42096 Sep 22 23:15:09.642 INFO [2] client ExtentClose { repair_id: ReconciliationId(309), extent_id: 92 }
42097 Sep 22 23:15:09.642 DEBG 309 Close extent 92
42098 Sep 22 23:15:09.642 DEBG 309 Close extent 92
42099 Sep 22 23:15:09.643 DEBG 309 Close extent 92
42100 Sep 22 23:15:09.643 DEBG [2] It's time to notify for 309
42101 Sep 22 23:15:09.643 INFO Completion from [2] id:309 status:true
42102 Sep 22 23:15:09.643 INFO [310/752] Repair commands completed
42103 Sep 22 23:15:09.643 INFO Pop front: ReconcileIO { id: ReconciliationId(310), op: ExtentRepair { repair_id: ReconciliationId(310), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42104 Sep 22 23:15:09.643 INFO Sent repair work, now wait for resp
42105 Sep 22 23:15:09.643 INFO [0] received reconcile message
42106 Sep 22 23:15:09.643 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(310), op: ExtentRepair { repair_id: ReconciliationId(310), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42107 Sep 22 23:15:09.643 INFO [0] client ExtentRepair { repair_id: ReconciliationId(310), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42108 Sep 22 23:15:09.643 INFO [0] Sending repair request ReconciliationId(310)
42109 Sep 22 23:15:09.643 INFO [1] received reconcile message
42110 Sep 22 23:15:09.643 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(310), op: ExtentRepair { repair_id: ReconciliationId(310), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42111 Sep 22 23:15:09.643 INFO [1] client ExtentRepair { repair_id: ReconciliationId(310), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42112 Sep 22 23:15:09.643 INFO [1] No action required ReconciliationId(310)
42113 Sep 22 23:15:09.643 INFO [2] received reconcile message
42114 Sep 22 23:15:09.643 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(310), op: ExtentRepair { repair_id: ReconciliationId(310), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42115 Sep 22 23:15:09.643 INFO [2] client ExtentRepair { repair_id: ReconciliationId(310), extent_id: 92, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42116 Sep 22 23:15:09.643 INFO [2] No action required ReconciliationId(310)
42117 Sep 22 23:15:09.643 DEBG 310 Repair extent 92 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
42118 Sep 22 23:15:09.643 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/05C.copy"
42119 Sep 22 23:15:09.707 INFO accepted connection, remote_addr: 127.0.0.1:44147, local_addr: 127.0.0.1:46213, task: repair
42120 Sep 22 23:15:09.707 TRCE incoming request, uri: /extent/92/files, method: GET, req_id: 86f01bff-af3d-43d7-a1ae-6c24fdeb8ab3, remote_addr: 127.0.0.1:44147, local_addr: 127.0.0.1:46213, task: repair
42121 Sep 22 23:15:09.707 INFO request completed, latency_us: 222, response_code: 200, uri: /extent/92/files, method: GET, req_id: 86f01bff-af3d-43d7-a1ae-6c24fdeb8ab3, remote_addr: 127.0.0.1:44147, local_addr: 127.0.0.1:46213, task: repair
42122 Sep 22 23:15:09.708 INFO eid:92 Found repair files: ["05C", "05C.db"]
42123 Sep 22 23:15:09.708 TRCE incoming request, uri: /newextent/92/data, method: GET, req_id: d8c93b2e-41df-4c3a-b695-a37205ae6a77, remote_addr: 127.0.0.1:44147, local_addr: 127.0.0.1:46213, task: repair
42124 Sep 22 23:15:09.708 INFO request completed, latency_us: 337, response_code: 200, uri: /newextent/92/data, method: GET, req_id: d8c93b2e-41df-4c3a-b695-a37205ae6a77, remote_addr: 127.0.0.1:44147, local_addr: 127.0.0.1:46213, task: repair
42125 Sep 22 23:15:09.713 TRCE incoming request, uri: /newextent/92/db, method: GET, req_id: 9cebed4a-123d-4e3b-85f0-9f92a55b28aa, remote_addr: 127.0.0.1:44147, local_addr: 127.0.0.1:46213, task: repair
42126 Sep 22 23:15:09.714 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/92/db, method: GET, req_id: 9cebed4a-123d-4e3b-85f0-9f92a55b28aa, remote_addr: 127.0.0.1:44147, local_addr: 127.0.0.1:46213, task: repair
42127 Sep 22 23:15:09.715 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/05C.copy" to "/tmp/downstairs-vrx8aK6L/00/000/05C.replace"
42128 Sep 22 23:15:09.715 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42129 Sep 22 23:15:09.716 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/05C.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
42130 Sep 22 23:15:09.716 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/05C"
42131 Sep 22 23:15:09.716 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/05C.db"
42132 Sep 22 23:15:09.716 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42133 Sep 22 23:15:09.716 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/05C.replace" to "/tmp/downstairs-vrx8aK6L/00/000/05C.completed"
42134 Sep 22 23:15:09.716 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42135 Sep 22 23:15:09.716 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42136 Sep 22 23:15:09.716 DEBG [0] It's time to notify for 310
42137 Sep 22 23:15:09.717 INFO Completion from [0] id:310 status:true
42138 Sep 22 23:15:09.717 INFO [311/752] Repair commands completed
42139 Sep 22 23:15:09.717 INFO Pop front: ReconcileIO { id: ReconciliationId(311), op: ExtentReopen { repair_id: ReconciliationId(311), extent_id: 92 }, state: ClientData([New, New, New]) }
42140 Sep 22 23:15:09.717 INFO Sent repair work, now wait for resp
42141 Sep 22 23:15:09.717 INFO [0] received reconcile message
42142 Sep 22 23:15:09.717 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(311), op: ExtentReopen { repair_id: ReconciliationId(311), extent_id: 92 }, state: ClientData([InProgress, New, New]) }, : downstairs
42143 Sep 22 23:15:09.717 INFO [0] client ExtentReopen { repair_id: ReconciliationId(311), extent_id: 92 }
42144 Sep 22 23:15:09.717 INFO [1] received reconcile message
42145 Sep 22 23:15:09.717 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(311), op: ExtentReopen { repair_id: ReconciliationId(311), extent_id: 92 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42146 Sep 22 23:15:09.717 INFO [1] client ExtentReopen { repair_id: ReconciliationId(311), extent_id: 92 }
42147 Sep 22 23:15:09.717 INFO [2] received reconcile message
42148 Sep 22 23:15:09.717 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(311), op: ExtentReopen { repair_id: ReconciliationId(311), extent_id: 92 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42149 Sep 22 23:15:09.717 INFO [2] client ExtentReopen { repair_id: ReconciliationId(311), extent_id: 92 }
42150 Sep 22 23:15:09.717 DEBG 311 Reopen extent 92
42151 Sep 22 23:15:09.718 DEBG 311 Reopen extent 92
42152 Sep 22 23:15:09.718 DEBG 311 Reopen extent 92
42153 Sep 22 23:15:09.719 DEBG [2] It's time to notify for 311
42154 Sep 22 23:15:09.719 INFO Completion from [2] id:311 status:true
42155 Sep 22 23:15:09.719 INFO [312/752] Repair commands completed
42156 Sep 22 23:15:09.719 INFO Pop front: ReconcileIO { id: ReconciliationId(312), op: ExtentFlush { repair_id: ReconciliationId(312), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42157 Sep 22 23:15:09.719 INFO Sent repair work, now wait for resp
42158 Sep 22 23:15:09.719 INFO [0] received reconcile message
42159 Sep 22 23:15:09.719 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(312), op: ExtentFlush { repair_id: ReconciliationId(312), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42160 Sep 22 23:15:09.719 INFO [0] client ExtentFlush { repair_id: ReconciliationId(312), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42161 Sep 22 23:15:09.719 INFO [1] received reconcile message
42162 Sep 22 23:15:09.719 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(312), op: ExtentFlush { repair_id: ReconciliationId(312), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42163 Sep 22 23:15:09.719 INFO [1] client ExtentFlush { repair_id: ReconciliationId(312), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42164 Sep 22 23:15:09.719 INFO [2] received reconcile message
42165 Sep 22 23:15:09.719 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(312), op: ExtentFlush { repair_id: ReconciliationId(312), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42166 Sep 22 23:15:09.719 INFO [2] client ExtentFlush { repair_id: ReconciliationId(312), extent_id: 147, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42167 Sep 22 23:15:09.719 DEBG 312 Flush extent 147 with f:2 g:2
42168 Sep 22 23:15:09.719 DEBG Flush just extent 147 with f:2 and g:2
42169 Sep 22 23:15:09.719 DEBG [1] It's time to notify for 312
42170 Sep 22 23:15:09.719 INFO Completion from [1] id:312 status:true
42171 Sep 22 23:15:09.719 INFO [313/752] Repair commands completed
42172 Sep 22 23:15:09.719 INFO Pop front: ReconcileIO { id: ReconciliationId(313), op: ExtentClose { repair_id: ReconciliationId(313), extent_id: 147 }, state: ClientData([New, New, New]) }
42173 Sep 22 23:15:09.719 INFO Sent repair work, now wait for resp
42174 Sep 22 23:15:09.719 INFO [0] received reconcile message
42175 Sep 22 23:15:09.720 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(313), op: ExtentClose { repair_id: ReconciliationId(313), extent_id: 147 }, state: ClientData([InProgress, New, New]) }, : downstairs
42176 Sep 22 23:15:09.720 INFO [0] client ExtentClose { repair_id: ReconciliationId(313), extent_id: 147 }
42177 Sep 22 23:15:09.720 INFO [1] received reconcile message
42178 Sep 22 23:15:09.720 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(313), op: ExtentClose { repair_id: ReconciliationId(313), extent_id: 147 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42179 Sep 22 23:15:09.720 INFO [1] client ExtentClose { repair_id: ReconciliationId(313), extent_id: 147 }
42180 Sep 22 23:15:09.720 INFO [2] received reconcile message
42181 Sep 22 23:15:09.720 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(313), op: ExtentClose { repair_id: ReconciliationId(313), extent_id: 147 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42182 Sep 22 23:15:09.720 INFO [2] client ExtentClose { repair_id: ReconciliationId(313), extent_id: 147 }
42183 Sep 22 23:15:09.720 DEBG 313 Close extent 147
42184 Sep 22 23:15:09.720 DEBG 313 Close extent 147
42185 Sep 22 23:15:09.720 DEBG 313 Close extent 147
42186 Sep 22 23:15:09.721 DEBG [2] It's time to notify for 313
42187 Sep 22 23:15:09.721 INFO Completion from [2] id:313 status:true
42188 Sep 22 23:15:09.721 INFO [314/752] Repair commands completed
42189 Sep 22 23:15:09.721 INFO Pop front: ReconcileIO { id: ReconciliationId(314), op: ExtentRepair { repair_id: ReconciliationId(314), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42190 Sep 22 23:15:09.721 INFO Sent repair work, now wait for resp
42191 Sep 22 23:15:09.721 INFO [0] received reconcile message
42192 Sep 22 23:15:09.721 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(314), op: ExtentRepair { repair_id: ReconciliationId(314), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42193 Sep 22 23:15:09.721 INFO [0] client ExtentRepair { repair_id: ReconciliationId(314), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42194 Sep 22 23:15:09.721 INFO [0] Sending repair request ReconciliationId(314)
42195 Sep 22 23:15:09.721 INFO [1] received reconcile message
42196 Sep 22 23:15:09.721 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(314), op: ExtentRepair { repair_id: ReconciliationId(314), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42197 Sep 22 23:15:09.721 INFO [1] client ExtentRepair { repair_id: ReconciliationId(314), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42198 Sep 22 23:15:09.721 INFO [1] No action required ReconciliationId(314)
42199 Sep 22 23:15:09.721 INFO [2] received reconcile message
42200 Sep 22 23:15:09.721 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(314), op: ExtentRepair { repair_id: ReconciliationId(314), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42201 Sep 22 23:15:09.721 INFO [2] client ExtentRepair { repair_id: ReconciliationId(314), extent_id: 147, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42202 Sep 22 23:15:09.721 INFO [2] No action required ReconciliationId(314)
42203 Sep 22 23:15:09.721 DEBG 314 Repair extent 147 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
42204 Sep 22 23:15:09.721 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/093.copy"
42205 Sep 22 23:15:09.782 INFO accepted connection, remote_addr: 127.0.0.1:46870, local_addr: 127.0.0.1:46213, task: repair
42206 Sep 22 23:15:09.782 TRCE incoming request, uri: /extent/147/files, method: GET, req_id: 20031d70-0c35-4397-b021-e141acdd59c4, remote_addr: 127.0.0.1:46870, local_addr: 127.0.0.1:46213, task: repair
42207 Sep 22 23:15:09.783 INFO request completed, latency_us: 212, response_code: 200, uri: /extent/147/files, method: GET, req_id: 20031d70-0c35-4397-b021-e141acdd59c4, remote_addr: 127.0.0.1:46870, local_addr: 127.0.0.1:46213, task: repair
42208 Sep 22 23:15:09.783 INFO eid:147 Found repair files: ["093", "093.db"]
42209 Sep 22 23:15:09.783 TRCE incoming request, uri: /newextent/147/data, method: GET, req_id: 1c62ead1-9800-4f65-8a6b-dbab3be9147b, remote_addr: 127.0.0.1:46870, local_addr: 127.0.0.1:46213, task: repair
42210 Sep 22 23:15:09.784 INFO request completed, latency_us: 330, response_code: 200, uri: /newextent/147/data, method: GET, req_id: 1c62ead1-9800-4f65-8a6b-dbab3be9147b, remote_addr: 127.0.0.1:46870, local_addr: 127.0.0.1:46213, task: repair
42211 Sep 22 23:15:09.789 TRCE incoming request, uri: /newextent/147/db, method: GET, req_id: f3924cc3-7bb4-4008-af40-91ce74de18aa, remote_addr: 127.0.0.1:46870, local_addr: 127.0.0.1:46213, task: repair
42212 Sep 22 23:15:09.789 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/147/db, method: GET, req_id: f3924cc3-7bb4-4008-af40-91ce74de18aa, remote_addr: 127.0.0.1:46870, local_addr: 127.0.0.1:46213, task: repair
42213 Sep 22 23:15:09.790 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/093.copy" to "/tmp/downstairs-vrx8aK6L/00/000/093.replace"
42214 Sep 22 23:15:09.790 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42215 Sep 22 23:15:09.791 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/093.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
42216 Sep 22 23:15:09.791 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/093"
42217 Sep 22 23:15:09.791 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/093.db"
42218 Sep 22 23:15:09.791 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42219 Sep 22 23:15:09.791 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/093.replace" to "/tmp/downstairs-vrx8aK6L/00/000/093.completed"
42220 Sep 22 23:15:09.791 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42221 Sep 22 23:15:09.791 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42222 Sep 22 23:15:09.792 DEBG [0] It's time to notify for 314
42223 Sep 22 23:15:09.792 INFO Completion from [0] id:314 status:true
42224 Sep 22 23:15:09.792 INFO [315/752] Repair commands completed
42225 Sep 22 23:15:09.792 INFO Pop front: ReconcileIO { id: ReconciliationId(315), op: ExtentReopen { repair_id: ReconciliationId(315), extent_id: 147 }, state: ClientData([New, New, New]) }
42226 Sep 22 23:15:09.792 INFO Sent repair work, now wait for resp
42227 Sep 22 23:15:09.792 INFO [0] received reconcile message
42228 Sep 22 23:15:09.792 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(315), op: ExtentReopen { repair_id: ReconciliationId(315), extent_id: 147 }, state: ClientData([InProgress, New, New]) }, : downstairs
42229 Sep 22 23:15:09.792 INFO [0] client ExtentReopen { repair_id: ReconciliationId(315), extent_id: 147 }
42230 Sep 22 23:15:09.792 INFO [1] received reconcile message
42231 Sep 22 23:15:09.792 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(315), op: ExtentReopen { repair_id: ReconciliationId(315), extent_id: 147 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42232 Sep 22 23:15:09.792 INFO [1] client ExtentReopen { repair_id: ReconciliationId(315), extent_id: 147 }
42233 Sep 22 23:15:09.792 INFO [2] received reconcile message
42234 Sep 22 23:15:09.792 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(315), op: ExtentReopen { repair_id: ReconciliationId(315), extent_id: 147 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42235 Sep 22 23:15:09.792 INFO [2] client ExtentReopen { repair_id: ReconciliationId(315), extent_id: 147 }
42236 Sep 22 23:15:09.792 DEBG 315 Reopen extent 147
42237 Sep 22 23:15:09.793 DEBG 315 Reopen extent 147
42238 Sep 22 23:15:09.793 DEBG 315 Reopen extent 147
42239 Sep 22 23:15:09.794 DEBG [2] It's time to notify for 315
42240 Sep 22 23:15:09.794 INFO Completion from [2] id:315 status:true
42241 Sep 22 23:15:09.794 INFO [316/752] Repair commands completed
42242 Sep 22 23:15:09.794 INFO Pop front: ReconcileIO { id: ReconciliationId(316), op: ExtentFlush { repair_id: ReconciliationId(316), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42243 Sep 22 23:15:09.794 INFO Sent repair work, now wait for resp
42244 Sep 22 23:15:09.794 INFO [0] received reconcile message
42245 Sep 22 23:15:09.794 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(316), op: ExtentFlush { repair_id: ReconciliationId(316), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42246 Sep 22 23:15:09.794 INFO [0] client ExtentFlush { repair_id: ReconciliationId(316), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42247 Sep 22 23:15:09.794 INFO [1] received reconcile message
42248 Sep 22 23:15:09.794 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(316), op: ExtentFlush { repair_id: ReconciliationId(316), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42249 Sep 22 23:15:09.794 INFO [1] client ExtentFlush { repair_id: ReconciliationId(316), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42250 Sep 22 23:15:09.794 INFO [2] received reconcile message
42251 Sep 22 23:15:09.794 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(316), op: ExtentFlush { repair_id: ReconciliationId(316), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42252 Sep 22 23:15:09.794 INFO [2] client ExtentFlush { repair_id: ReconciliationId(316), extent_id: 111, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42253 Sep 22 23:15:09.794 DEBG 316 Flush extent 111 with f:2 g:2
42254 Sep 22 23:15:09.794 DEBG Flush just extent 111 with f:2 and g:2
42255 Sep 22 23:15:09.795 DEBG [1] It's time to notify for 316
42256 Sep 22 23:15:09.795 INFO Completion from [1] id:316 status:true
42257 Sep 22 23:15:09.795 INFO [317/752] Repair commands completed
42258 Sep 22 23:15:09.795 INFO Pop front: ReconcileIO { id: ReconciliationId(317), op: ExtentClose { repair_id: ReconciliationId(317), extent_id: 111 }, state: ClientData([New, New, New]) }
42259 Sep 22 23:15:09.795 INFO Sent repair work, now wait for resp
42260 Sep 22 23:15:09.795 INFO [0] received reconcile message
42261 Sep 22 23:15:09.795 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(317), op: ExtentClose { repair_id: ReconciliationId(317), extent_id: 111 }, state: ClientData([InProgress, New, New]) }, : downstairs
42262 Sep 22 23:15:09.795 INFO [0] client ExtentClose { repair_id: ReconciliationId(317), extent_id: 111 }
42263 Sep 22 23:15:09.795 INFO [1] received reconcile message
42264 Sep 22 23:15:09.795 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(317), op: ExtentClose { repair_id: ReconciliationId(317), extent_id: 111 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42265 Sep 22 23:15:09.795 INFO [1] client ExtentClose { repair_id: ReconciliationId(317), extent_id: 111 }
42266 Sep 22 23:15:09.795 INFO [2] received reconcile message
42267 Sep 22 23:15:09.795 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(317), op: ExtentClose { repair_id: ReconciliationId(317), extent_id: 111 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42268 Sep 22 23:15:09.795 INFO [2] client ExtentClose { repair_id: ReconciliationId(317), extent_id: 111 }
42269 Sep 22 23:15:09.795 DEBG 317 Close extent 111
42270 Sep 22 23:15:09.795 DEBG 317 Close extent 111
42271 Sep 22 23:15:09.796 DEBG 317 Close extent 111
42272 Sep 22 23:15:09.796 DEBG [2] It's time to notify for 317
42273 Sep 22 23:15:09.796 INFO Completion from [2] id:317 status:true
42274 Sep 22 23:15:09.796 INFO [318/752] Repair commands completed
42275 Sep 22 23:15:09.796 INFO Pop front: ReconcileIO { id: ReconciliationId(318), op: ExtentRepair { repair_id: ReconciliationId(318), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42276 Sep 22 23:15:09.796 INFO Sent repair work, now wait for resp
42277 Sep 22 23:15:09.796 INFO [0] received reconcile message
42278 Sep 22 23:15:09.796 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(318), op: ExtentRepair { repair_id: ReconciliationId(318), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42279 Sep 22 23:15:09.796 INFO [0] client ExtentRepair { repair_id: ReconciliationId(318), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42280 Sep 22 23:15:09.796 INFO [0] Sending repair request ReconciliationId(318)
42281 Sep 22 23:15:09.796 INFO [1] received reconcile message
42282 Sep 22 23:15:09.796 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(318), op: ExtentRepair { repair_id: ReconciliationId(318), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42283 Sep 22 23:15:09.796 INFO [1] client ExtentRepair { repair_id: ReconciliationId(318), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42284 Sep 22 23:15:09.796 INFO [1] No action required ReconciliationId(318)
42285 Sep 22 23:15:09.796 INFO [2] received reconcile message
42286 Sep 22 23:15:09.796 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(318), op: ExtentRepair { repair_id: ReconciliationId(318), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42287 Sep 22 23:15:09.796 INFO [2] client ExtentRepair { repair_id: ReconciliationId(318), extent_id: 111, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42288 Sep 22 23:15:09.796 INFO [2] No action required ReconciliationId(318)
42289 Sep 22 23:15:09.796 DEBG 318 Repair extent 111 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
42290 Sep 22 23:15:09.796 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/06F.copy"
42291 Sep 22 23:15:09.861 INFO accepted connection, remote_addr: 127.0.0.1:44048, local_addr: 127.0.0.1:46213, task: repair
42292 Sep 22 23:15:09.861 TRCE incoming request, uri: /extent/111/files, method: GET, req_id: a8e9f900-2f5b-4ca5-a68e-2e73d8b87642, remote_addr: 127.0.0.1:44048, local_addr: 127.0.0.1:46213, task: repair
42293 Sep 22 23:15:09.861 INFO request completed, latency_us: 216, response_code: 200, uri: /extent/111/files, method: GET, req_id: a8e9f900-2f5b-4ca5-a68e-2e73d8b87642, remote_addr: 127.0.0.1:44048, local_addr: 127.0.0.1:46213, task: repair
42294 Sep 22 23:15:09.862 INFO eid:111 Found repair files: ["06F", "06F.db"]
42295 Sep 22 23:15:09.862 TRCE incoming request, uri: /newextent/111/data, method: GET, req_id: ca891a44-b9b6-44e7-86a7-03ba10eb6159, remote_addr: 127.0.0.1:44048, local_addr: 127.0.0.1:46213, task: repair
42296 Sep 22 23:15:09.862 INFO request completed, latency_us: 337, response_code: 200, uri: /newextent/111/data, method: GET, req_id: ca891a44-b9b6-44e7-86a7-03ba10eb6159, remote_addr: 127.0.0.1:44048, local_addr: 127.0.0.1:46213, task: repair
42297 Sep 22 23:15:09.867 TRCE incoming request, uri: /newextent/111/db, method: GET, req_id: a59cab81-144e-41ea-92fc-6822d4d1344c, remote_addr: 127.0.0.1:44048, local_addr: 127.0.0.1:46213, task: repair
42298 Sep 22 23:15:09.868 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/111/db, method: GET, req_id: a59cab81-144e-41ea-92fc-6822d4d1344c, remote_addr: 127.0.0.1:44048, local_addr: 127.0.0.1:46213, task: repair
42299 Sep 22 23:15:09.869 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/06F.copy" to "/tmp/downstairs-vrx8aK6L/00/000/06F.replace"
42300 Sep 22 23:15:09.869 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42301 Sep 22 23:15:09.870 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/06F.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
42302 Sep 22 23:15:09.870 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/06F"
42303 Sep 22 23:15:09.870 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/06F.db"
42304 Sep 22 23:15:09.870 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42305 Sep 22 23:15:09.870 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/06F.replace" to "/tmp/downstairs-vrx8aK6L/00/000/06F.completed"
42306 Sep 22 23:15:09.870 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42307 Sep 22 23:15:09.870 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42308 Sep 22 23:15:09.870 DEBG [0] It's time to notify for 318
42309 Sep 22 23:15:09.871 INFO Completion from [0] id:318 status:true
42310 Sep 22 23:15:09.871 INFO [319/752] Repair commands completed
42311 Sep 22 23:15:09.871 INFO Pop front: ReconcileIO { id: ReconciliationId(319), op: ExtentReopen { repair_id: ReconciliationId(319), extent_id: 111 }, state: ClientData([New, New, New]) }
42312 Sep 22 23:15:09.871 INFO Sent repair work, now wait for resp
42313 Sep 22 23:15:09.871 INFO [0] received reconcile message
42314 Sep 22 23:15:09.871 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(319), op: ExtentReopen { repair_id: ReconciliationId(319), extent_id: 111 }, state: ClientData([InProgress, New, New]) }, : downstairs
42315 Sep 22 23:15:09.871 INFO [0] client ExtentReopen { repair_id: ReconciliationId(319), extent_id: 111 }
42316 Sep 22 23:15:09.871 INFO [1] received reconcile message
42317 Sep 22 23:15:09.871 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(319), op: ExtentReopen { repair_id: ReconciliationId(319), extent_id: 111 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42318 Sep 22 23:15:09.871 INFO [1] client ExtentReopen { repair_id: ReconciliationId(319), extent_id: 111 }
42319 Sep 22 23:15:09.871 INFO [2] received reconcile message
42320 Sep 22 23:15:09.871 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(319), op: ExtentReopen { repair_id: ReconciliationId(319), extent_id: 111 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42321 Sep 22 23:15:09.871 INFO [2] client ExtentReopen { repair_id: ReconciliationId(319), extent_id: 111 }
42322 Sep 22 23:15:09.871 DEBG 319 Reopen extent 111
42323 Sep 22 23:15:09.872 DEBG 319 Reopen extent 111
42324 Sep 22 23:15:09.872 DEBG 319 Reopen extent 111
42325 Sep 22 23:15:09.873 DEBG [2] It's time to notify for 319
42326 Sep 22 23:15:09.873 INFO Completion from [2] id:319 status:true
42327 Sep 22 23:15:09.873 INFO [320/752] Repair commands completed
42328 Sep 22 23:15:09.873 INFO Pop front: ReconcileIO { id: ReconciliationId(320), op: ExtentFlush { repair_id: ReconciliationId(320), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42329 Sep 22 23:15:09.873 INFO Sent repair work, now wait for resp
42330 Sep 22 23:15:09.873 INFO [0] received reconcile message
42331 Sep 22 23:15:09.873 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(320), op: ExtentFlush { repair_id: ReconciliationId(320), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42332 Sep 22 23:15:09.873 INFO [0] client ExtentFlush { repair_id: ReconciliationId(320), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42333 Sep 22 23:15:09.873 INFO [1] received reconcile message
42334 Sep 22 23:15:09.873 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(320), op: ExtentFlush { repair_id: ReconciliationId(320), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42335 Sep 22 23:15:09.873 INFO [1] client ExtentFlush { repair_id: ReconciliationId(320), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42336 Sep 22 23:15:09.873 INFO [2] received reconcile message
42337 Sep 22 23:15:09.873 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(320), op: ExtentFlush { repair_id: ReconciliationId(320), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42338 Sep 22 23:15:09.873 INFO [2] client ExtentFlush { repair_id: ReconciliationId(320), extent_id: 187, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42339 Sep 22 23:15:09.873 DEBG 320 Flush extent 187 with f:2 g:2
42340 Sep 22 23:15:09.873 DEBG Flush just extent 187 with f:2 and g:2
42341 Sep 22 23:15:09.873 DEBG [1] It's time to notify for 320
42342 Sep 22 23:15:09.873 INFO Completion from [1] id:320 status:true
42343 Sep 22 23:15:09.873 INFO [321/752] Repair commands completed
42344 Sep 22 23:15:09.873 INFO Pop front: ReconcileIO { id: ReconciliationId(321), op: ExtentClose { repair_id: ReconciliationId(321), extent_id: 187 }, state: ClientData([New, New, New]) }
42345 Sep 22 23:15:09.873 INFO Sent repair work, now wait for resp
42346 Sep 22 23:15:09.873 INFO [0] received reconcile message
42347 Sep 22 23:15:09.873 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(321), op: ExtentClose { repair_id: ReconciliationId(321), extent_id: 187 }, state: ClientData([InProgress, New, New]) }, : downstairs
42348 Sep 22 23:15:09.873 INFO [0] client ExtentClose { repair_id: ReconciliationId(321), extent_id: 187 }
42349 Sep 22 23:15:09.874 INFO [1] received reconcile message
42350 Sep 22 23:15:09.874 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(321), op: ExtentClose { repair_id: ReconciliationId(321), extent_id: 187 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42351 Sep 22 23:15:09.874 INFO [1] client ExtentClose { repair_id: ReconciliationId(321), extent_id: 187 }
42352 Sep 22 23:15:09.874 INFO [2] received reconcile message
42353 Sep 22 23:15:09.874 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(321), op: ExtentClose { repair_id: ReconciliationId(321), extent_id: 187 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42354 Sep 22 23:15:09.874 INFO [2] client ExtentClose { repair_id: ReconciliationId(321), extent_id: 187 }
42355 Sep 22 23:15:09.874 DEBG 321 Close extent 187
42356 Sep 22 23:15:09.874 DEBG 321 Close extent 187
42357 Sep 22 23:15:09.874 DEBG 321 Close extent 187
42358 Sep 22 23:15:09.875 DEBG [2] It's time to notify for 321
42359 Sep 22 23:15:09.875 INFO Completion from [2] id:321 status:true
42360 Sep 22 23:15:09.875 INFO [322/752] Repair commands completed
42361 Sep 22 23:15:09.875 INFO Pop front: ReconcileIO { id: ReconciliationId(322), op: ExtentRepair { repair_id: ReconciliationId(322), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42362 Sep 22 23:15:09.875 INFO Sent repair work, now wait for resp
42363 Sep 22 23:15:09.875 INFO [0] received reconcile message
42364 Sep 22 23:15:09.875 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(322), op: ExtentRepair { repair_id: ReconciliationId(322), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42365 Sep 22 23:15:09.875 INFO [0] client ExtentRepair { repair_id: ReconciliationId(322), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42366 Sep 22 23:15:09.875 INFO [0] Sending repair request ReconciliationId(322)
42367 Sep 22 23:15:09.875 INFO [1] received reconcile message
42368 Sep 22 23:15:09.875 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(322), op: ExtentRepair { repair_id: ReconciliationId(322), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42369 Sep 22 23:15:09.875 INFO [1] client ExtentRepair { repair_id: ReconciliationId(322), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42370 Sep 22 23:15:09.875 INFO [1] No action required ReconciliationId(322)
42371 Sep 22 23:15:09.875 INFO [2] received reconcile message
42372 Sep 22 23:15:09.875 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(322), op: ExtentRepair { repair_id: ReconciliationId(322), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42373 Sep 22 23:15:09.875 INFO [2] client ExtentRepair { repair_id: ReconciliationId(322), extent_id: 187, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42374 Sep 22 23:15:09.875 INFO [2] No action required ReconciliationId(322)
42375 Sep 22 23:15:09.875 DEBG 322 Repair extent 187 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
42376 Sep 22 23:15:09.875 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0BB.copy"
42377 Sep 22 23:15:09.937 INFO accepted connection, remote_addr: 127.0.0.1:59897, local_addr: 127.0.0.1:46213, task: repair
42378 Sep 22 23:15:09.937 TRCE incoming request, uri: /extent/187/files, method: GET, req_id: 42d4932b-2b56-4667-8bf3-1f4ad76e6381, remote_addr: 127.0.0.1:59897, local_addr: 127.0.0.1:46213, task: repair
42379 Sep 22 23:15:09.937 INFO request completed, latency_us: 217, response_code: 200, uri: /extent/187/files, method: GET, req_id: 42d4932b-2b56-4667-8bf3-1f4ad76e6381, remote_addr: 127.0.0.1:59897, local_addr: 127.0.0.1:46213, task: repair
42380 Sep 22 23:15:09.937 INFO eid:187 Found repair files: ["0BB", "0BB.db"]
42381 Sep 22 23:15:09.938 TRCE incoming request, uri: /newextent/187/data, method: GET, req_id: be66fccc-1647-415e-9e28-e6bcb1564b8e, remote_addr: 127.0.0.1:59897, local_addr: 127.0.0.1:46213, task: repair
42382 Sep 22 23:15:09.938 INFO request completed, latency_us: 332, response_code: 200, uri: /newextent/187/data, method: GET, req_id: be66fccc-1647-415e-9e28-e6bcb1564b8e, remote_addr: 127.0.0.1:59897, local_addr: 127.0.0.1:46213, task: repair
42383 Sep 22 23:15:09.943 TRCE incoming request, uri: /newextent/187/db, method: GET, req_id: e4a9f6d7-2b16-4f2d-a725-e6b57bab09c6, remote_addr: 127.0.0.1:59897, local_addr: 127.0.0.1:46213, task: repair
42384 Sep 22 23:15:09.943 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/187/db, method: GET, req_id: e4a9f6d7-2b16-4f2d-a725-e6b57bab09c6, remote_addr: 127.0.0.1:59897, local_addr: 127.0.0.1:46213, task: repair
42385 Sep 22 23:15:09.945 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0BB.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0BB.replace"
42386 Sep 22 23:15:09.945 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42387 Sep 22 23:15:09.946 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0BB.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
42388 Sep 22 23:15:09.946 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0BB"
42389 Sep 22 23:15:09.946 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0BB.db"
42390 Sep 22 23:15:09.946 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42391 Sep 22 23:15:09.946 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0BB.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0BB.completed"
42392 Sep 22 23:15:09.946 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42393 Sep 22 23:15:09.946 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42394 Sep 22 23:15:09.946 DEBG [0] It's time to notify for 322
42395 Sep 22 23:15:09.946 INFO Completion from [0] id:322 status:true
42396 Sep 22 23:15:09.946 INFO [323/752] Repair commands completed
42397 Sep 22 23:15:09.946 INFO Pop front: ReconcileIO { id: ReconciliationId(323), op: ExtentReopen { repair_id: ReconciliationId(323), extent_id: 187 }, state: ClientData([New, New, New]) }
42398 Sep 22 23:15:09.946 INFO Sent repair work, now wait for resp
42399 Sep 22 23:15:09.946 INFO [0] received reconcile message
42400 Sep 22 23:15:09.946 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(323), op: ExtentReopen { repair_id: ReconciliationId(323), extent_id: 187 }, state: ClientData([InProgress, New, New]) }, : downstairs
42401 Sep 22 23:15:09.946 INFO [0] client ExtentReopen { repair_id: ReconciliationId(323), extent_id: 187 }
42402 Sep 22 23:15:09.947 INFO [1] received reconcile message
42403 Sep 22 23:15:09.947 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(323), op: ExtentReopen { repair_id: ReconciliationId(323), extent_id: 187 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42404 Sep 22 23:15:09.947 INFO [1] client ExtentReopen { repair_id: ReconciliationId(323), extent_id: 187 }
42405 Sep 22 23:15:09.947 INFO [2] received reconcile message
42406 Sep 22 23:15:09.947 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(323), op: ExtentReopen { repair_id: ReconciliationId(323), extent_id: 187 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42407 Sep 22 23:15:09.947 INFO [2] client ExtentReopen { repair_id: ReconciliationId(323), extent_id: 187 }
42408 Sep 22 23:15:09.947 DEBG 323 Reopen extent 187
42409 Sep 22 23:15:09.947 DEBG 323 Reopen extent 187
42410 Sep 22 23:15:09.948 DEBG 323 Reopen extent 187
42411 Sep 22 23:15:09.948 DEBG [2] It's time to notify for 323
42412 Sep 22 23:15:09.949 INFO Completion from [2] id:323 status:true
42413 Sep 22 23:15:09.949 INFO [324/752] Repair commands completed
42414 Sep 22 23:15:09.949 INFO Pop front: ReconcileIO { id: ReconciliationId(324), op: ExtentFlush { repair_id: ReconciliationId(324), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42415 Sep 22 23:15:09.949 INFO Sent repair work, now wait for resp
42416 Sep 22 23:15:09.949 INFO [0] received reconcile message
42417 Sep 22 23:15:09.949 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(324), op: ExtentFlush { repair_id: ReconciliationId(324), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42418 Sep 22 23:15:09.949 INFO [0] client ExtentFlush { repair_id: ReconciliationId(324), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42419 Sep 22 23:15:09.949 INFO [1] received reconcile message
42420 Sep 22 23:15:09.949 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(324), op: ExtentFlush { repair_id: ReconciliationId(324), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42421 Sep 22 23:15:09.949 INFO [1] client ExtentFlush { repair_id: ReconciliationId(324), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42422 Sep 22 23:15:09.949 INFO [2] received reconcile message
42423 Sep 22 23:15:09.949 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(324), op: ExtentFlush { repair_id: ReconciliationId(324), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42424 Sep 22 23:15:09.949 INFO [2] client ExtentFlush { repair_id: ReconciliationId(324), extent_id: 149, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42425 Sep 22 23:15:09.949 DEBG 324 Flush extent 149 with f:2 g:2
42426 Sep 22 23:15:09.949 DEBG Flush just extent 149 with f:2 and g:2
42427 Sep 22 23:15:09.949 DEBG [1] It's time to notify for 324
42428 Sep 22 23:15:09.949 INFO Completion from [1] id:324 status:true
42429 Sep 22 23:15:09.949 INFO [325/752] Repair commands completed
42430 Sep 22 23:15:09.949 INFO Pop front: ReconcileIO { id: ReconciliationId(325), op: ExtentClose { repair_id: ReconciliationId(325), extent_id: 149 }, state: ClientData([New, New, New]) }
42431 Sep 22 23:15:09.949 INFO Sent repair work, now wait for resp
42432 Sep 22 23:15:09.949 INFO [0] received reconcile message
42433 Sep 22 23:15:09.949 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(325), op: ExtentClose { repair_id: ReconciliationId(325), extent_id: 149 }, state: ClientData([InProgress, New, New]) }, : downstairs
42434 Sep 22 23:15:09.949 INFO [0] client ExtentClose { repair_id: ReconciliationId(325), extent_id: 149 }
42435 Sep 22 23:15:09.949 INFO [1] received reconcile message
42436 Sep 22 23:15:09.949 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(325), op: ExtentClose { repair_id: ReconciliationId(325), extent_id: 149 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42437 Sep 22 23:15:09.949 INFO [1] client ExtentClose { repair_id: ReconciliationId(325), extent_id: 149 }
42438 Sep 22 23:15:09.949 INFO [2] received reconcile message
42439 Sep 22 23:15:09.949 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(325), op: ExtentClose { repair_id: ReconciliationId(325), extent_id: 149 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42440 Sep 22 23:15:09.949 INFO [2] client ExtentClose { repair_id: ReconciliationId(325), extent_id: 149 }
42441 Sep 22 23:15:09.950 DEBG 325 Close extent 149
42442 Sep 22 23:15:09.950 DEBG 325 Close extent 149
42443 Sep 22 23:15:09.950 DEBG 325 Close extent 149
42444 Sep 22 23:15:09.950 DEBG [2] It's time to notify for 325
42445 Sep 22 23:15:09.951 INFO Completion from [2] id:325 status:true
42446 Sep 22 23:15:09.951 INFO [326/752] Repair commands completed
42447 Sep 22 23:15:09.951 INFO Pop front: ReconcileIO { id: ReconciliationId(326), op: ExtentRepair { repair_id: ReconciliationId(326), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42448 Sep 22 23:15:09.951 INFO Sent repair work, now wait for resp
42449 Sep 22 23:15:09.951 INFO [0] received reconcile message
42450 Sep 22 23:15:09.951 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(326), op: ExtentRepair { repair_id: ReconciliationId(326), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42451 Sep 22 23:15:09.951 INFO [0] client ExtentRepair { repair_id: ReconciliationId(326), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42452 Sep 22 23:15:09.951 INFO [0] Sending repair request ReconciliationId(326)
42453 Sep 22 23:15:09.951 INFO [1] received reconcile message
42454 Sep 22 23:15:09.951 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(326), op: ExtentRepair { repair_id: ReconciliationId(326), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42455 Sep 22 23:15:09.951 INFO [1] client ExtentRepair { repair_id: ReconciliationId(326), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42456 Sep 22 23:15:09.951 INFO [1] No action required ReconciliationId(326)
42457 Sep 22 23:15:09.951 INFO [2] received reconcile message
42458 Sep 22 23:15:09.951 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(326), op: ExtentRepair { repair_id: ReconciliationId(326), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42459 Sep 22 23:15:09.951 INFO [2] client ExtentRepair { repair_id: ReconciliationId(326), extent_id: 149, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42460 Sep 22 23:15:09.951 INFO [2] No action required ReconciliationId(326)
42461 Sep 22 23:15:09.951 DEBG 326 Repair extent 149 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
42462 Sep 22 23:15:09.951 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/095.copy"
42463 Sep 22 23:15:10.015 INFO accepted connection, remote_addr: 127.0.0.1:56724, local_addr: 127.0.0.1:46213, task: repair
42464 Sep 22 23:15:10.015 TRCE incoming request, uri: /extent/149/files, method: GET, req_id: 03e49669-379c-4c59-8037-a8f792dfd807, remote_addr: 127.0.0.1:56724, local_addr: 127.0.0.1:46213, task: repair
42465 Sep 22 23:15:10.016 INFO request completed, latency_us: 269, response_code: 200, uri: /extent/149/files, method: GET, req_id: 03e49669-379c-4c59-8037-a8f792dfd807, remote_addr: 127.0.0.1:56724, local_addr: 127.0.0.1:46213, task: repair
42466 Sep 22 23:15:10.016 INFO eid:149 Found repair files: ["095", "095.db"]
42467 Sep 22 23:15:10.016 TRCE incoming request, uri: /newextent/149/data, method: GET, req_id: f67b9d05-450b-4499-b8c9-4f93af7250bf, remote_addr: 127.0.0.1:56724, local_addr: 127.0.0.1:46213, task: repair
42468 Sep 22 23:15:10.017 INFO request completed, latency_us: 379, response_code: 200, uri: /newextent/149/data, method: GET, req_id: f67b9d05-450b-4499-b8c9-4f93af7250bf, remote_addr: 127.0.0.1:56724, local_addr: 127.0.0.1:46213, task: repair
42469 Sep 22 23:15:10.022 TRCE incoming request, uri: /newextent/149/db, method: GET, req_id: 2fd5f5b0-3d85-40a2-80eb-6bdd487517f3, remote_addr: 127.0.0.1:56724, local_addr: 127.0.0.1:46213, task: repair
42470 Sep 22 23:15:10.022 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/149/db, method: GET, req_id: 2fd5f5b0-3d85-40a2-80eb-6bdd487517f3, remote_addr: 127.0.0.1:56724, local_addr: 127.0.0.1:46213, task: repair
42471 Sep 22 23:15:10.023 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/095.copy" to "/tmp/downstairs-vrx8aK6L/00/000/095.replace"
42472 Sep 22 23:15:10.023 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42473 Sep 22 23:15:10.025 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/095.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
42474 Sep 22 23:15:10.025 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/095"
42475 Sep 22 23:15:10.025 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/095.db"
42476 Sep 22 23:15:10.025 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42477 Sep 22 23:15:10.025 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/095.replace" to "/tmp/downstairs-vrx8aK6L/00/000/095.completed"
42478 Sep 22 23:15:10.025 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42479 Sep 22 23:15:10.025 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42480 Sep 22 23:15:10.025 DEBG [0] It's time to notify for 326
42481 Sep 22 23:15:10.025 INFO Completion from [0] id:326 status:true
42482 Sep 22 23:15:10.025 INFO [327/752] Repair commands completed
42483 Sep 22 23:15:10.025 INFO Pop front: ReconcileIO { id: ReconciliationId(327), op: ExtentReopen { repair_id: ReconciliationId(327), extent_id: 149 }, state: ClientData([New, New, New]) }
42484 Sep 22 23:15:10.025 INFO Sent repair work, now wait for resp
42485 Sep 22 23:15:10.025 INFO [0] received reconcile message
42486 Sep 22 23:15:10.025 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(327), op: ExtentReopen { repair_id: ReconciliationId(327), extent_id: 149 }, state: ClientData([InProgress, New, New]) }, : downstairs
42487 Sep 22 23:15:10.026 INFO [0] client ExtentReopen { repair_id: ReconciliationId(327), extent_id: 149 }
42488 Sep 22 23:15:10.026 INFO [1] received reconcile message
42489 Sep 22 23:15:10.026 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(327), op: ExtentReopen { repair_id: ReconciliationId(327), extent_id: 149 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42490 Sep 22 23:15:10.026 INFO [1] client ExtentReopen { repair_id: ReconciliationId(327), extent_id: 149 }
42491 Sep 22 23:15:10.026 INFO [2] received reconcile message
42492 Sep 22 23:15:10.026 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(327), op: ExtentReopen { repair_id: ReconciliationId(327), extent_id: 149 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42493 Sep 22 23:15:10.026 INFO [2] client ExtentReopen { repair_id: ReconciliationId(327), extent_id: 149 }
42494 Sep 22 23:15:10.026 DEBG 327 Reopen extent 149
42495 Sep 22 23:15:10.027 DEBG 327 Reopen extent 149
42496 Sep 22 23:15:10.027 DEBG 327 Reopen extent 149
42497 Sep 22 23:15:10.028 DEBG [2] It's time to notify for 327
42498 Sep 22 23:15:10.028 INFO Completion from [2] id:327 status:true
42499 Sep 22 23:15:10.028 INFO [328/752] Repair commands completed
42500 Sep 22 23:15:10.028 INFO Pop front: ReconcileIO { id: ReconciliationId(328), op: ExtentFlush { repair_id: ReconciliationId(328), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42501 Sep 22 23:15:10.028 INFO Sent repair work, now wait for resp
42502 Sep 22 23:15:10.028 INFO [0] received reconcile message
42503 Sep 22 23:15:10.028 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(328), op: ExtentFlush { repair_id: ReconciliationId(328), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42504 Sep 22 23:15:10.028 INFO [0] client ExtentFlush { repair_id: ReconciliationId(328), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42505 Sep 22 23:15:10.028 INFO [1] received reconcile message
42506 Sep 22 23:15:10.028 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(328), op: ExtentFlush { repair_id: ReconciliationId(328), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42507 Sep 22 23:15:10.028 INFO [1] client ExtentFlush { repair_id: ReconciliationId(328), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42508 Sep 22 23:15:10.028 INFO [2] received reconcile message
42509 Sep 22 23:15:10.028 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(328), op: ExtentFlush { repair_id: ReconciliationId(328), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42510 Sep 22 23:15:10.028 INFO [2] client ExtentFlush { repair_id: ReconciliationId(328), extent_id: 160, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42511 Sep 22 23:15:10.028 DEBG 328 Flush extent 160 with f:2 g:2
42512 Sep 22 23:15:10.028 DEBG Flush just extent 160 with f:2 and g:2
42513 Sep 22 23:15:10.028 DEBG [1] It's time to notify for 328
42514 Sep 22 23:15:10.028 INFO Completion from [1] id:328 status:true
42515 Sep 22 23:15:10.028 INFO [329/752] Repair commands completed
42516 Sep 22 23:15:10.028 INFO Pop front: ReconcileIO { id: ReconciliationId(329), op: ExtentClose { repair_id: ReconciliationId(329), extent_id: 160 }, state: ClientData([New, New, New]) }
42517 Sep 22 23:15:10.028 INFO Sent repair work, now wait for resp
42518 Sep 22 23:15:10.028 INFO [0] received reconcile message
42519 Sep 22 23:15:10.028 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(329), op: ExtentClose { repair_id: ReconciliationId(329), extent_id: 160 }, state: ClientData([InProgress, New, New]) }, : downstairs
42520 Sep 22 23:15:10.028 INFO [0] client ExtentClose { repair_id: ReconciliationId(329), extent_id: 160 }
42521 Sep 22 23:15:10.029 INFO [1] received reconcile message
42522 Sep 22 23:15:10.029 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(329), op: ExtentClose { repair_id: ReconciliationId(329), extent_id: 160 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42523 Sep 22 23:15:10.029 INFO [1] client ExtentClose { repair_id: ReconciliationId(329), extent_id: 160 }
42524 Sep 22 23:15:10.029 INFO [2] received reconcile message
42525 Sep 22 23:15:10.029 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(329), op: ExtentClose { repair_id: ReconciliationId(329), extent_id: 160 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42526 Sep 22 23:15:10.029 INFO [2] client ExtentClose { repair_id: ReconciliationId(329), extent_id: 160 }
42527 Sep 22 23:15:10.029 DEBG 329 Close extent 160
42528 Sep 22 23:15:10.029 DEBG 329 Close extent 160
42529 Sep 22 23:15:10.029 DEBG 329 Close extent 160
42530 Sep 22 23:15:10.030 DEBG [2] It's time to notify for 329
42531 Sep 22 23:15:10.030 INFO Completion from [2] id:329 status:true
42532 Sep 22 23:15:10.030 INFO [330/752] Repair commands completed
42533 Sep 22 23:15:10.030 INFO Pop front: ReconcileIO { id: ReconciliationId(330), op: ExtentRepair { repair_id: ReconciliationId(330), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42534 Sep 22 23:15:10.030 INFO Sent repair work, now wait for resp
42535 Sep 22 23:15:10.030 INFO [0] received reconcile message
42536 Sep 22 23:15:10.030 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(330), op: ExtentRepair { repair_id: ReconciliationId(330), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42537 Sep 22 23:15:10.030 INFO [0] client ExtentRepair { repair_id: ReconciliationId(330), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42538 Sep 22 23:15:10.030 INFO [0] Sending repair request ReconciliationId(330)
42539 Sep 22 23:15:10.030 INFO [1] received reconcile message
42540 Sep 22 23:15:10.030 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(330), op: ExtentRepair { repair_id: ReconciliationId(330), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42541 Sep 22 23:15:10.030 INFO [1] client ExtentRepair { repair_id: ReconciliationId(330), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42542 Sep 22 23:15:10.030 INFO [1] No action required ReconciliationId(330)
42543 Sep 22 23:15:10.030 INFO [2] received reconcile message
42544 Sep 22 23:15:10.030 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(330), op: ExtentRepair { repair_id: ReconciliationId(330), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42545 Sep 22 23:15:10.030 INFO [2] client ExtentRepair { repair_id: ReconciliationId(330), extent_id: 160, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42546 Sep 22 23:15:10.030 INFO [2] No action required ReconciliationId(330)
42547 Sep 22 23:15:10.030 DEBG 330 Repair extent 160 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
42548 Sep 22 23:15:10.030 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0A0.copy"
42549 Sep 22 23:15:10.094 INFO accepted connection, remote_addr: 127.0.0.1:46651, local_addr: 127.0.0.1:46213, task: repair
42550 Sep 22 23:15:10.094 TRCE incoming request, uri: /extent/160/files, method: GET, req_id: 9e57b6ea-ac05-4977-989a-a3caf010aaaf, remote_addr: 127.0.0.1:46651, local_addr: 127.0.0.1:46213, task: repair
42551 Sep 22 23:15:10.095 INFO request completed, latency_us: 241, response_code: 200, uri: /extent/160/files, method: GET, req_id: 9e57b6ea-ac05-4977-989a-a3caf010aaaf, remote_addr: 127.0.0.1:46651, local_addr: 127.0.0.1:46213, task: repair
42552 Sep 22 23:15:10.095 INFO eid:160 Found repair files: ["0A0", "0A0.db"]
42553 Sep 22 23:15:10.095 TRCE incoming request, uri: /newextent/160/data, method: GET, req_id: 6783773b-67e2-4387-b959-004d777537e6, remote_addr: 127.0.0.1:46651, local_addr: 127.0.0.1:46213, task: repair
42554 Sep 22 23:15:10.096 INFO request completed, latency_us: 350, response_code: 200, uri: /newextent/160/data, method: GET, req_id: 6783773b-67e2-4387-b959-004d777537e6, remote_addr: 127.0.0.1:46651, local_addr: 127.0.0.1:46213, task: repair
42555 Sep 22 23:15:10.101 TRCE incoming request, uri: /newextent/160/db, method: GET, req_id: 97e1a3fa-86c3-47d7-8408-936e57550e2f, remote_addr: 127.0.0.1:46651, local_addr: 127.0.0.1:46213, task: repair
42556 Sep 22 23:15:10.101 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/160/db, method: GET, req_id: 97e1a3fa-86c3-47d7-8408-936e57550e2f, remote_addr: 127.0.0.1:46651, local_addr: 127.0.0.1:46213, task: repair
42557 Sep 22 23:15:10.102 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0A0.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0A0.replace"
42558 Sep 22 23:15:10.102 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42559 Sep 22 23:15:10.103 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0A0.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
42560 Sep 22 23:15:10.103 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A0"
42561 Sep 22 23:15:10.103 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A0.db"
42562 Sep 22 23:15:10.103 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42563 Sep 22 23:15:10.103 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0A0.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0A0.completed"
42564 Sep 22 23:15:10.103 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42565 Sep 22 23:15:10.103 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42566 Sep 22 23:15:10.104 DEBG [0] It's time to notify for 330
42567 Sep 22 23:15:10.104 INFO Completion from [0] id:330 status:true
42568 Sep 22 23:15:10.104 INFO [331/752] Repair commands completed
42569 Sep 22 23:15:10.104 INFO Pop front: ReconcileIO { id: ReconciliationId(331), op: ExtentReopen { repair_id: ReconciliationId(331), extent_id: 160 }, state: ClientData([New, New, New]) }
42570 Sep 22 23:15:10.104 INFO Sent repair work, now wait for resp
42571 Sep 22 23:15:10.104 INFO [0] received reconcile message
42572 Sep 22 23:15:10.104 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(331), op: ExtentReopen { repair_id: ReconciliationId(331), extent_id: 160 }, state: ClientData([InProgress, New, New]) }, : downstairs
42573 Sep 22 23:15:10.104 INFO [0] client ExtentReopen { repair_id: ReconciliationId(331), extent_id: 160 }
42574 Sep 22 23:15:10.104 INFO [1] received reconcile message
42575 Sep 22 23:15:10.104 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(331), op: ExtentReopen { repair_id: ReconciliationId(331), extent_id: 160 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42576 Sep 22 23:15:10.104 INFO [1] client ExtentReopen { repair_id: ReconciliationId(331), extent_id: 160 }
42577 Sep 22 23:15:10.104 INFO [2] received reconcile message
42578 Sep 22 23:15:10.104 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(331), op: ExtentReopen { repair_id: ReconciliationId(331), extent_id: 160 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42579 Sep 22 23:15:10.104 INFO [2] client ExtentReopen { repair_id: ReconciliationId(331), extent_id: 160 }
42580 Sep 22 23:15:10.104 DEBG 331 Reopen extent 160
42581 Sep 22 23:15:10.105 DEBG 331 Reopen extent 160
42582 Sep 22 23:15:10.106 DEBG 331 Reopen extent 160
42583 Sep 22 23:15:10.106 DEBG [2] It's time to notify for 331
42584 Sep 22 23:15:10.106 INFO Completion from [2] id:331 status:true
42585 Sep 22 23:15:10.106 INFO [332/752] Repair commands completed
42586 Sep 22 23:15:10.106 INFO Pop front: ReconcileIO { id: ReconciliationId(332), op: ExtentFlush { repair_id: ReconciliationId(332), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42587 Sep 22 23:15:10.106 INFO Sent repair work, now wait for resp
42588 Sep 22 23:15:10.106 INFO [0] received reconcile message
42589 Sep 22 23:15:10.106 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(332), op: ExtentFlush { repair_id: ReconciliationId(332), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42590 Sep 22 23:15:10.106 INFO [0] client ExtentFlush { repair_id: ReconciliationId(332), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42591 Sep 22 23:15:10.106 INFO [1] received reconcile message
42592 Sep 22 23:15:10.106 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(332), op: ExtentFlush { repair_id: ReconciliationId(332), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42593 Sep 22 23:15:10.106 INFO [1] client ExtentFlush { repair_id: ReconciliationId(332), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42594 Sep 22 23:15:10.106 INFO [2] received reconcile message
42595 Sep 22 23:15:10.106 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(332), op: ExtentFlush { repair_id: ReconciliationId(332), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42596 Sep 22 23:15:10.106 INFO [2] client ExtentFlush { repair_id: ReconciliationId(332), extent_id: 122, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42597 Sep 22 23:15:10.107 DEBG 332 Flush extent 122 with f:2 g:2
42598 Sep 22 23:15:10.107 DEBG Flush just extent 122 with f:2 and g:2
42599 Sep 22 23:15:10.107 DEBG [1] It's time to notify for 332
42600 Sep 22 23:15:10.107 INFO Completion from [1] id:332 status:true
42601 Sep 22 23:15:10.107 INFO [333/752] Repair commands completed
42602 Sep 22 23:15:10.107 INFO Pop front: ReconcileIO { id: ReconciliationId(333), op: ExtentClose { repair_id: ReconciliationId(333), extent_id: 122 }, state: ClientData([New, New, New]) }
42603 Sep 22 23:15:10.107 INFO Sent repair work, now wait for resp
42604 Sep 22 23:15:10.107 INFO [0] received reconcile message
42605 Sep 22 23:15:10.107 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(333), op: ExtentClose { repair_id: ReconciliationId(333), extent_id: 122 }, state: ClientData([InProgress, New, New]) }, : downstairs
42606 Sep 22 23:15:10.107 INFO [0] client ExtentClose { repair_id: ReconciliationId(333), extent_id: 122 }
42607 Sep 22 23:15:10.107 INFO [1] received reconcile message
42608 Sep 22 23:15:10.107 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(333), op: ExtentClose { repair_id: ReconciliationId(333), extent_id: 122 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42609 Sep 22 23:15:10.107 INFO [1] client ExtentClose { repair_id: ReconciliationId(333), extent_id: 122 }
42610 Sep 22 23:15:10.107 INFO [2] received reconcile message
42611 Sep 22 23:15:10.107 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(333), op: ExtentClose { repair_id: ReconciliationId(333), extent_id: 122 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42612 Sep 22 23:15:10.107 INFO [2] client ExtentClose { repair_id: ReconciliationId(333), extent_id: 122 }
42613 Sep 22 23:15:10.107 DEBG 333 Close extent 122
42614 Sep 22 23:15:10.107 DEBG 333 Close extent 122
42615 Sep 22 23:15:10.108 DEBG 333 Close extent 122
42616 Sep 22 23:15:10.108 DEBG [2] It's time to notify for 333
42617 Sep 22 23:15:10.108 INFO Completion from [2] id:333 status:true
42618 Sep 22 23:15:10.108 INFO [334/752] Repair commands completed
42619 Sep 22 23:15:10.108 INFO Pop front: ReconcileIO { id: ReconciliationId(334), op: ExtentRepair { repair_id: ReconciliationId(334), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42620 Sep 22 23:15:10.108 INFO Sent repair work, now wait for resp
42621 Sep 22 23:15:10.108 INFO [0] received reconcile message
42622 Sep 22 23:15:10.108 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(334), op: ExtentRepair { repair_id: ReconciliationId(334), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42623 Sep 22 23:15:10.108 INFO [0] client ExtentRepair { repair_id: ReconciliationId(334), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42624 Sep 22 23:15:10.108 INFO [0] Sending repair request ReconciliationId(334)
42625 Sep 22 23:15:10.108 INFO [1] received reconcile message
42626 Sep 22 23:15:10.108 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(334), op: ExtentRepair { repair_id: ReconciliationId(334), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42627 Sep 22 23:15:10.108 INFO [1] client ExtentRepair { repair_id: ReconciliationId(334), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42628 Sep 22 23:15:10.108 INFO [1] No action required ReconciliationId(334)
42629 Sep 22 23:15:10.108 INFO [2] received reconcile message
42630 Sep 22 23:15:10.108 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(334), op: ExtentRepair { repair_id: ReconciliationId(334), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42631 Sep 22 23:15:10.108 INFO [2] client ExtentRepair { repair_id: ReconciliationId(334), extent_id: 122, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42632 Sep 22 23:15:10.108 INFO [2] No action required ReconciliationId(334)
42633 Sep 22 23:15:10.109 DEBG 334 Repair extent 122 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
42634 Sep 22 23:15:10.109 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/07A.copy"
42635 Sep 22 23:15:10.172 INFO accepted connection, remote_addr: 127.0.0.1:52236, local_addr: 127.0.0.1:46213, task: repair
42636 Sep 22 23:15:10.173 TRCE incoming request, uri: /extent/122/files, method: GET, req_id: 49cdad64-0775-4fd3-93a7-1a1d5bc005af, remote_addr: 127.0.0.1:52236, local_addr: 127.0.0.1:46213, task: repair
42637 Sep 22 23:15:10.173 INFO request completed, latency_us: 272, response_code: 200, uri: /extent/122/files, method: GET, req_id: 49cdad64-0775-4fd3-93a7-1a1d5bc005af, remote_addr: 127.0.0.1:52236, local_addr: 127.0.0.1:46213, task: repair
42638 Sep 22 23:15:10.173 INFO eid:122 Found repair files: ["07A", "07A.db"]
42639 Sep 22 23:15:10.174 TRCE incoming request, uri: /newextent/122/data, method: GET, req_id: a9062b19-25a5-401e-9ec1-c3363bfc7428, remote_addr: 127.0.0.1:52236, local_addr: 127.0.0.1:46213, task: repair
42640 Sep 22 23:15:10.174 INFO request completed, latency_us: 350, response_code: 200, uri: /newextent/122/data, method: GET, req_id: a9062b19-25a5-401e-9ec1-c3363bfc7428, remote_addr: 127.0.0.1:52236, local_addr: 127.0.0.1:46213, task: repair
42641 Sep 22 23:15:10.179 TRCE incoming request, uri: /newextent/122/db, method: GET, req_id: ef91452f-f889-4537-9240-2686e7dc3f9a, remote_addr: 127.0.0.1:52236, local_addr: 127.0.0.1:46213, task: repair
42642 Sep 22 23:15:10.179 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/122/db, method: GET, req_id: ef91452f-f889-4537-9240-2686e7dc3f9a, remote_addr: 127.0.0.1:52236, local_addr: 127.0.0.1:46213, task: repair
42643 Sep 22 23:15:10.181 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/07A.copy" to "/tmp/downstairs-vrx8aK6L/00/000/07A.replace"
42644 Sep 22 23:15:10.181 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42645 Sep 22 23:15:10.182 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/07A.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
42646 Sep 22 23:15:10.182 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/07A"
42647 Sep 22 23:15:10.182 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/07A.db"
42648 Sep 22 23:15:10.182 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42649 Sep 22 23:15:10.182 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/07A.replace" to "/tmp/downstairs-vrx8aK6L/00/000/07A.completed"
42650 Sep 22 23:15:10.182 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42651 Sep 22 23:15:10.183 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42652 Sep 22 23:15:10.183 DEBG [0] It's time to notify for 334
42653 Sep 22 23:15:10.183 INFO Completion from [0] id:334 status:true
42654 Sep 22 23:15:10.183 INFO [335/752] Repair commands completed
42655 Sep 22 23:15:10.183 INFO Pop front: ReconcileIO { id: ReconciliationId(335), op: ExtentReopen { repair_id: ReconciliationId(335), extent_id: 122 }, state: ClientData([New, New, New]) }
42656 Sep 22 23:15:10.183 INFO Sent repair work, now wait for resp
42657 Sep 22 23:15:10.183 INFO [0] received reconcile message
42658 Sep 22 23:15:10.183 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(335), op: ExtentReopen { repair_id: ReconciliationId(335), extent_id: 122 }, state: ClientData([InProgress, New, New]) }, : downstairs
42659 Sep 22 23:15:10.183 INFO [0] client ExtentReopen { repair_id: ReconciliationId(335), extent_id: 122 }
42660 Sep 22 23:15:10.183 INFO [1] received reconcile message
42661 Sep 22 23:15:10.183 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(335), op: ExtentReopen { repair_id: ReconciliationId(335), extent_id: 122 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42662 Sep 22 23:15:10.183 INFO [1] client ExtentReopen { repair_id: ReconciliationId(335), extent_id: 122 }
42663 Sep 22 23:15:10.183 INFO [2] received reconcile message
42664 Sep 22 23:15:10.183 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(335), op: ExtentReopen { repair_id: ReconciliationId(335), extent_id: 122 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42665 Sep 22 23:15:10.183 INFO [2] client ExtentReopen { repair_id: ReconciliationId(335), extent_id: 122 }
42666 Sep 22 23:15:10.183 DEBG 335 Reopen extent 122
42667 Sep 22 23:15:10.184 DEBG 335 Reopen extent 122
42668 Sep 22 23:15:10.185 DEBG 335 Reopen extent 122
42669 Sep 22 23:15:10.185 DEBG [2] It's time to notify for 335
42670 Sep 22 23:15:10.185 INFO Completion from [2] id:335 status:true
42671 Sep 22 23:15:10.185 INFO [336/752] Repair commands completed
42672 Sep 22 23:15:10.185 INFO Pop front: ReconcileIO { id: ReconciliationId(336), op: ExtentFlush { repair_id: ReconciliationId(336), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42673 Sep 22 23:15:10.185 INFO Sent repair work, now wait for resp
42674 Sep 22 23:15:10.185 INFO [0] received reconcile message
42675 Sep 22 23:15:10.185 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(336), op: ExtentFlush { repair_id: ReconciliationId(336), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42676 Sep 22 23:15:10.185 INFO [0] client ExtentFlush { repair_id: ReconciliationId(336), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42677 Sep 22 23:15:10.186 INFO [1] received reconcile message
42678 Sep 22 23:15:10.186 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(336), op: ExtentFlush { repair_id: ReconciliationId(336), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42679 Sep 22 23:15:10.186 INFO [1] client ExtentFlush { repair_id: ReconciliationId(336), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42680 Sep 22 23:15:10.186 INFO [2] received reconcile message
42681 Sep 22 23:15:10.186 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(336), op: ExtentFlush { repair_id: ReconciliationId(336), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42682 Sep 22 23:15:10.186 INFO [2] client ExtentFlush { repair_id: ReconciliationId(336), extent_id: 77, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42683 Sep 22 23:15:10.186 DEBG 336 Flush extent 77 with f:2 g:2
42684 Sep 22 23:15:10.186 DEBG Flush just extent 77 with f:2 and g:2
42685 Sep 22 23:15:10.186 DEBG [1] It's time to notify for 336
42686 Sep 22 23:15:10.186 INFO Completion from [1] id:336 status:true
42687 Sep 22 23:15:10.186 INFO [337/752] Repair commands completed
42688 Sep 22 23:15:10.186 INFO Pop front: ReconcileIO { id: ReconciliationId(337), op: ExtentClose { repair_id: ReconciliationId(337), extent_id: 77 }, state: ClientData([New, New, New]) }
42689 Sep 22 23:15:10.186 INFO Sent repair work, now wait for resp
42690 Sep 22 23:15:10.186 INFO [0] received reconcile message
42691 Sep 22 23:15:10.186 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(337), op: ExtentClose { repair_id: ReconciliationId(337), extent_id: 77 }, state: ClientData([InProgress, New, New]) }, : downstairs
42692 Sep 22 23:15:10.186 INFO [0] client ExtentClose { repair_id: ReconciliationId(337), extent_id: 77 }
42693 Sep 22 23:15:10.186 INFO [1] received reconcile message
42694 Sep 22 23:15:10.186 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(337), op: ExtentClose { repair_id: ReconciliationId(337), extent_id: 77 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42695 Sep 22 23:15:10.186 INFO [1] client ExtentClose { repair_id: ReconciliationId(337), extent_id: 77 }
42696 Sep 22 23:15:10.186 INFO [2] received reconcile message
42697 Sep 22 23:15:10.186 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(337), op: ExtentClose { repair_id: ReconciliationId(337), extent_id: 77 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42698 Sep 22 23:15:10.186 INFO [2] client ExtentClose { repair_id: ReconciliationId(337), extent_id: 77 }
42699 Sep 22 23:15:10.186 DEBG 337 Close extent 77
42700 Sep 22 23:15:10.187 DEBG 337 Close extent 77
42701 Sep 22 23:15:10.187 DEBG 337 Close extent 77
42702 Sep 22 23:15:10.187 DEBG [2] It's time to notify for 337
42703 Sep 22 23:15:10.187 INFO Completion from [2] id:337 status:true
42704 Sep 22 23:15:10.187 INFO [338/752] Repair commands completed
42705 Sep 22 23:15:10.187 INFO Pop front: ReconcileIO { id: ReconciliationId(338), op: ExtentRepair { repair_id: ReconciliationId(338), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42706 Sep 22 23:15:10.187 INFO Sent repair work, now wait for resp
42707 Sep 22 23:15:10.187 INFO [0] received reconcile message
42708 Sep 22 23:15:10.187 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(338), op: ExtentRepair { repair_id: ReconciliationId(338), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42709 Sep 22 23:15:10.187 INFO [0] client ExtentRepair { repair_id: ReconciliationId(338), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42710 Sep 22 23:15:10.187 INFO [0] Sending repair request ReconciliationId(338)
42711 Sep 22 23:15:10.188 INFO [1] received reconcile message
42712 Sep 22 23:15:10.188 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(338), op: ExtentRepair { repair_id: ReconciliationId(338), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42713 Sep 22 23:15:10.188 INFO [1] client ExtentRepair { repair_id: ReconciliationId(338), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42714 Sep 22 23:15:10.188 INFO [1] No action required ReconciliationId(338)
42715 Sep 22 23:15:10.188 INFO [2] received reconcile message
42716 Sep 22 23:15:10.188 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(338), op: ExtentRepair { repair_id: ReconciliationId(338), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42717 Sep 22 23:15:10.188 INFO [2] client ExtentRepair { repair_id: ReconciliationId(338), extent_id: 77, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42718 Sep 22 23:15:10.188 INFO [2] No action required ReconciliationId(338)
42719 Sep 22 23:15:10.188 DEBG 338 Repair extent 77 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
42720 Sep 22 23:15:10.188 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/04D.copy"
42721 Sep 22 23:15:10.253 INFO accepted connection, remote_addr: 127.0.0.1:44518, local_addr: 127.0.0.1:46213, task: repair
42722 Sep 22 23:15:10.254 TRCE incoming request, uri: /extent/77/files, method: GET, req_id: 42b1156e-e643-4cae-abf1-4fc4498bcc15, remote_addr: 127.0.0.1:44518, local_addr: 127.0.0.1:46213, task: repair
42723 Sep 22 23:15:10.254 INFO request completed, latency_us: 285, response_code: 200, uri: /extent/77/files, method: GET, req_id: 42b1156e-e643-4cae-abf1-4fc4498bcc15, remote_addr: 127.0.0.1:44518, local_addr: 127.0.0.1:46213, task: repair
42724 Sep 22 23:15:10.254 INFO eid:77 Found repair files: ["04D", "04D.db"]
42725 Sep 22 23:15:10.254 TRCE incoming request, uri: /newextent/77/data, method: GET, req_id: e33f287b-c026-4d75-bb86-9ccdc0c6bf95, remote_addr: 127.0.0.1:44518, local_addr: 127.0.0.1:46213, task: repair
42726 Sep 22 23:15:10.255 INFO request completed, latency_us: 350, response_code: 200, uri: /newextent/77/data, method: GET, req_id: e33f287b-c026-4d75-bb86-9ccdc0c6bf95, remote_addr: 127.0.0.1:44518, local_addr: 127.0.0.1:46213, task: repair
42727 Sep 22 23:15:10.260 TRCE incoming request, uri: /newextent/77/db, method: GET, req_id: 3359a5d9-b79a-4f36-a126-6becc0f33873, remote_addr: 127.0.0.1:44518, local_addr: 127.0.0.1:46213, task: repair
42728 Sep 22 23:15:10.260 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/77/db, method: GET, req_id: 3359a5d9-b79a-4f36-a126-6becc0f33873, remote_addr: 127.0.0.1:44518, local_addr: 127.0.0.1:46213, task: repair
42729 Sep 22 23:15:10.261 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/04D.copy" to "/tmp/downstairs-vrx8aK6L/00/000/04D.replace"
42730 Sep 22 23:15:10.261 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42731 Sep 22 23:15:10.262 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/04D.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
42732 Sep 22 23:15:10.263 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/04D"
42733 Sep 22 23:15:10.263 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/04D.db"
42734 Sep 22 23:15:10.263 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42735 Sep 22 23:15:10.263 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/04D.replace" to "/tmp/downstairs-vrx8aK6L/00/000/04D.completed"
42736 Sep 22 23:15:10.263 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42737 Sep 22 23:15:10.263 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42738 Sep 22 23:15:10.263 DEBG [0] It's time to notify for 338
42739 Sep 22 23:15:10.263 INFO Completion from [0] id:338 status:true
42740 Sep 22 23:15:10.263 INFO [339/752] Repair commands completed
42741 Sep 22 23:15:10.263 INFO Pop front: ReconcileIO { id: ReconciliationId(339), op: ExtentReopen { repair_id: ReconciliationId(339), extent_id: 77 }, state: ClientData([New, New, New]) }
42742 Sep 22 23:15:10.263 INFO Sent repair work, now wait for resp
42743 Sep 22 23:15:10.263 INFO [0] received reconcile message
42744 Sep 22 23:15:10.263 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(339), op: ExtentReopen { repair_id: ReconciliationId(339), extent_id: 77 }, state: ClientData([InProgress, New, New]) }, : downstairs
42745 Sep 22 23:15:10.263 INFO [0] client ExtentReopen { repair_id: ReconciliationId(339), extent_id: 77 }
42746 Sep 22 23:15:10.264 INFO [1] received reconcile message
42747 Sep 22 23:15:10.264 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(339), op: ExtentReopen { repair_id: ReconciliationId(339), extent_id: 77 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42748 Sep 22 23:15:10.264 INFO [1] client ExtentReopen { repair_id: ReconciliationId(339), extent_id: 77 }
42749 Sep 22 23:15:10.264 INFO [2] received reconcile message
42750 Sep 22 23:15:10.264 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(339), op: ExtentReopen { repair_id: ReconciliationId(339), extent_id: 77 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42751 Sep 22 23:15:10.264 INFO [2] client ExtentReopen { repair_id: ReconciliationId(339), extent_id: 77 }
42752 Sep 22 23:15:10.264 DEBG 339 Reopen extent 77
42753 Sep 22 23:15:10.265 DEBG 339 Reopen extent 77
42754 Sep 22 23:15:10.265 DEBG 339 Reopen extent 77
42755 Sep 22 23:15:10.266 DEBG [2] It's time to notify for 339
42756 Sep 22 23:15:10.266 INFO Completion from [2] id:339 status:true
42757 Sep 22 23:15:10.266 INFO [340/752] Repair commands completed
42758 Sep 22 23:15:10.266 INFO Pop front: ReconcileIO { id: ReconciliationId(340), op: ExtentFlush { repair_id: ReconciliationId(340), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42759 Sep 22 23:15:10.266 INFO Sent repair work, now wait for resp
42760 Sep 22 23:15:10.266 INFO [0] received reconcile message
42761 Sep 22 23:15:10.266 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(340), op: ExtentFlush { repair_id: ReconciliationId(340), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42762 Sep 22 23:15:10.266 INFO [0] client ExtentFlush { repair_id: ReconciliationId(340), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42763 Sep 22 23:15:10.266 INFO [1] received reconcile message
42764 Sep 22 23:15:10.266 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(340), op: ExtentFlush { repair_id: ReconciliationId(340), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42765 Sep 22 23:15:10.266 INFO [1] client ExtentFlush { repair_id: ReconciliationId(340), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42766 Sep 22 23:15:10.266 INFO [2] received reconcile message
42767 Sep 22 23:15:10.266 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(340), op: ExtentFlush { repair_id: ReconciliationId(340), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42768 Sep 22 23:15:10.266 INFO [2] client ExtentFlush { repair_id: ReconciliationId(340), extent_id: 136, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42769 Sep 22 23:15:10.266 DEBG 340 Flush extent 136 with f:2 g:2
42770 Sep 22 23:15:10.266 DEBG Flush just extent 136 with f:2 and g:2
42771 Sep 22 23:15:10.266 DEBG [1] It's time to notify for 340
42772 Sep 22 23:15:10.266 INFO Completion from [1] id:340 status:true
42773 Sep 22 23:15:10.266 INFO [341/752] Repair commands completed
42774 Sep 22 23:15:10.266 INFO Pop front: ReconcileIO { id: ReconciliationId(341), op: ExtentClose { repair_id: ReconciliationId(341), extent_id: 136 }, state: ClientData([New, New, New]) }
42775 Sep 22 23:15:10.266 INFO Sent repair work, now wait for resp
42776 Sep 22 23:15:10.266 INFO [0] received reconcile message
42777 Sep 22 23:15:10.266 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(341), op: ExtentClose { repair_id: ReconciliationId(341), extent_id: 136 }, state: ClientData([InProgress, New, New]) }, : downstairs
42778 Sep 22 23:15:10.266 INFO [0] client ExtentClose { repair_id: ReconciliationId(341), extent_id: 136 }
42779 Sep 22 23:15:10.267 INFO [1] received reconcile message
42780 Sep 22 23:15:10.267 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(341), op: ExtentClose { repair_id: ReconciliationId(341), extent_id: 136 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42781 Sep 22 23:15:10.267 INFO [1] client ExtentClose { repair_id: ReconciliationId(341), extent_id: 136 }
42782 Sep 22 23:15:10.267 INFO [2] received reconcile message
42783 Sep 22 23:15:10.267 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(341), op: ExtentClose { repair_id: ReconciliationId(341), extent_id: 136 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42784 Sep 22 23:15:10.267 INFO [2] client ExtentClose { repair_id: ReconciliationId(341), extent_id: 136 }
42785 Sep 22 23:15:10.267 DEBG 341 Close extent 136
42786 Sep 22 23:15:10.267 DEBG 341 Close extent 136
42787 Sep 22 23:15:10.267 DEBG 341 Close extent 136
42788 Sep 22 23:15:10.268 DEBG [2] It's time to notify for 341
42789 Sep 22 23:15:10.268 INFO Completion from [2] id:341 status:true
42790 Sep 22 23:15:10.268 INFO [342/752] Repair commands completed
42791 Sep 22 23:15:10.268 INFO Pop front: ReconcileIO { id: ReconciliationId(342), op: ExtentRepair { repair_id: ReconciliationId(342), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42792 Sep 22 23:15:10.268 INFO Sent repair work, now wait for resp
42793 Sep 22 23:15:10.268 INFO [0] received reconcile message
42794 Sep 22 23:15:10.268 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(342), op: ExtentRepair { repair_id: ReconciliationId(342), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42795 Sep 22 23:15:10.268 INFO [0] client ExtentRepair { repair_id: ReconciliationId(342), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42796 Sep 22 23:15:10.268 INFO [0] Sending repair request ReconciliationId(342)
42797 Sep 22 23:15:10.268 INFO [1] received reconcile message
42798 Sep 22 23:15:10.268 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(342), op: ExtentRepair { repair_id: ReconciliationId(342), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42799 Sep 22 23:15:10.268 INFO [1] client ExtentRepair { repair_id: ReconciliationId(342), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42800 Sep 22 23:15:10.268 INFO [1] No action required ReconciliationId(342)
42801 Sep 22 23:15:10.268 INFO [2] received reconcile message
42802 Sep 22 23:15:10.268 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(342), op: ExtentRepair { repair_id: ReconciliationId(342), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42803 Sep 22 23:15:10.268 INFO [2] client ExtentRepair { repair_id: ReconciliationId(342), extent_id: 136, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42804 Sep 22 23:15:10.268 INFO [2] No action required ReconciliationId(342)
42805 Sep 22 23:15:10.268 DEBG 342 Repair extent 136 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
42806 Sep 22 23:15:10.268 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/088.copy"
42807 Sep 22 23:15:10.271 DEBG up_ds_listen was notified
42808 Sep 22 23:15:10.271 DEBG up_ds_listen process 1081
42809 Sep 22 23:15:10.271 DEBG [A] ack job 1081:82, : downstairs
42810 Sep 22 23:15:10.271 DEBG up_ds_listen checked 1 jobs, back to waiting
42811 Sep 22 23:15:10.278 DEBG IO Flush 1083 has deps [JobId(1082), JobId(1081)]
42812 Sep 22 23:15:10.278 INFO [lossy] sleeping 1 second
42813 Sep 22 23:15:10.334 INFO accepted connection, remote_addr: 127.0.0.1:42290, local_addr: 127.0.0.1:46213, task: repair
42814 Sep 22 23:15:10.334 TRCE incoming request, uri: /extent/136/files, method: GET, req_id: 3a5634e7-c00d-4520-8b85-67f909c1934c, remote_addr: 127.0.0.1:42290, local_addr: 127.0.0.1:46213, task: repair
42815 Sep 22 23:15:10.334 INFO request completed, latency_us: 260, response_code: 200, uri: /extent/136/files, method: GET, req_id: 3a5634e7-c00d-4520-8b85-67f909c1934c, remote_addr: 127.0.0.1:42290, local_addr: 127.0.0.1:46213, task: repair
42816 Sep 22 23:15:10.335 INFO eid:136 Found repair files: ["088", "088.db"]
42817 Sep 22 23:15:10.335 TRCE incoming request, uri: /newextent/136/data, method: GET, req_id: 243f0fba-3f9c-4d67-86db-13aee6bc2efc, remote_addr: 127.0.0.1:42290, local_addr: 127.0.0.1:46213, task: repair
42818 Sep 22 23:15:10.335 INFO request completed, latency_us: 365, response_code: 200, uri: /newextent/136/data, method: GET, req_id: 243f0fba-3f9c-4d67-86db-13aee6bc2efc, remote_addr: 127.0.0.1:42290, local_addr: 127.0.0.1:46213, task: repair
42819 Sep 22 23:15:10.340 TRCE incoming request, uri: /newextent/136/db, method: GET, req_id: d49eaf16-cb43-4ede-ac66-874f208665d0, remote_addr: 127.0.0.1:42290, local_addr: 127.0.0.1:46213, task: repair
42820 Sep 22 23:15:10.341 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/136/db, method: GET, req_id: d49eaf16-cb43-4ede-ac66-874f208665d0, remote_addr: 127.0.0.1:42290, local_addr: 127.0.0.1:46213, task: repair
42821 Sep 22 23:15:10.342 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/088.copy" to "/tmp/downstairs-vrx8aK6L/00/000/088.replace"
42822 Sep 22 23:15:10.342 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42823 Sep 22 23:15:10.343 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/088.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
42824 Sep 22 23:15:10.343 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/088"
42825 Sep 22 23:15:10.343 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/088.db"
42826 Sep 22 23:15:10.343 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42827 Sep 22 23:15:10.343 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/088.replace" to "/tmp/downstairs-vrx8aK6L/00/000/088.completed"
42828 Sep 22 23:15:10.343 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42829 Sep 22 23:15:10.343 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42830 Sep 22 23:15:10.343 DEBG [0] It's time to notify for 342
42831 Sep 22 23:15:10.344 INFO Completion from [0] id:342 status:true
42832 Sep 22 23:15:10.344 INFO [343/752] Repair commands completed
42833 Sep 22 23:15:10.344 INFO Pop front: ReconcileIO { id: ReconciliationId(343), op: ExtentReopen { repair_id: ReconciliationId(343), extent_id: 136 }, state: ClientData([New, New, New]) }
42834 Sep 22 23:15:10.344 INFO Sent repair work, now wait for resp
42835 Sep 22 23:15:10.344 INFO [0] received reconcile message
42836 Sep 22 23:15:10.344 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(343), op: ExtentReopen { repair_id: ReconciliationId(343), extent_id: 136 }, state: ClientData([InProgress, New, New]) }, : downstairs
42837 Sep 22 23:15:10.344 INFO [0] client ExtentReopen { repair_id: ReconciliationId(343), extent_id: 136 }
42838 Sep 22 23:15:10.344 INFO [1] received reconcile message
42839 Sep 22 23:15:10.344 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(343), op: ExtentReopen { repair_id: ReconciliationId(343), extent_id: 136 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42840 Sep 22 23:15:10.344 INFO [1] client ExtentReopen { repair_id: ReconciliationId(343), extent_id: 136 }
42841 Sep 22 23:15:10.344 INFO [2] received reconcile message
42842 Sep 22 23:15:10.344 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(343), op: ExtentReopen { repair_id: ReconciliationId(343), extent_id: 136 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42843 Sep 22 23:15:10.344 INFO [2] client ExtentReopen { repair_id: ReconciliationId(343), extent_id: 136 }
42844 Sep 22 23:15:10.344 DEBG 343 Reopen extent 136
42845 Sep 22 23:15:10.345 DEBG 343 Reopen extent 136
42846 Sep 22 23:15:10.345 DEBG 343 Reopen extent 136
42847 Sep 22 23:15:10.346 DEBG [2] It's time to notify for 343
42848 Sep 22 23:15:10.346 INFO Completion from [2] id:343 status:true
42849 Sep 22 23:15:10.346 INFO [344/752] Repair commands completed
42850 Sep 22 23:15:10.346 INFO Pop front: ReconcileIO { id: ReconciliationId(344), op: ExtentFlush { repair_id: ReconciliationId(344), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42851 Sep 22 23:15:10.346 INFO Sent repair work, now wait for resp
42852 Sep 22 23:15:10.346 INFO [0] received reconcile message
42853 Sep 22 23:15:10.346 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(344), op: ExtentFlush { repair_id: ReconciliationId(344), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42854 Sep 22 23:15:10.346 INFO [0] client ExtentFlush { repair_id: ReconciliationId(344), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42855 Sep 22 23:15:10.346 INFO [1] received reconcile message
42856 Sep 22 23:15:10.346 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(344), op: ExtentFlush { repair_id: ReconciliationId(344), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42857 Sep 22 23:15:10.346 INFO [1] client ExtentFlush { repair_id: ReconciliationId(344), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42858 Sep 22 23:15:10.346 INFO [2] received reconcile message
42859 Sep 22 23:15:10.346 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(344), op: ExtentFlush { repair_id: ReconciliationId(344), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42860 Sep 22 23:15:10.346 INFO [2] client ExtentFlush { repair_id: ReconciliationId(344), extent_id: 176, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42861 Sep 22 23:15:10.346 DEBG 344 Flush extent 176 with f:2 g:2
42862 Sep 22 23:15:10.346 DEBG Flush just extent 176 with f:2 and g:2
42863 Sep 22 23:15:10.347 DEBG [1] It's time to notify for 344
42864 Sep 22 23:15:10.347 INFO Completion from [1] id:344 status:true
42865 Sep 22 23:15:10.347 INFO [345/752] Repair commands completed
42866 Sep 22 23:15:10.347 INFO Pop front: ReconcileIO { id: ReconciliationId(345), op: ExtentClose { repair_id: ReconciliationId(345), extent_id: 176 }, state: ClientData([New, New, New]) }
42867 Sep 22 23:15:10.347 INFO Sent repair work, now wait for resp
42868 Sep 22 23:15:10.347 INFO [0] received reconcile message
42869 Sep 22 23:15:10.347 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(345), op: ExtentClose { repair_id: ReconciliationId(345), extent_id: 176 }, state: ClientData([InProgress, New, New]) }, : downstairs
42870 Sep 22 23:15:10.347 INFO [0] client ExtentClose { repair_id: ReconciliationId(345), extent_id: 176 }
42871 Sep 22 23:15:10.347 INFO [1] received reconcile message
42872 Sep 22 23:15:10.347 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(345), op: ExtentClose { repair_id: ReconciliationId(345), extent_id: 176 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42873 Sep 22 23:15:10.347 INFO [1] client ExtentClose { repair_id: ReconciliationId(345), extent_id: 176 }
42874 Sep 22 23:15:10.347 INFO [2] received reconcile message
42875 Sep 22 23:15:10.347 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(345), op: ExtentClose { repair_id: ReconciliationId(345), extent_id: 176 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42876 Sep 22 23:15:10.347 INFO [2] client ExtentClose { repair_id: ReconciliationId(345), extent_id: 176 }
42877 Sep 22 23:15:10.347 DEBG 345 Close extent 176
42878 Sep 22 23:15:10.347 DEBG 345 Close extent 176
42879 Sep 22 23:15:10.347 DEBG 345 Close extent 176
42880 Sep 22 23:15:10.348 DEBG [2] It's time to notify for 345
42881 Sep 22 23:15:10.348 INFO Completion from [2] id:345 status:true
42882 Sep 22 23:15:10.348 INFO [346/752] Repair commands completed
42883 Sep 22 23:15:10.348 INFO Pop front: ReconcileIO { id: ReconciliationId(346), op: ExtentRepair { repair_id: ReconciliationId(346), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42884 Sep 22 23:15:10.348 INFO Sent repair work, now wait for resp
42885 Sep 22 23:15:10.348 INFO [0] received reconcile message
42886 Sep 22 23:15:10.348 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(346), op: ExtentRepair { repair_id: ReconciliationId(346), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42887 Sep 22 23:15:10.348 INFO [0] client ExtentRepair { repair_id: ReconciliationId(346), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42888 Sep 22 23:15:10.348 INFO [0] Sending repair request ReconciliationId(346)
42889 Sep 22 23:15:10.348 INFO [1] received reconcile message
42890 Sep 22 23:15:10.348 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(346), op: ExtentRepair { repair_id: ReconciliationId(346), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42891 Sep 22 23:15:10.348 INFO [1] client ExtentRepair { repair_id: ReconciliationId(346), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42892 Sep 22 23:15:10.348 INFO [1] No action required ReconciliationId(346)
42893 Sep 22 23:15:10.348 INFO [2] received reconcile message
42894 Sep 22 23:15:10.348 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(346), op: ExtentRepair { repair_id: ReconciliationId(346), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42895 Sep 22 23:15:10.348 INFO [2] client ExtentRepair { repair_id: ReconciliationId(346), extent_id: 176, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42896 Sep 22 23:15:10.348 INFO [2] No action required ReconciliationId(346)
42897 Sep 22 23:15:10.348 DEBG 346 Repair extent 176 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
42898 Sep 22 23:15:10.348 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0B0.copy"
42899 Sep 22 23:15:10.412 INFO accepted connection, remote_addr: 127.0.0.1:43530, local_addr: 127.0.0.1:46213, task: repair
42900 Sep 22 23:15:10.412 TRCE incoming request, uri: /extent/176/files, method: GET, req_id: fb775b24-9680-4ca8-8975-4843c619161d, remote_addr: 127.0.0.1:43530, local_addr: 127.0.0.1:46213, task: repair
42901 Sep 22 23:15:10.412 INFO request completed, latency_us: 192, response_code: 200, uri: /extent/176/files, method: GET, req_id: fb775b24-9680-4ca8-8975-4843c619161d, remote_addr: 127.0.0.1:43530, local_addr: 127.0.0.1:46213, task: repair
42902 Sep 22 23:15:10.412 INFO eid:176 Found repair files: ["0B0", "0B0.db"]
42903 Sep 22 23:15:10.413 TRCE incoming request, uri: /newextent/176/data, method: GET, req_id: 32907a1f-247c-414a-b1f9-d59f0fb65c44, remote_addr: 127.0.0.1:43530, local_addr: 127.0.0.1:46213, task: repair
42904 Sep 22 23:15:10.413 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/176/data, method: GET, req_id: 32907a1f-247c-414a-b1f9-d59f0fb65c44, remote_addr: 127.0.0.1:43530, local_addr: 127.0.0.1:46213, task: repair
42905 Sep 22 23:15:10.418 TRCE incoming request, uri: /newextent/176/db, method: GET, req_id: aa3c9787-5d3c-4696-936a-4989710bcd12, remote_addr: 127.0.0.1:43530, local_addr: 127.0.0.1:46213, task: repair
42906 Sep 22 23:15:10.418 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/176/db, method: GET, req_id: aa3c9787-5d3c-4696-936a-4989710bcd12, remote_addr: 127.0.0.1:43530, local_addr: 127.0.0.1:46213, task: repair
42907 Sep 22 23:15:10.419 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0B0.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0B0.replace"
42908 Sep 22 23:15:10.420 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42909 Sep 22 23:15:10.420 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0B0.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
42910 Sep 22 23:15:10.420 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B0"
42911 Sep 22 23:15:10.421 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B0.db"
42912 Sep 22 23:15:10.421 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42913 Sep 22 23:15:10.421 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0B0.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0B0.completed"
42914 Sep 22 23:15:10.421 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42915 Sep 22 23:15:10.421 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42916 Sep 22 23:15:10.421 DEBG [0] It's time to notify for 346
42917 Sep 22 23:15:10.421 INFO Completion from [0] id:346 status:true
42918 Sep 22 23:15:10.421 INFO [347/752] Repair commands completed
42919 Sep 22 23:15:10.421 INFO Pop front: ReconcileIO { id: ReconciliationId(347), op: ExtentReopen { repair_id: ReconciliationId(347), extent_id: 176 }, state: ClientData([New, New, New]) }
42920 Sep 22 23:15:10.421 INFO Sent repair work, now wait for resp
42921 Sep 22 23:15:10.421 INFO [0] received reconcile message
42922 Sep 22 23:15:10.421 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(347), op: ExtentReopen { repair_id: ReconciliationId(347), extent_id: 176 }, state: ClientData([InProgress, New, New]) }, : downstairs
42923 Sep 22 23:15:10.421 INFO [0] client ExtentReopen { repair_id: ReconciliationId(347), extent_id: 176 }
42924 Sep 22 23:15:10.421 INFO [1] received reconcile message
42925 Sep 22 23:15:10.421 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(347), op: ExtentReopen { repair_id: ReconciliationId(347), extent_id: 176 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42926 Sep 22 23:15:10.421 INFO [1] client ExtentReopen { repair_id: ReconciliationId(347), extent_id: 176 }
42927 Sep 22 23:15:10.421 INFO [2] received reconcile message
42928 Sep 22 23:15:10.421 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(347), op: ExtentReopen { repair_id: ReconciliationId(347), extent_id: 176 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42929 Sep 22 23:15:10.421 INFO [2] client ExtentReopen { repair_id: ReconciliationId(347), extent_id: 176 }
42930 Sep 22 23:15:10.421 DEBG 347 Reopen extent 176
42931 Sep 22 23:15:10.422 DEBG 347 Reopen extent 176
42932 Sep 22 23:15:10.422 DEBG 347 Reopen extent 176
42933 Sep 22 23:15:10.423 DEBG [2] It's time to notify for 347
42934 Sep 22 23:15:10.423 INFO Completion from [2] id:347 status:true
42935 Sep 22 23:15:10.423 INFO [348/752] Repair commands completed
42936 Sep 22 23:15:10.423 INFO Pop front: ReconcileIO { id: ReconciliationId(348), op: ExtentFlush { repair_id: ReconciliationId(348), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
42937 Sep 22 23:15:10.423 INFO Sent repair work, now wait for resp
42938 Sep 22 23:15:10.423 INFO [0] received reconcile message
42939 Sep 22 23:15:10.423 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(348), op: ExtentFlush { repair_id: ReconciliationId(348), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
42940 Sep 22 23:15:10.423 INFO [0] client ExtentFlush { repair_id: ReconciliationId(348), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42941 Sep 22 23:15:10.423 INFO [1] received reconcile message
42942 Sep 22 23:15:10.423 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(348), op: ExtentFlush { repair_id: ReconciliationId(348), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
42943 Sep 22 23:15:10.423 INFO [1] client ExtentFlush { repair_id: ReconciliationId(348), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42944 Sep 22 23:15:10.423 INFO [2] received reconcile message
42945 Sep 22 23:15:10.423 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(348), op: ExtentFlush { repair_id: ReconciliationId(348), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
42946 Sep 22 23:15:10.423 INFO [2] client ExtentFlush { repair_id: ReconciliationId(348), extent_id: 125, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
42947 Sep 22 23:15:10.423 DEBG 348 Flush extent 125 with f:2 g:2
42948 Sep 22 23:15:10.423 DEBG Flush just extent 125 with f:2 and g:2
42949 Sep 22 23:15:10.424 DEBG [1] It's time to notify for 348
42950 Sep 22 23:15:10.424 INFO Completion from [1] id:348 status:true
42951 Sep 22 23:15:10.424 INFO [349/752] Repair commands completed
42952 Sep 22 23:15:10.424 INFO Pop front: ReconcileIO { id: ReconciliationId(349), op: ExtentClose { repair_id: ReconciliationId(349), extent_id: 125 }, state: ClientData([New, New, New]) }
42953 Sep 22 23:15:10.424 INFO Sent repair work, now wait for resp
42954 Sep 22 23:15:10.424 INFO [0] received reconcile message
42955 Sep 22 23:15:10.424 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(349), op: ExtentClose { repair_id: ReconciliationId(349), extent_id: 125 }, state: ClientData([InProgress, New, New]) }, : downstairs
42956 Sep 22 23:15:10.424 INFO [0] client ExtentClose { repair_id: ReconciliationId(349), extent_id: 125 }
42957 Sep 22 23:15:10.424 INFO [1] received reconcile message
42958 Sep 22 23:15:10.424 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(349), op: ExtentClose { repair_id: ReconciliationId(349), extent_id: 125 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42959 Sep 22 23:15:10.424 INFO [1] client ExtentClose { repair_id: ReconciliationId(349), extent_id: 125 }
42960 Sep 22 23:15:10.424 INFO [2] received reconcile message
42961 Sep 22 23:15:10.424 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(349), op: ExtentClose { repair_id: ReconciliationId(349), extent_id: 125 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
42962 Sep 22 23:15:10.424 INFO [2] client ExtentClose { repair_id: ReconciliationId(349), extent_id: 125 }
42963 Sep 22 23:15:10.424 DEBG 349 Close extent 125
42964 Sep 22 23:15:10.424 DEBG 349 Close extent 125
42965 Sep 22 23:15:10.425 DEBG 349 Close extent 125
42966 Sep 22 23:15:10.425 DEBG [2] It's time to notify for 349
42967 Sep 22 23:15:10.425 INFO Completion from [2] id:349 status:true
42968 Sep 22 23:15:10.425 INFO [350/752] Repair commands completed
42969 Sep 22 23:15:10.425 INFO Pop front: ReconcileIO { id: ReconciliationId(350), op: ExtentRepair { repair_id: ReconciliationId(350), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
42970 Sep 22 23:15:10.425 INFO Sent repair work, now wait for resp
42971 Sep 22 23:15:10.425 INFO [0] received reconcile message
42972 Sep 22 23:15:10.425 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(350), op: ExtentRepair { repair_id: ReconciliationId(350), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
42973 Sep 22 23:15:10.425 INFO [0] client ExtentRepair { repair_id: ReconciliationId(350), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42974 Sep 22 23:15:10.425 INFO [0] Sending repair request ReconciliationId(350)
42975 Sep 22 23:15:10.425 INFO [1] received reconcile message
42976 Sep 22 23:15:10.425 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(350), op: ExtentRepair { repair_id: ReconciliationId(350), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
42977 Sep 22 23:15:10.425 INFO [1] client ExtentRepair { repair_id: ReconciliationId(350), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42978 Sep 22 23:15:10.425 INFO [1] No action required ReconciliationId(350)
42979 Sep 22 23:15:10.425 INFO [2] received reconcile message
42980 Sep 22 23:15:10.425 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(350), op: ExtentRepair { repair_id: ReconciliationId(350), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
42981 Sep 22 23:15:10.425 INFO [2] client ExtentRepair { repair_id: ReconciliationId(350), extent_id: 125, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
42982 Sep 22 23:15:10.425 INFO [2] No action required ReconciliationId(350)
42983 Sep 22 23:15:10.425 DEBG 350 Repair extent 125 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
42984 Sep 22 23:15:10.425 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/07D.copy"
42985 Sep 22 23:15:10.490 INFO accepted connection, remote_addr: 127.0.0.1:43501, local_addr: 127.0.0.1:46213, task: repair
42986 Sep 22 23:15:10.490 TRCE incoming request, uri: /extent/125/files, method: GET, req_id: d5b88c7e-704a-499f-b964-1a5cee27bb60, remote_addr: 127.0.0.1:43501, local_addr: 127.0.0.1:46213, task: repair
42987 Sep 22 23:15:10.490 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/125/files, method: GET, req_id: d5b88c7e-704a-499f-b964-1a5cee27bb60, remote_addr: 127.0.0.1:43501, local_addr: 127.0.0.1:46213, task: repair
42988 Sep 22 23:15:10.491 INFO eid:125 Found repair files: ["07D", "07D.db"]
42989 Sep 22 23:15:10.491 TRCE incoming request, uri: /newextent/125/data, method: GET, req_id: 0f1dd2d1-2ceb-4827-807e-72cfdc7edabb, remote_addr: 127.0.0.1:43501, local_addr: 127.0.0.1:46213, task: repair
42990 Sep 22 23:15:10.491 INFO request completed, latency_us: 260, response_code: 200, uri: /newextent/125/data, method: GET, req_id: 0f1dd2d1-2ceb-4827-807e-72cfdc7edabb, remote_addr: 127.0.0.1:43501, local_addr: 127.0.0.1:46213, task: repair
42991 Sep 22 23:15:10.496 TRCE incoming request, uri: /newextent/125/db, method: GET, req_id: e0535523-61ad-446d-8e9b-b3a3176bd6d2, remote_addr: 127.0.0.1:43501, local_addr: 127.0.0.1:46213, task: repair
42992 Sep 22 23:15:10.497 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/125/db, method: GET, req_id: e0535523-61ad-446d-8e9b-b3a3176bd6d2, remote_addr: 127.0.0.1:43501, local_addr: 127.0.0.1:46213, task: repair
42993 Sep 22 23:15:10.498 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/07D.copy" to "/tmp/downstairs-vrx8aK6L/00/000/07D.replace"
42994 Sep 22 23:15:10.498 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42995 Sep 22 23:15:10.498 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/07D.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
42996 Sep 22 23:15:10.499 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/07D"
42997 Sep 22 23:15:10.499 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/07D.db"
42998 Sep 22 23:15:10.499 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
42999 Sep 22 23:15:10.499 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/07D.replace" to "/tmp/downstairs-vrx8aK6L/00/000/07D.completed"
43000 Sep 22 23:15:10.499 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43001 Sep 22 23:15:10.499 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43002 Sep 22 23:15:10.499 DEBG [0] It's time to notify for 350
43003 Sep 22 23:15:10.499 INFO Completion from [0] id:350 status:true
43004 Sep 22 23:15:10.499 INFO [351/752] Repair commands completed
43005 Sep 22 23:15:10.499 INFO Pop front: ReconcileIO { id: ReconciliationId(351), op: ExtentReopen { repair_id: ReconciliationId(351), extent_id: 125 }, state: ClientData([New, New, New]) }
43006 Sep 22 23:15:10.499 INFO Sent repair work, now wait for resp
43007 Sep 22 23:15:10.499 INFO [0] received reconcile message
43008 Sep 22 23:15:10.499 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(351), op: ExtentReopen { repair_id: ReconciliationId(351), extent_id: 125 }, state: ClientData([InProgress, New, New]) }, : downstairs
43009 Sep 22 23:15:10.499 INFO [0] client ExtentReopen { repair_id: ReconciliationId(351), extent_id: 125 }
43010 Sep 22 23:15:10.499 INFO [1] received reconcile message
43011 Sep 22 23:15:10.499 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(351), op: ExtentReopen { repair_id: ReconciliationId(351), extent_id: 125 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43012 Sep 22 23:15:10.499 INFO [1] client ExtentReopen { repair_id: ReconciliationId(351), extent_id: 125 }
43013 Sep 22 23:15:10.500 INFO [2] received reconcile message
43014 Sep 22 23:15:10.500 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(351), op: ExtentReopen { repair_id: ReconciliationId(351), extent_id: 125 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43015 Sep 22 23:15:10.500 INFO [2] client ExtentReopen { repair_id: ReconciliationId(351), extent_id: 125 }
43016 Sep 22 23:15:10.500 DEBG 351 Reopen extent 125
43017 Sep 22 23:15:10.500 DEBG 351 Reopen extent 125
43018 Sep 22 23:15:10.501 DEBG 351 Reopen extent 125
43019 Sep 22 23:15:10.501 DEBG [2] It's time to notify for 351
43020 Sep 22 23:15:10.501 INFO Completion from [2] id:351 status:true
43021 Sep 22 23:15:10.501 INFO [352/752] Repair commands completed
43022 Sep 22 23:15:10.501 INFO Pop front: ReconcileIO { id: ReconciliationId(352), op: ExtentFlush { repair_id: ReconciliationId(352), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43023 Sep 22 23:15:10.501 INFO Sent repair work, now wait for resp
43024 Sep 22 23:15:10.502 INFO [0] received reconcile message
43025 Sep 22 23:15:10.502 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(352), op: ExtentFlush { repair_id: ReconciliationId(352), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43026 Sep 22 23:15:10.502 INFO [0] client ExtentFlush { repair_id: ReconciliationId(352), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43027 Sep 22 23:15:10.502 INFO [1] received reconcile message
43028 Sep 22 23:15:10.502 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(352), op: ExtentFlush { repair_id: ReconciliationId(352), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43029 Sep 22 23:15:10.502 INFO [1] client ExtentFlush { repair_id: ReconciliationId(352), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43030 Sep 22 23:15:10.502 INFO [2] received reconcile message
43031 Sep 22 23:15:10.502 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(352), op: ExtentFlush { repair_id: ReconciliationId(352), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43032 Sep 22 23:15:10.502 INFO [2] client ExtentFlush { repair_id: ReconciliationId(352), extent_id: 90, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43033 Sep 22 23:15:10.502 DEBG 352 Flush extent 90 with f:2 g:2
43034 Sep 22 23:15:10.502 DEBG Flush just extent 90 with f:2 and g:2
43035 Sep 22 23:15:10.502 DEBG [1] It's time to notify for 352
43036 Sep 22 23:15:10.502 INFO [lossy] skipping 1082
43037 Sep 22 23:15:10.502 INFO Completion from [1] id:352 status:true
43038 Sep 22 23:15:10.502 INFO [353/752] Repair commands completed
43039 Sep 22 23:15:10.502 INFO [lossy] skipping 1082
43040 Sep 22 23:15:10.502 INFO Pop front: ReconcileIO { id: ReconciliationId(353), op: ExtentClose { repair_id: ReconciliationId(353), extent_id: 90 }, state: ClientData([New, New, New]) }
43041 Sep 22 23:15:10.502 INFO Sent repair work, now wait for resp
43042 Sep 22 23:15:10.502 INFO [0] received reconcile message
43043 Sep 22 23:15:10.502 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(353), op: ExtentClose { repair_id: ReconciliationId(353), extent_id: 90 }, state: ClientData([InProgress, New, New]) }, : downstairs
43044 Sep 22 23:15:10.502 INFO [0] client ExtentClose { repair_id: ReconciliationId(353), extent_id: 90 }
43045 Sep 22 23:15:10.502 INFO [1] received reconcile message
43046 Sep 22 23:15:10.502 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(353), op: ExtentClose { repair_id: ReconciliationId(353), extent_id: 90 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43047 Sep 22 23:15:10.502 INFO [1] client ExtentClose { repair_id: ReconciliationId(353), extent_id: 90 }
43048 Sep 22 23:15:10.502 INFO [2] received reconcile message
43049 Sep 22 23:15:10.502 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(353), op: ExtentClose { repair_id: ReconciliationId(353), extent_id: 90 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43050 Sep 22 23:15:10.502 INFO [2] client ExtentClose { repair_id: ReconciliationId(353), extent_id: 90 }
43051 Sep 22 23:15:10.502 DEBG 353 Close extent 90
43052 Sep 22 23:15:10.503 DEBG 353 Close extent 90
43053 Sep 22 23:15:10.503 DEBG 353 Close extent 90
43054 Sep 22 23:15:10.503 DEBG [2] It's time to notify for 353
43055 Sep 22 23:15:10.503 INFO Completion from [2] id:353 status:true
43056 Sep 22 23:15:10.503 INFO [354/752] Repair commands completed
43057 Sep 22 23:15:10.503 INFO Pop front: ReconcileIO { id: ReconciliationId(354), op: ExtentRepair { repair_id: ReconciliationId(354), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43058 Sep 22 23:15:10.503 INFO Sent repair work, now wait for resp
43059 Sep 22 23:15:10.503 INFO [0] received reconcile message
43060 Sep 22 23:15:10.504 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(354), op: ExtentRepair { repair_id: ReconciliationId(354), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43061 Sep 22 23:15:10.504 INFO [0] client ExtentRepair { repair_id: ReconciliationId(354), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43062 Sep 22 23:15:10.504 INFO [0] Sending repair request ReconciliationId(354)
43063 Sep 22 23:15:10.504 INFO [1] received reconcile message
43064 Sep 22 23:15:10.504 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(354), op: ExtentRepair { repair_id: ReconciliationId(354), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43065 Sep 22 23:15:10.504 INFO [1] client ExtentRepair { repair_id: ReconciliationId(354), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43066 Sep 22 23:15:10.504 INFO [1] No action required ReconciliationId(354)
43067 Sep 22 23:15:10.504 INFO [2] received reconcile message
43068 Sep 22 23:15:10.504 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(354), op: ExtentRepair { repair_id: ReconciliationId(354), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43069 Sep 22 23:15:10.504 INFO [2] client ExtentRepair { repair_id: ReconciliationId(354), extent_id: 90, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43070 Sep 22 23:15:10.504 INFO [2] No action required ReconciliationId(354)
43071 Sep 22 23:15:10.504 DEBG 354 Repair extent 90 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
43072 Sep 22 23:15:10.504 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/05A.copy"
43073 Sep 22 23:15:10.508 DEBG Read :1082 deps:[JobId(1081)] res:true
43074 Sep 22 23:15:10.530 DEBG Flush :1081 extent_limit None deps:[JobId(1080)] res:true f:30 g:1
43075 Sep 22 23:15:10.537 DEBG Read :1082 deps:[JobId(1081)] res:true
43076 Sep 22 23:15:10.558 DEBG [rc] retire 1081 clears [JobId(1080), JobId(1081)], : downstairs
43077 Sep 22 23:15:10.561 DEBG Flush :1083 extent_limit None deps:[JobId(1082), JobId(1081)] res:true f:31 g:1
43078 Sep 22 23:15:10.561 INFO [lossy] sleeping 1 second
43079 Sep 22 23:15:10.565 INFO accepted connection, remote_addr: 127.0.0.1:44671, local_addr: 127.0.0.1:46213, task: repair
43080 Sep 22 23:15:10.565 TRCE incoming request, uri: /extent/90/files, method: GET, req_id: 7a6a6a87-5bb7-4d0a-9b9f-f14a496cd1fb, remote_addr: 127.0.0.1:44671, local_addr: 127.0.0.1:46213, task: repair
43081 Sep 22 23:15:10.566 INFO request completed, latency_us: 230, response_code: 200, uri: /extent/90/files, method: GET, req_id: 7a6a6a87-5bb7-4d0a-9b9f-f14a496cd1fb, remote_addr: 127.0.0.1:44671, local_addr: 127.0.0.1:46213, task: repair
43082 Sep 22 23:15:10.566 INFO eid:90 Found repair files: ["05A", "05A.db"]
43083 Sep 22 23:15:10.566 TRCE incoming request, uri: /newextent/90/data, method: GET, req_id: 42a459ec-ba3e-4c6a-9b63-ee47b24302a7, remote_addr: 127.0.0.1:44671, local_addr: 127.0.0.1:46213, task: repair
43084 Sep 22 23:15:10.567 INFO request completed, latency_us: 345, response_code: 200, uri: /newextent/90/data, method: GET, req_id: 42a459ec-ba3e-4c6a-9b63-ee47b24302a7, remote_addr: 127.0.0.1:44671, local_addr: 127.0.0.1:46213, task: repair
43085 Sep 22 23:15:10.572 TRCE incoming request, uri: /newextent/90/db, method: GET, req_id: 21092c30-d57e-4f68-98cf-1bae1ffbe5f4, remote_addr: 127.0.0.1:44671, local_addr: 127.0.0.1:46213, task: repair
43086 Sep 22 23:15:10.572 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/90/db, method: GET, req_id: 21092c30-d57e-4f68-98cf-1bae1ffbe5f4, remote_addr: 127.0.0.1:44671, local_addr: 127.0.0.1:46213, task: repair
43087 Sep 22 23:15:10.573 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/05A.copy" to "/tmp/downstairs-vrx8aK6L/00/000/05A.replace"
43088 Sep 22 23:15:10.573 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43089 Sep 22 23:15:10.574 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/05A.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
43090 Sep 22 23:15:10.574 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/05A"
43091 Sep 22 23:15:10.575 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/05A.db"
43092 Sep 22 23:15:10.575 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43093 Sep 22 23:15:10.575 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/05A.replace" to "/tmp/downstairs-vrx8aK6L/00/000/05A.completed"
43094 Sep 22 23:15:10.575 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43095 Sep 22 23:15:10.575 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43096 Sep 22 23:15:10.575 DEBG [0] It's time to notify for 354
43097 Sep 22 23:15:10.575 INFO Completion from [0] id:354 status:true
43098 Sep 22 23:15:10.575 INFO [355/752] Repair commands completed
43099 Sep 22 23:15:10.575 INFO Pop front: ReconcileIO { id: ReconciliationId(355), op: ExtentReopen { repair_id: ReconciliationId(355), extent_id: 90 }, state: ClientData([New, New, New]) }
43100 Sep 22 23:15:10.575 INFO Sent repair work, now wait for resp
43101 Sep 22 23:15:10.575 INFO [0] received reconcile message
43102 Sep 22 23:15:10.575 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(355), op: ExtentReopen { repair_id: ReconciliationId(355), extent_id: 90 }, state: ClientData([InProgress, New, New]) }, : downstairs
43103 Sep 22 23:15:10.575 INFO [0] client ExtentReopen { repair_id: ReconciliationId(355), extent_id: 90 }
43104 Sep 22 23:15:10.575 INFO [1] received reconcile message
43105 Sep 22 23:15:10.575 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(355), op: ExtentReopen { repair_id: ReconciliationId(355), extent_id: 90 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43106 Sep 22 23:15:10.575 INFO [1] client ExtentReopen { repair_id: ReconciliationId(355), extent_id: 90 }
43107 Sep 22 23:15:10.575 INFO [2] received reconcile message
43108 Sep 22 23:15:10.575 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(355), op: ExtentReopen { repair_id: ReconciliationId(355), extent_id: 90 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43109 Sep 22 23:15:10.575 INFO [2] client ExtentReopen { repair_id: ReconciliationId(355), extent_id: 90 }
43110 Sep 22 23:15:10.575 DEBG 355 Reopen extent 90
43111 Sep 22 23:15:10.576 DEBG 355 Reopen extent 90
43112 Sep 22 23:15:10.577 DEBG 355 Reopen extent 90
43113 Sep 22 23:15:10.577 DEBG [2] It's time to notify for 355
43114 Sep 22 23:15:10.577 INFO Completion from [2] id:355 status:true
43115 Sep 22 23:15:10.577 INFO [356/752] Repair commands completed
43116 Sep 22 23:15:10.577 INFO Pop front: ReconcileIO { id: ReconciliationId(356), op: ExtentFlush { repair_id: ReconciliationId(356), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43117 Sep 22 23:15:10.577 INFO Sent repair work, now wait for resp
43118 Sep 22 23:15:10.577 INFO [0] received reconcile message
43119 Sep 22 23:15:10.577 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(356), op: ExtentFlush { repair_id: ReconciliationId(356), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43120 Sep 22 23:15:10.577 INFO [0] client ExtentFlush { repair_id: ReconciliationId(356), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43121 Sep 22 23:15:10.577 INFO [1] received reconcile message
43122 Sep 22 23:15:10.577 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(356), op: ExtentFlush { repair_id: ReconciliationId(356), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43123 Sep 22 23:15:10.577 INFO [1] client ExtentFlush { repair_id: ReconciliationId(356), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43124 Sep 22 23:15:10.578 INFO [2] received reconcile message
43125 Sep 22 23:15:10.578 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(356), op: ExtentFlush { repair_id: ReconciliationId(356), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43126 Sep 22 23:15:10.578 INFO [2] client ExtentFlush { repair_id: ReconciliationId(356), extent_id: 144, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43127 Sep 22 23:15:10.578 DEBG 356 Flush extent 144 with f:2 g:2
43128 Sep 22 23:15:10.578 DEBG Flush just extent 144 with f:2 and g:2
43129 Sep 22 23:15:10.578 DEBG [1] It's time to notify for 356
43130 Sep 22 23:15:10.578 INFO Completion from [1] id:356 status:true
43131 Sep 22 23:15:10.578 INFO [357/752] Repair commands completed
43132 Sep 22 23:15:10.578 INFO Pop front: ReconcileIO { id: ReconciliationId(357), op: ExtentClose { repair_id: ReconciliationId(357), extent_id: 144 }, state: ClientData([New, New, New]) }
43133 Sep 22 23:15:10.578 INFO Sent repair work, now wait for resp
43134 Sep 22 23:15:10.578 INFO [0] received reconcile message
43135 Sep 22 23:15:10.578 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(357), op: ExtentClose { repair_id: ReconciliationId(357), extent_id: 144 }, state: ClientData([InProgress, New, New]) }, : downstairs
43136 Sep 22 23:15:10.578 INFO [0] client ExtentClose { repair_id: ReconciliationId(357), extent_id: 144 }
43137 Sep 22 23:15:10.578 INFO [1] received reconcile message
43138 Sep 22 23:15:10.578 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(357), op: ExtentClose { repair_id: ReconciliationId(357), extent_id: 144 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43139 Sep 22 23:15:10.578 INFO [1] client ExtentClose { repair_id: ReconciliationId(357), extent_id: 144 }
43140 Sep 22 23:15:10.578 INFO [2] received reconcile message
43141 Sep 22 23:15:10.578 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(357), op: ExtentClose { repair_id: ReconciliationId(357), extent_id: 144 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43142 Sep 22 23:15:10.578 INFO [2] client ExtentClose { repair_id: ReconciliationId(357), extent_id: 144 }
43143 Sep 22 23:15:10.578 DEBG 357 Close extent 144
43144 Sep 22 23:15:10.578 DEBG 357 Close extent 144
43145 Sep 22 23:15:10.579 DEBG 357 Close extent 144
43146 Sep 22 23:15:10.579 DEBG [2] It's time to notify for 357
43147 Sep 22 23:15:10.579 INFO Completion from [2] id:357 status:true
43148 Sep 22 23:15:10.579 INFO [358/752] Repair commands completed
43149 Sep 22 23:15:10.579 INFO Pop front: ReconcileIO { id: ReconciliationId(358), op: ExtentRepair { repair_id: ReconciliationId(358), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43150 Sep 22 23:15:10.579 INFO Sent repair work, now wait for resp
43151 Sep 22 23:15:10.579 INFO [0] received reconcile message
43152 Sep 22 23:15:10.579 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(358), op: ExtentRepair { repair_id: ReconciliationId(358), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43153 Sep 22 23:15:10.579 INFO [0] client ExtentRepair { repair_id: ReconciliationId(358), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43154 Sep 22 23:15:10.579 INFO [0] Sending repair request ReconciliationId(358)
43155 Sep 22 23:15:10.579 INFO [1] received reconcile message
43156 Sep 22 23:15:10.579 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(358), op: ExtentRepair { repair_id: ReconciliationId(358), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43157 Sep 22 23:15:10.579 INFO [1] client ExtentRepair { repair_id: ReconciliationId(358), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43158 Sep 22 23:15:10.580 INFO [1] No action required ReconciliationId(358)
43159 Sep 22 23:15:10.580 INFO [2] received reconcile message
43160 Sep 22 23:15:10.580 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(358), op: ExtentRepair { repair_id: ReconciliationId(358), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43161 Sep 22 23:15:10.580 INFO [2] client ExtentRepair { repair_id: ReconciliationId(358), extent_id: 144, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43162 Sep 22 23:15:10.580 INFO [2] No action required ReconciliationId(358)
43163 Sep 22 23:15:10.580 DEBG 358 Repair extent 144 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
43164 Sep 22 23:15:10.580 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/090.copy"
43165 Sep 22 23:15:10.609 WARN returning error on flush!
43166 Sep 22 23:15:10.609 DEBG Flush :1083 extent_limit None deps:[JobId(1082), JobId(1081)] res:false f:31 g:1
43167 Sep 22 23:15:10.609 WARN returning error on flush!
43168 Sep 22 23:15:10.609 DEBG Flush :1083 extent_limit None deps:[JobId(1082), JobId(1081)] res:false f:31 g:1
43169 Sep 22 23:15:10.609 DEBG Flush :1083 extent_limit None deps:[JobId(1082), JobId(1081)] res:true f:31 g:1
43170 Sep 22 23:15:10.609 INFO [lossy] sleeping 1 second
43171 Sep 22 23:15:10.642 INFO accepted connection, remote_addr: 127.0.0.1:65157, local_addr: 127.0.0.1:46213, task: repair
43172 Sep 22 23:15:10.642 TRCE incoming request, uri: /extent/144/files, method: GET, req_id: abba7a0e-afcf-49f0-a1b0-c0f41d603f2f, remote_addr: 127.0.0.1:65157, local_addr: 127.0.0.1:46213, task: repair
43173 Sep 22 23:15:10.642 INFO request completed, latency_us: 206, response_code: 200, uri: /extent/144/files, method: GET, req_id: abba7a0e-afcf-49f0-a1b0-c0f41d603f2f, remote_addr: 127.0.0.1:65157, local_addr: 127.0.0.1:46213, task: repair
43174 Sep 22 23:15:10.642 INFO eid:144 Found repair files: ["090", "090.db"]
43175 Sep 22 23:15:10.643 TRCE incoming request, uri: /newextent/144/data, method: GET, req_id: d8b39c29-a6b4-43ef-a4a6-bdb9660bf227, remote_addr: 127.0.0.1:65157, local_addr: 127.0.0.1:46213, task: repair
43176 Sep 22 23:15:10.643 INFO request completed, latency_us: 313, response_code: 200, uri: /newextent/144/data, method: GET, req_id: d8b39c29-a6b4-43ef-a4a6-bdb9660bf227, remote_addr: 127.0.0.1:65157, local_addr: 127.0.0.1:46213, task: repair
43177 Sep 22 23:15:10.648 TRCE incoming request, uri: /newextent/144/db, method: GET, req_id: 6d897a7c-a234-4792-a2f1-9b62fcb5b1a0, remote_addr: 127.0.0.1:65157, local_addr: 127.0.0.1:46213, task: repair
43178 Sep 22 23:15:10.648 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/144/db, method: GET, req_id: 6d897a7c-a234-4792-a2f1-9b62fcb5b1a0, remote_addr: 127.0.0.1:65157, local_addr: 127.0.0.1:46213, task: repair
43179 Sep 22 23:15:10.649 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/090.copy" to "/tmp/downstairs-vrx8aK6L/00/000/090.replace"
43180 Sep 22 23:15:10.649 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43181 Sep 22 23:15:10.650 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/090.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
43182 Sep 22 23:15:10.651 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/090"
43183 Sep 22 23:15:10.651 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/090.db"
43184 Sep 22 23:15:10.651 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43185 Sep 22 23:15:10.651 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/090.replace" to "/tmp/downstairs-vrx8aK6L/00/000/090.completed"
43186 Sep 22 23:15:10.651 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43187 Sep 22 23:15:10.651 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43188 Sep 22 23:15:10.651 DEBG [0] It's time to notify for 358
43189 Sep 22 23:15:10.651 INFO Completion from [0] id:358 status:true
43190 Sep 22 23:15:10.651 INFO [359/752] Repair commands completed
43191 Sep 22 23:15:10.651 INFO Pop front: ReconcileIO { id: ReconciliationId(359), op: ExtentReopen { repair_id: ReconciliationId(359), extent_id: 144 }, state: ClientData([New, New, New]) }
43192 Sep 22 23:15:10.651 INFO Sent repair work, now wait for resp
43193 Sep 22 23:15:10.651 INFO [0] received reconcile message
43194 Sep 22 23:15:10.651 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(359), op: ExtentReopen { repair_id: ReconciliationId(359), extent_id: 144 }, state: ClientData([InProgress, New, New]) }, : downstairs
43195 Sep 22 23:15:10.651 INFO [0] client ExtentReopen { repair_id: ReconciliationId(359), extent_id: 144 }
43196 Sep 22 23:15:10.651 INFO [1] received reconcile message
43197 Sep 22 23:15:10.651 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(359), op: ExtentReopen { repair_id: ReconciliationId(359), extent_id: 144 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43198 Sep 22 23:15:10.651 INFO [1] client ExtentReopen { repair_id: ReconciliationId(359), extent_id: 144 }
43199 Sep 22 23:15:10.651 INFO [2] received reconcile message
43200 Sep 22 23:15:10.651 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(359), op: ExtentReopen { repair_id: ReconciliationId(359), extent_id: 144 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43201 Sep 22 23:15:10.651 INFO [2] client ExtentReopen { repair_id: ReconciliationId(359), extent_id: 144 }
43202 Sep 22 23:15:10.652 DEBG 359 Reopen extent 144
43203 Sep 22 23:15:10.652 DEBG 359 Reopen extent 144
43204 Sep 22 23:15:10.653 DEBG 359 Reopen extent 144
43205 Sep 22 23:15:10.653 DEBG [2] It's time to notify for 359
43206 Sep 22 23:15:10.653 INFO Completion from [2] id:359 status:true
43207 Sep 22 23:15:10.653 INFO [360/752] Repair commands completed
43208 Sep 22 23:15:10.653 INFO Pop front: ReconcileIO { id: ReconciliationId(360), op: ExtentFlush { repair_id: ReconciliationId(360), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43209 Sep 22 23:15:10.653 INFO Sent repair work, now wait for resp
43210 Sep 22 23:15:10.653 INFO [0] received reconcile message
43211 Sep 22 23:15:10.653 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(360), op: ExtentFlush { repair_id: ReconciliationId(360), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43212 Sep 22 23:15:10.653 INFO [0] client ExtentFlush { repair_id: ReconciliationId(360), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43213 Sep 22 23:15:10.653 INFO [1] received reconcile message
43214 Sep 22 23:15:10.654 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(360), op: ExtentFlush { repair_id: ReconciliationId(360), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43215 Sep 22 23:15:10.654 INFO [1] client ExtentFlush { repair_id: ReconciliationId(360), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43216 Sep 22 23:15:10.654 INFO [2] received reconcile message
43217 Sep 22 23:15:10.654 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(360), op: ExtentFlush { repair_id: ReconciliationId(360), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43218 Sep 22 23:15:10.654 INFO [2] client ExtentFlush { repair_id: ReconciliationId(360), extent_id: 156, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43219 Sep 22 23:15:10.654 DEBG 360 Flush extent 156 with f:2 g:2
43220 Sep 22 23:15:10.654 DEBG Flush just extent 156 with f:2 and g:2
43221 Sep 22 23:15:10.654 DEBG [1] It's time to notify for 360
43222 Sep 22 23:15:10.654 INFO Completion from [1] id:360 status:true
43223 Sep 22 23:15:10.654 INFO [361/752] Repair commands completed
43224 Sep 22 23:15:10.654 INFO Pop front: ReconcileIO { id: ReconciliationId(361), op: ExtentClose { repair_id: ReconciliationId(361), extent_id: 156 }, state: ClientData([New, New, New]) }
43225 Sep 22 23:15:10.654 INFO Sent repair work, now wait for resp
43226 Sep 22 23:15:10.654 INFO [0] received reconcile message
43227 Sep 22 23:15:10.654 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(361), op: ExtentClose { repair_id: ReconciliationId(361), extent_id: 156 }, state: ClientData([InProgress, New, New]) }, : downstairs
43228 Sep 22 23:15:10.654 INFO [0] client ExtentClose { repair_id: ReconciliationId(361), extent_id: 156 }
43229 Sep 22 23:15:10.654 INFO [1] received reconcile message
43230 Sep 22 23:15:10.654 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(361), op: ExtentClose { repair_id: ReconciliationId(361), extent_id: 156 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43231 Sep 22 23:15:10.654 INFO [1] client ExtentClose { repair_id: ReconciliationId(361), extent_id: 156 }
43232 Sep 22 23:15:10.654 INFO [2] received reconcile message
43233 Sep 22 23:15:10.654 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(361), op: ExtentClose { repair_id: ReconciliationId(361), extent_id: 156 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43234 Sep 22 23:15:10.654 INFO [2] client ExtentClose { repair_id: ReconciliationId(361), extent_id: 156 }
43235 Sep 22 23:15:10.654 DEBG 361 Close extent 156
43236 Sep 22 23:15:10.655 DEBG 361 Close extent 156
43237 Sep 22 23:15:10.655 DEBG 361 Close extent 156
43238 Sep 22 23:15:10.655 DEBG [2] It's time to notify for 361
43239 Sep 22 23:15:10.655 INFO Completion from [2] id:361 status:true
43240 Sep 22 23:15:10.655 INFO [362/752] Repair commands completed
43241 Sep 22 23:15:10.655 INFO Pop front: ReconcileIO { id: ReconciliationId(362), op: ExtentRepair { repair_id: ReconciliationId(362), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43242 Sep 22 23:15:10.655 INFO Sent repair work, now wait for resp
43243 Sep 22 23:15:10.655 INFO [0] received reconcile message
43244 Sep 22 23:15:10.655 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(362), op: ExtentRepair { repair_id: ReconciliationId(362), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43245 Sep 22 23:15:10.655 INFO [0] client ExtentRepair { repair_id: ReconciliationId(362), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43246 Sep 22 23:15:10.655 INFO [0] Sending repair request ReconciliationId(362)
43247 Sep 22 23:15:10.655 INFO [1] received reconcile message
43248 Sep 22 23:15:10.656 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(362), op: ExtentRepair { repair_id: ReconciliationId(362), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43249 Sep 22 23:15:10.656 INFO [1] client ExtentRepair { repair_id: ReconciliationId(362), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43250 Sep 22 23:15:10.656 INFO [1] No action required ReconciliationId(362)
43251 Sep 22 23:15:10.656 INFO [2] received reconcile message
43252 Sep 22 23:15:10.656 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(362), op: ExtentRepair { repair_id: ReconciliationId(362), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43253 Sep 22 23:15:10.656 INFO [2] client ExtentRepair { repair_id: ReconciliationId(362), extent_id: 156, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43254 Sep 22 23:15:10.656 INFO [2] No action required ReconciliationId(362)
43255 Sep 22 23:15:10.656 DEBG 362 Repair extent 156 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
43256 Sep 22 23:15:10.656 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/09C.copy"
43257 Sep 22 23:15:10.717 INFO accepted connection, remote_addr: 127.0.0.1:46629, local_addr: 127.0.0.1:46213, task: repair
43258 Sep 22 23:15:10.718 TRCE incoming request, uri: /extent/156/files, method: GET, req_id: 2dff1d28-5851-482e-bde7-dbfce6e7e751, remote_addr: 127.0.0.1:46629, local_addr: 127.0.0.1:46213, task: repair
43259 Sep 22 23:15:10.718 INFO request completed, latency_us: 200, response_code: 200, uri: /extent/156/files, method: GET, req_id: 2dff1d28-5851-482e-bde7-dbfce6e7e751, remote_addr: 127.0.0.1:46629, local_addr: 127.0.0.1:46213, task: repair
43260 Sep 22 23:15:10.718 INFO eid:156 Found repair files: ["09C", "09C.db"]
43261 Sep 22 23:15:10.718 TRCE incoming request, uri: /newextent/156/data, method: GET, req_id: 5dd4a693-bd0b-47a9-ab59-e626a55dbbb9, remote_addr: 127.0.0.1:46629, local_addr: 127.0.0.1:46213, task: repair
43262 Sep 22 23:15:10.719 INFO request completed, latency_us: 313, response_code: 200, uri: /newextent/156/data, method: GET, req_id: 5dd4a693-bd0b-47a9-ab59-e626a55dbbb9, remote_addr: 127.0.0.1:46629, local_addr: 127.0.0.1:46213, task: repair
43263 Sep 22 23:15:10.724 TRCE incoming request, uri: /newextent/156/db, method: GET, req_id: f8705a8f-e230-4cd2-99f9-65d61c8bd17c, remote_addr: 127.0.0.1:46629, local_addr: 127.0.0.1:46213, task: repair
43264 Sep 22 23:15:10.724 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/156/db, method: GET, req_id: f8705a8f-e230-4cd2-99f9-65d61c8bd17c, remote_addr: 127.0.0.1:46629, local_addr: 127.0.0.1:46213, task: repair
43265 Sep 22 23:15:10.725 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/09C.copy" to "/tmp/downstairs-vrx8aK6L/00/000/09C.replace"
43266 Sep 22 23:15:10.725 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43267 Sep 22 23:15:10.726 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/09C.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
43268 Sep 22 23:15:10.726 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/09C"
43269 Sep 22 23:15:10.726 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/09C.db"
43270 Sep 22 23:15:10.726 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43271 Sep 22 23:15:10.726 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/09C.replace" to "/tmp/downstairs-vrx8aK6L/00/000/09C.completed"
43272 Sep 22 23:15:10.726 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43273 Sep 22 23:15:10.727 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43274 Sep 22 23:15:10.727 DEBG [0] It's time to notify for 362
43275 Sep 22 23:15:10.727 INFO Completion from [0] id:362 status:true
43276 Sep 22 23:15:10.727 INFO [363/752] Repair commands completed
43277 Sep 22 23:15:10.727 INFO Pop front: ReconcileIO { id: ReconciliationId(363), op: ExtentReopen { repair_id: ReconciliationId(363), extent_id: 156 }, state: ClientData([New, New, New]) }
43278 Sep 22 23:15:10.727 INFO Sent repair work, now wait for resp
43279 Sep 22 23:15:10.727 INFO [0] received reconcile message
43280 Sep 22 23:15:10.727 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(363), op: ExtentReopen { repair_id: ReconciliationId(363), extent_id: 156 }, state: ClientData([InProgress, New, New]) }, : downstairs
43281 Sep 22 23:15:10.727 INFO [0] client ExtentReopen { repair_id: ReconciliationId(363), extent_id: 156 }
43282 Sep 22 23:15:10.727 INFO [1] received reconcile message
43283 Sep 22 23:15:10.727 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(363), op: ExtentReopen { repair_id: ReconciliationId(363), extent_id: 156 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43284 Sep 22 23:15:10.727 INFO [1] client ExtentReopen { repair_id: ReconciliationId(363), extent_id: 156 }
43285 Sep 22 23:15:10.727 INFO [2] received reconcile message
43286 Sep 22 23:15:10.727 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(363), op: ExtentReopen { repair_id: ReconciliationId(363), extent_id: 156 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43287 Sep 22 23:15:10.727 INFO [2] client ExtentReopen { repair_id: ReconciliationId(363), extent_id: 156 }
43288 Sep 22 23:15:10.727 DEBG 363 Reopen extent 156
43289 Sep 22 23:15:10.728 DEBG 363 Reopen extent 156
43290 Sep 22 23:15:10.728 DEBG 363 Reopen extent 156
43291 Sep 22 23:15:10.729 DEBG [2] It's time to notify for 363
43292 Sep 22 23:15:10.729 INFO Completion from [2] id:363 status:true
43293 Sep 22 23:15:10.729 INFO [364/752] Repair commands completed
43294 Sep 22 23:15:10.729 INFO Pop front: ReconcileIO { id: ReconciliationId(364), op: ExtentFlush { repair_id: ReconciliationId(364), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43295 Sep 22 23:15:10.729 INFO Sent repair work, now wait for resp
43296 Sep 22 23:15:10.729 INFO [0] received reconcile message
43297 Sep 22 23:15:10.729 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(364), op: ExtentFlush { repair_id: ReconciliationId(364), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43298 Sep 22 23:15:10.729 INFO [0] client ExtentFlush { repair_id: ReconciliationId(364), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43299 Sep 22 23:15:10.729 INFO [1] received reconcile message
43300 Sep 22 23:15:10.729 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(364), op: ExtentFlush { repair_id: ReconciliationId(364), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43301 Sep 22 23:15:10.729 INFO [1] client ExtentFlush { repair_id: ReconciliationId(364), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43302 Sep 22 23:15:10.729 INFO [2] received reconcile message
43303 Sep 22 23:15:10.729 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(364), op: ExtentFlush { repair_id: ReconciliationId(364), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43304 Sep 22 23:15:10.729 INFO [2] client ExtentFlush { repair_id: ReconciliationId(364), extent_id: 33, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43305 Sep 22 23:15:10.729 DEBG 364 Flush extent 33 with f:2 g:2
43306 Sep 22 23:15:10.729 DEBG Flush just extent 33 with f:2 and g:2
43307 Sep 22 23:15:10.730 DEBG [1] It's time to notify for 364
43308 Sep 22 23:15:10.730 INFO Completion from [1] id:364 status:true
43309 Sep 22 23:15:10.730 INFO [365/752] Repair commands completed
43310 Sep 22 23:15:10.730 INFO Pop front: ReconcileIO { id: ReconciliationId(365), op: ExtentClose { repair_id: ReconciliationId(365), extent_id: 33 }, state: ClientData([New, New, New]) }
43311 Sep 22 23:15:10.730 INFO Sent repair work, now wait for resp
43312 Sep 22 23:15:10.730 INFO [0] received reconcile message
43313 Sep 22 23:15:10.730 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(365), op: ExtentClose { repair_id: ReconciliationId(365), extent_id: 33 }, state: ClientData([InProgress, New, New]) }, : downstairs
43314 Sep 22 23:15:10.730 INFO [0] client ExtentClose { repair_id: ReconciliationId(365), extent_id: 33 }
43315 Sep 22 23:15:10.730 INFO [1] received reconcile message
43316 Sep 22 23:15:10.730 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(365), op: ExtentClose { repair_id: ReconciliationId(365), extent_id: 33 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43317 Sep 22 23:15:10.730 INFO [1] client ExtentClose { repair_id: ReconciliationId(365), extent_id: 33 }
43318 Sep 22 23:15:10.730 INFO [2] received reconcile message
43319 Sep 22 23:15:10.730 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(365), op: ExtentClose { repair_id: ReconciliationId(365), extent_id: 33 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43320 Sep 22 23:15:10.730 INFO [2] client ExtentClose { repair_id: ReconciliationId(365), extent_id: 33 }
43321 Sep 22 23:15:10.730 DEBG 365 Close extent 33
43322 Sep 22 23:15:10.730 DEBG 365 Close extent 33
43323 Sep 22 23:15:10.731 DEBG 365 Close extent 33
43324 Sep 22 23:15:10.731 DEBG [2] It's time to notify for 365
43325 Sep 22 23:15:10.731 INFO Completion from [2] id:365 status:true
43326 Sep 22 23:15:10.731 INFO [366/752] Repair commands completed
43327 Sep 22 23:15:10.731 INFO Pop front: ReconcileIO { id: ReconciliationId(366), op: ExtentRepair { repair_id: ReconciliationId(366), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43328 Sep 22 23:15:10.731 INFO Sent repair work, now wait for resp
43329 Sep 22 23:15:10.731 INFO [0] received reconcile message
43330 Sep 22 23:15:10.731 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(366), op: ExtentRepair { repair_id: ReconciliationId(366), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43331 Sep 22 23:15:10.731 INFO [0] client ExtentRepair { repair_id: ReconciliationId(366), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43332 Sep 22 23:15:10.731 INFO [0] Sending repair request ReconciliationId(366)
43333 Sep 22 23:15:10.731 INFO [1] received reconcile message
43334 Sep 22 23:15:10.731 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(366), op: ExtentRepair { repair_id: ReconciliationId(366), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43335 Sep 22 23:15:10.731 INFO [1] client ExtentRepair { repair_id: ReconciliationId(366), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43336 Sep 22 23:15:10.731 INFO [1] No action required ReconciliationId(366)
43337 Sep 22 23:15:10.731 INFO [2] received reconcile message
43338 Sep 22 23:15:10.731 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(366), op: ExtentRepair { repair_id: ReconciliationId(366), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43339 Sep 22 23:15:10.731 INFO [2] client ExtentRepair { repair_id: ReconciliationId(366), extent_id: 33, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43340 Sep 22 23:15:10.731 INFO [2] No action required ReconciliationId(366)
43341 Sep 22 23:15:10.731 DEBG 366 Repair extent 33 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
43342 Sep 22 23:15:10.732 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/021.copy"
43343 Sep 22 23:15:10.794 INFO accepted connection, remote_addr: 127.0.0.1:40276, local_addr: 127.0.0.1:46213, task: repair
43344 Sep 22 23:15:10.794 TRCE incoming request, uri: /extent/33/files, method: GET, req_id: 0164b5b8-6b31-44a1-b287-7e4d2c5cdbb4, remote_addr: 127.0.0.1:40276, local_addr: 127.0.0.1:46213, task: repair
43345 Sep 22 23:15:10.794 INFO request completed, latency_us: 242, response_code: 200, uri: /extent/33/files, method: GET, req_id: 0164b5b8-6b31-44a1-b287-7e4d2c5cdbb4, remote_addr: 127.0.0.1:40276, local_addr: 127.0.0.1:46213, task: repair
43346 Sep 22 23:15:10.794 INFO eid:33 Found repair files: ["021", "021.db"]
43347 Sep 22 23:15:10.795 TRCE incoming request, uri: /newextent/33/data, method: GET, req_id: f905dba8-bdcc-4829-9585-6ab9bf0a6092, remote_addr: 127.0.0.1:40276, local_addr: 127.0.0.1:46213, task: repair
43348 Sep 22 23:15:10.795 INFO request completed, latency_us: 332, response_code: 200, uri: /newextent/33/data, method: GET, req_id: f905dba8-bdcc-4829-9585-6ab9bf0a6092, remote_addr: 127.0.0.1:40276, local_addr: 127.0.0.1:46213, task: repair
43349 Sep 22 23:15:10.800 TRCE incoming request, uri: /newextent/33/db, method: GET, req_id: 5777fda6-75b2-48da-a6b0-ffd3d277438c, remote_addr: 127.0.0.1:40276, local_addr: 127.0.0.1:46213, task: repair
43350 Sep 22 23:15:10.801 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/33/db, method: GET, req_id: 5777fda6-75b2-48da-a6b0-ffd3d277438c, remote_addr: 127.0.0.1:40276, local_addr: 127.0.0.1:46213, task: repair
43351 Sep 22 23:15:10.802 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/021.copy" to "/tmp/downstairs-vrx8aK6L/00/000/021.replace"
43352 Sep 22 23:15:10.802 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43353 Sep 22 23:15:10.803 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/021.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
43354 Sep 22 23:15:10.803 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/021"
43355 Sep 22 23:15:10.803 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/021.db"
43356 Sep 22 23:15:10.803 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43357 Sep 22 23:15:10.803 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/021.replace" to "/tmp/downstairs-vrx8aK6L/00/000/021.completed"
43358 Sep 22 23:15:10.803 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43359 Sep 22 23:15:10.803 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43360 Sep 22 23:15:10.803 DEBG [0] It's time to notify for 366
43361 Sep 22 23:15:10.804 INFO Completion from [0] id:366 status:true
43362 Sep 22 23:15:10.804 INFO [367/752] Repair commands completed
43363 Sep 22 23:15:10.804 INFO Pop front: ReconcileIO { id: ReconciliationId(367), op: ExtentReopen { repair_id: ReconciliationId(367), extent_id: 33 }, state: ClientData([New, New, New]) }
43364 Sep 22 23:15:10.804 INFO Sent repair work, now wait for resp
43365 Sep 22 23:15:10.804 INFO [0] received reconcile message
43366 Sep 22 23:15:10.804 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(367), op: ExtentReopen { repair_id: ReconciliationId(367), extent_id: 33 }, state: ClientData([InProgress, New, New]) }, : downstairs
43367 Sep 22 23:15:10.804 INFO [0] client ExtentReopen { repair_id: ReconciliationId(367), extent_id: 33 }
43368 Sep 22 23:15:10.804 INFO [1] received reconcile message
43369 Sep 22 23:15:10.804 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(367), op: ExtentReopen { repair_id: ReconciliationId(367), extent_id: 33 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43370 Sep 22 23:15:10.804 INFO [1] client ExtentReopen { repair_id: ReconciliationId(367), extent_id: 33 }
43371 Sep 22 23:15:10.804 INFO [2] received reconcile message
43372 Sep 22 23:15:10.804 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(367), op: ExtentReopen { repair_id: ReconciliationId(367), extent_id: 33 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43373 Sep 22 23:15:10.804 INFO [2] client ExtentReopen { repair_id: ReconciliationId(367), extent_id: 33 }
43374 Sep 22 23:15:10.804 DEBG 367 Reopen extent 33
43375 Sep 22 23:15:10.805 DEBG 367 Reopen extent 33
43376 Sep 22 23:15:10.805 DEBG 367 Reopen extent 33
43377 Sep 22 23:15:10.806 DEBG [2] It's time to notify for 367
43378 Sep 22 23:15:10.806 INFO Completion from [2] id:367 status:true
43379 Sep 22 23:15:10.806 INFO [368/752] Repair commands completed
43380 Sep 22 23:15:10.806 INFO Pop front: ReconcileIO { id: ReconciliationId(368), op: ExtentFlush { repair_id: ReconciliationId(368), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43381 Sep 22 23:15:10.806 INFO Sent repair work, now wait for resp
43382 Sep 22 23:15:10.806 INFO [0] received reconcile message
43383 Sep 22 23:15:10.806 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(368), op: ExtentFlush { repair_id: ReconciliationId(368), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43384 Sep 22 23:15:10.806 INFO [0] client ExtentFlush { repair_id: ReconciliationId(368), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43385 Sep 22 23:15:10.806 INFO [1] received reconcile message
43386 Sep 22 23:15:10.806 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(368), op: ExtentFlush { repair_id: ReconciliationId(368), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43387 Sep 22 23:15:10.806 INFO [1] client ExtentFlush { repair_id: ReconciliationId(368), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43388 Sep 22 23:15:10.806 INFO [2] received reconcile message
43389 Sep 22 23:15:10.806 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(368), op: ExtentFlush { repair_id: ReconciliationId(368), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43390 Sep 22 23:15:10.806 INFO [2] client ExtentFlush { repair_id: ReconciliationId(368), extent_id: 48, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43391 Sep 22 23:15:10.806 DEBG 368 Flush extent 48 with f:2 g:2
43392 Sep 22 23:15:10.806 DEBG Flush just extent 48 with f:2 and g:2
43393 Sep 22 23:15:10.806 DEBG [1] It's time to notify for 368
43394 Sep 22 23:15:10.806 INFO Completion from [1] id:368 status:true
43395 Sep 22 23:15:10.807 INFO [369/752] Repair commands completed
43396 Sep 22 23:15:10.807 INFO Pop front: ReconcileIO { id: ReconciliationId(369), op: ExtentClose { repair_id: ReconciliationId(369), extent_id: 48 }, state: ClientData([New, New, New]) }
43397 Sep 22 23:15:10.807 INFO Sent repair work, now wait for resp
43398 Sep 22 23:15:10.807 INFO [0] received reconcile message
43399 Sep 22 23:15:10.807 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(369), op: ExtentClose { repair_id: ReconciliationId(369), extent_id: 48 }, state: ClientData([InProgress, New, New]) }, : downstairs
43400 Sep 22 23:15:10.807 INFO [0] client ExtentClose { repair_id: ReconciliationId(369), extent_id: 48 }
43401 Sep 22 23:15:10.807 INFO [1] received reconcile message
43402 Sep 22 23:15:10.807 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(369), op: ExtentClose { repair_id: ReconciliationId(369), extent_id: 48 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43403 Sep 22 23:15:10.807 INFO [1] client ExtentClose { repair_id: ReconciliationId(369), extent_id: 48 }
43404 Sep 22 23:15:10.807 INFO [2] received reconcile message
43405 Sep 22 23:15:10.807 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(369), op: ExtentClose { repair_id: ReconciliationId(369), extent_id: 48 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43406 Sep 22 23:15:10.807 INFO [2] client ExtentClose { repair_id: ReconciliationId(369), extent_id: 48 }
43407 Sep 22 23:15:10.807 DEBG 369 Close extent 48
43408 Sep 22 23:15:10.807 DEBG 369 Close extent 48
43409 Sep 22 23:15:10.807 DEBG 369 Close extent 48
43410 Sep 22 23:15:10.808 DEBG [2] It's time to notify for 369
43411 Sep 22 23:15:10.808 INFO Completion from [2] id:369 status:true
43412 Sep 22 23:15:10.808 INFO [370/752] Repair commands completed
43413 Sep 22 23:15:10.808 INFO Pop front: ReconcileIO { id: ReconciliationId(370), op: ExtentRepair { repair_id: ReconciliationId(370), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43414 Sep 22 23:15:10.808 INFO Sent repair work, now wait for resp
43415 Sep 22 23:15:10.808 INFO [0] received reconcile message
43416 Sep 22 23:15:10.808 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(370), op: ExtentRepair { repair_id: ReconciliationId(370), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43417 Sep 22 23:15:10.808 INFO [0] client ExtentRepair { repair_id: ReconciliationId(370), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43418 Sep 22 23:15:10.808 INFO [0] Sending repair request ReconciliationId(370)
43419 Sep 22 23:15:10.808 INFO [1] received reconcile message
43420 Sep 22 23:15:10.808 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(370), op: ExtentRepair { repair_id: ReconciliationId(370), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43421 Sep 22 23:15:10.808 INFO [1] client ExtentRepair { repair_id: ReconciliationId(370), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43422 Sep 22 23:15:10.808 INFO [1] No action required ReconciliationId(370)
43423 Sep 22 23:15:10.808 INFO [2] received reconcile message
43424 Sep 22 23:15:10.808 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(370), op: ExtentRepair { repair_id: ReconciliationId(370), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43425 Sep 22 23:15:10.808 INFO [2] client ExtentRepair { repair_id: ReconciliationId(370), extent_id: 48, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43426 Sep 22 23:15:10.808 INFO [2] No action required ReconciliationId(370)
43427 Sep 22 23:15:10.808 DEBG 370 Repair extent 48 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
43428 Sep 22 23:15:10.808 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/030.copy"
43429 Sep 22 23:15:10.869 INFO accepted connection, remote_addr: 127.0.0.1:42406, local_addr: 127.0.0.1:46213, task: repair
43430 Sep 22 23:15:10.869 TRCE incoming request, uri: /extent/48/files, method: GET, req_id: bda9bef3-73c2-4aef-8477-4f9bade941a5, remote_addr: 127.0.0.1:42406, local_addr: 127.0.0.1:46213, task: repair
43431 Sep 22 23:15:10.870 INFO request completed, latency_us: 213, response_code: 200, uri: /extent/48/files, method: GET, req_id: bda9bef3-73c2-4aef-8477-4f9bade941a5, remote_addr: 127.0.0.1:42406, local_addr: 127.0.0.1:46213, task: repair
43432 Sep 22 23:15:10.870 INFO eid:48 Found repair files: ["030", "030.db"]
43433 Sep 22 23:15:10.870 TRCE incoming request, uri: /newextent/48/data, method: GET, req_id: 20fc99f7-bc7d-429d-ad31-d2d63c0c4b88, remote_addr: 127.0.0.1:42406, local_addr: 127.0.0.1:46213, task: repair
43434 Sep 22 23:15:10.870 INFO request completed, latency_us: 324, response_code: 200, uri: /newextent/48/data, method: GET, req_id: 20fc99f7-bc7d-429d-ad31-d2d63c0c4b88, remote_addr: 127.0.0.1:42406, local_addr: 127.0.0.1:46213, task: repair
43435 Sep 22 23:15:10.876 TRCE incoming request, uri: /newextent/48/db, method: GET, req_id: 1f84c39a-6e91-4723-bc73-16df8890dc59, remote_addr: 127.0.0.1:42406, local_addr: 127.0.0.1:46213, task: repair
43436 Sep 22 23:15:10.876 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/48/db, method: GET, req_id: 1f84c39a-6e91-4723-bc73-16df8890dc59, remote_addr: 127.0.0.1:42406, local_addr: 127.0.0.1:46213, task: repair
43437 Sep 22 23:15:10.877 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/030.copy" to "/tmp/downstairs-vrx8aK6L/00/000/030.replace"
43438 Sep 22 23:15:10.877 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43439 Sep 22 23:15:10.878 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/030.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
43440 Sep 22 23:15:10.878 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/030"
43441 Sep 22 23:15:10.878 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/030.db"
43442 Sep 22 23:15:10.878 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43443 Sep 22 23:15:10.878 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/030.replace" to "/tmp/downstairs-vrx8aK6L/00/000/030.completed"
43444 Sep 22 23:15:10.878 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43445 Sep 22 23:15:10.878 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43446 Sep 22 23:15:10.879 DEBG [0] It's time to notify for 370
43447 Sep 22 23:15:10.879 INFO Completion from [0] id:370 status:true
43448 Sep 22 23:15:10.879 INFO [371/752] Repair commands completed
43449 Sep 22 23:15:10.879 INFO Pop front: ReconcileIO { id: ReconciliationId(371), op: ExtentReopen { repair_id: ReconciliationId(371), extent_id: 48 }, state: ClientData([New, New, New]) }
43450 Sep 22 23:15:10.879 INFO Sent repair work, now wait for resp
43451 Sep 22 23:15:10.879 INFO [0] received reconcile message
43452 Sep 22 23:15:10.879 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(371), op: ExtentReopen { repair_id: ReconciliationId(371), extent_id: 48 }, state: ClientData([InProgress, New, New]) }, : downstairs
43453 Sep 22 23:15:10.879 INFO [0] client ExtentReopen { repair_id: ReconciliationId(371), extent_id: 48 }
43454 Sep 22 23:15:10.879 INFO [1] received reconcile message
43455 Sep 22 23:15:10.879 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(371), op: ExtentReopen { repair_id: ReconciliationId(371), extent_id: 48 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43456 Sep 22 23:15:10.879 INFO [1] client ExtentReopen { repair_id: ReconciliationId(371), extent_id: 48 }
43457 Sep 22 23:15:10.879 INFO [2] received reconcile message
43458 Sep 22 23:15:10.879 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(371), op: ExtentReopen { repair_id: ReconciliationId(371), extent_id: 48 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43459 Sep 22 23:15:10.879 INFO [2] client ExtentReopen { repair_id: ReconciliationId(371), extent_id: 48 }
43460 Sep 22 23:15:10.879 DEBG 371 Reopen extent 48
43461 Sep 22 23:15:10.880 DEBG 371 Reopen extent 48
43462 Sep 22 23:15:10.880 DEBG 371 Reopen extent 48
43463 Sep 22 23:15:10.881 DEBG [2] It's time to notify for 371
43464 Sep 22 23:15:10.881 INFO Completion from [2] id:371 status:true
43465 Sep 22 23:15:10.881 INFO [372/752] Repair commands completed
43466 Sep 22 23:15:10.881 INFO Pop front: ReconcileIO { id: ReconciliationId(372), op: ExtentFlush { repair_id: ReconciliationId(372), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43467 Sep 22 23:15:10.881 INFO Sent repair work, now wait for resp
43468 Sep 22 23:15:10.881 INFO [0] received reconcile message
43469 Sep 22 23:15:10.881 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(372), op: ExtentFlush { repair_id: ReconciliationId(372), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43470 Sep 22 23:15:10.881 INFO [0] client ExtentFlush { repair_id: ReconciliationId(372), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43471 Sep 22 23:15:10.881 INFO [1] received reconcile message
43472 Sep 22 23:15:10.881 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(372), op: ExtentFlush { repair_id: ReconciliationId(372), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43473 Sep 22 23:15:10.881 INFO [1] client ExtentFlush { repair_id: ReconciliationId(372), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43474 Sep 22 23:15:10.881 INFO [2] received reconcile message
43475 Sep 22 23:15:10.881 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(372), op: ExtentFlush { repair_id: ReconciliationId(372), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43476 Sep 22 23:15:10.881 INFO [2] client ExtentFlush { repair_id: ReconciliationId(372), extent_id: 78, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43477 Sep 22 23:15:10.881 DEBG 372 Flush extent 78 with f:2 g:2
43478 Sep 22 23:15:10.881 DEBG Flush just extent 78 with f:2 and g:2
43479 Sep 22 23:15:10.881 DEBG [1] It's time to notify for 372
43480 Sep 22 23:15:10.882 INFO Completion from [1] id:372 status:true
43481 Sep 22 23:15:10.882 INFO [373/752] Repair commands completed
43482 Sep 22 23:15:10.882 INFO Pop front: ReconcileIO { id: ReconciliationId(373), op: ExtentClose { repair_id: ReconciliationId(373), extent_id: 78 }, state: ClientData([New, New, New]) }
43483 Sep 22 23:15:10.882 INFO Sent repair work, now wait for resp
43484 Sep 22 23:15:10.882 INFO [0] received reconcile message
43485 Sep 22 23:15:10.882 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(373), op: ExtentClose { repair_id: ReconciliationId(373), extent_id: 78 }, state: ClientData([InProgress, New, New]) }, : downstairs
43486 Sep 22 23:15:10.882 INFO [0] client ExtentClose { repair_id: ReconciliationId(373), extent_id: 78 }
43487 Sep 22 23:15:10.882 INFO [1] received reconcile message
43488 Sep 22 23:15:10.882 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(373), op: ExtentClose { repair_id: ReconciliationId(373), extent_id: 78 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43489 Sep 22 23:15:10.882 INFO [1] client ExtentClose { repair_id: ReconciliationId(373), extent_id: 78 }
43490 Sep 22 23:15:10.882 INFO [2] received reconcile message
43491 Sep 22 23:15:10.882 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(373), op: ExtentClose { repair_id: ReconciliationId(373), extent_id: 78 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43492 Sep 22 23:15:10.882 INFO [2] client ExtentClose { repair_id: ReconciliationId(373), extent_id: 78 }
43493 Sep 22 23:15:10.882 DEBG 373 Close extent 78
43494 Sep 22 23:15:10.882 DEBG 373 Close extent 78
43495 Sep 22 23:15:10.882 DEBG 373 Close extent 78
43496 Sep 22 23:15:10.883 DEBG [2] It's time to notify for 373
43497 Sep 22 23:15:10.883 INFO Completion from [2] id:373 status:true
43498 Sep 22 23:15:10.883 INFO [374/752] Repair commands completed
43499 Sep 22 23:15:10.883 INFO Pop front: ReconcileIO { id: ReconciliationId(374), op: ExtentRepair { repair_id: ReconciliationId(374), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43500 Sep 22 23:15:10.883 INFO Sent repair work, now wait for resp
43501 Sep 22 23:15:10.883 INFO [0] received reconcile message
43502 Sep 22 23:15:10.883 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(374), op: ExtentRepair { repair_id: ReconciliationId(374), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43503 Sep 22 23:15:10.883 INFO [0] client ExtentRepair { repair_id: ReconciliationId(374), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43504 Sep 22 23:15:10.883 INFO [0] Sending repair request ReconciliationId(374)
43505 Sep 22 23:15:10.883 INFO [1] received reconcile message
43506 Sep 22 23:15:10.883 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(374), op: ExtentRepair { repair_id: ReconciliationId(374), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43507 Sep 22 23:15:10.883 INFO [1] client ExtentRepair { repair_id: ReconciliationId(374), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43508 Sep 22 23:15:10.883 INFO [1] No action required ReconciliationId(374)
43509 Sep 22 23:15:10.883 INFO [2] received reconcile message
43510 Sep 22 23:15:10.883 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(374), op: ExtentRepair { repair_id: ReconciliationId(374), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43511 Sep 22 23:15:10.883 INFO [2] client ExtentRepair { repair_id: ReconciliationId(374), extent_id: 78, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43512 Sep 22 23:15:10.883 INFO [2] No action required ReconciliationId(374)
43513 Sep 22 23:15:10.883 DEBG 374 Repair extent 78 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
43514 Sep 22 23:15:10.883 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/04E.copy"
43515 Sep 22 23:15:10.939 DEBG [1] Read AckReady 1082, : downstairs
43516 Sep 22 23:15:10.940 DEBG up_ds_listen was notified
43517 Sep 22 23:15:10.940 DEBG up_ds_listen process 1082
43518 Sep 22 23:15:10.940 DEBG [A] ack job 1082:83, : downstairs
43519 Sep 22 23:15:10.946 INFO accepted connection, remote_addr: 127.0.0.1:51487, local_addr: 127.0.0.1:46213, task: repair
43520 Sep 22 23:15:10.946 TRCE incoming request, uri: /extent/78/files, method: GET, req_id: e1d1a172-ca0b-4280-8698-76786dcc073c, remote_addr: 127.0.0.1:51487, local_addr: 127.0.0.1:46213, task: repair
43521 Sep 22 23:15:10.946 INFO request completed, latency_us: 214, response_code: 200, uri: /extent/78/files, method: GET, req_id: e1d1a172-ca0b-4280-8698-76786dcc073c, remote_addr: 127.0.0.1:51487, local_addr: 127.0.0.1:46213, task: repair
43522 Sep 22 23:15:10.947 INFO eid:78 Found repair files: ["04E", "04E.db"]
43523 Sep 22 23:15:10.947 TRCE incoming request, uri: /newextent/78/data, method: GET, req_id: f7688a5e-efa4-4398-84ff-80ee6bf08bd8, remote_addr: 127.0.0.1:51487, local_addr: 127.0.0.1:46213, task: repair
43524 Sep 22 23:15:10.947 INFO request completed, latency_us: 329, response_code: 200, uri: /newextent/78/data, method: GET, req_id: f7688a5e-efa4-4398-84ff-80ee6bf08bd8, remote_addr: 127.0.0.1:51487, local_addr: 127.0.0.1:46213, task: repair
43525 Sep 22 23:15:10.952 TRCE incoming request, uri: /newextent/78/db, method: GET, req_id: 887a2791-c791-4d39-b2da-0354100cecd8, remote_addr: 127.0.0.1:51487, local_addr: 127.0.0.1:46213, task: repair
43526 Sep 22 23:15:10.952 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/78/db, method: GET, req_id: 887a2791-c791-4d39-b2da-0354100cecd8, remote_addr: 127.0.0.1:51487, local_addr: 127.0.0.1:46213, task: repair
43527 Sep 22 23:15:10.954 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/04E.copy" to "/tmp/downstairs-vrx8aK6L/00/000/04E.replace"
43528 Sep 22 23:15:10.954 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43529 Sep 22 23:15:10.954 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/04E.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
43530 Sep 22 23:15:10.955 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/04E"
43531 Sep 22 23:15:10.955 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/04E.db"
43532 Sep 22 23:15:10.955 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43533 Sep 22 23:15:10.955 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/04E.replace" to "/tmp/downstairs-vrx8aK6L/00/000/04E.completed"
43534 Sep 22 23:15:10.955 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43535 Sep 22 23:15:10.955 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43536 Sep 22 23:15:10.955 DEBG [0] It's time to notify for 374
43537 Sep 22 23:15:10.955 INFO Completion from [0] id:374 status:true
43538 Sep 22 23:15:10.955 INFO [375/752] Repair commands completed
43539 Sep 22 23:15:10.955 INFO Pop front: ReconcileIO { id: ReconciliationId(375), op: ExtentReopen { repair_id: ReconciliationId(375), extent_id: 78 }, state: ClientData([New, New, New]) }
43540 Sep 22 23:15:10.955 INFO Sent repair work, now wait for resp
43541 Sep 22 23:15:10.955 INFO [0] received reconcile message
43542 Sep 22 23:15:10.955 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(375), op: ExtentReopen { repair_id: ReconciliationId(375), extent_id: 78 }, state: ClientData([InProgress, New, New]) }, : downstairs
43543 Sep 22 23:15:10.955 INFO [0] client ExtentReopen { repair_id: ReconciliationId(375), extent_id: 78 }
43544 Sep 22 23:15:10.955 INFO [1] received reconcile message
43545 Sep 22 23:15:10.955 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(375), op: ExtentReopen { repair_id: ReconciliationId(375), extent_id: 78 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43546 Sep 22 23:15:10.956 INFO [1] client ExtentReopen { repair_id: ReconciliationId(375), extent_id: 78 }
43547 Sep 22 23:15:10.956 INFO [2] received reconcile message
43548 Sep 22 23:15:10.956 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(375), op: ExtentReopen { repair_id: ReconciliationId(375), extent_id: 78 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43549 Sep 22 23:15:10.956 INFO [2] client ExtentReopen { repair_id: ReconciliationId(375), extent_id: 78 }
43550 Sep 22 23:15:10.956 DEBG 375 Reopen extent 78
43551 Sep 22 23:15:10.956 DEBG 375 Reopen extent 78
43552 Sep 22 23:15:10.957 DEBG 375 Reopen extent 78
43553 Sep 22 23:15:10.957 DEBG [2] It's time to notify for 375
43554 Sep 22 23:15:10.958 INFO Completion from [2] id:375 status:true
43555 Sep 22 23:15:10.958 INFO [376/752] Repair commands completed
43556 Sep 22 23:15:10.958 INFO Pop front: ReconcileIO { id: ReconciliationId(376), op: ExtentFlush { repair_id: ReconciliationId(376), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43557 Sep 22 23:15:10.958 INFO Sent repair work, now wait for resp
43558 Sep 22 23:15:10.958 INFO [0] received reconcile message
43559 Sep 22 23:15:10.958 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(376), op: ExtentFlush { repair_id: ReconciliationId(376), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43560 Sep 22 23:15:10.958 INFO [0] client ExtentFlush { repair_id: ReconciliationId(376), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43561 Sep 22 23:15:10.958 INFO [1] received reconcile message
43562 Sep 22 23:15:10.958 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(376), op: ExtentFlush { repair_id: ReconciliationId(376), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43563 Sep 22 23:15:10.958 INFO [1] client ExtentFlush { repair_id: ReconciliationId(376), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43564 Sep 22 23:15:10.958 INFO [2] received reconcile message
43565 Sep 22 23:15:10.958 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(376), op: ExtentFlush { repair_id: ReconciliationId(376), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43566 Sep 22 23:15:10.958 INFO [2] client ExtentFlush { repair_id: ReconciliationId(376), extent_id: 88, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43567 Sep 22 23:15:10.958 DEBG 376 Flush extent 88 with f:2 g:2
43568 Sep 22 23:15:10.958 DEBG Flush just extent 88 with f:2 and g:2
43569 Sep 22 23:15:10.958 DEBG [1] It's time to notify for 376
43570 Sep 22 23:15:10.958 INFO Completion from [1] id:376 status:true
43571 Sep 22 23:15:10.958 INFO [377/752] Repair commands completed
43572 Sep 22 23:15:10.958 INFO Pop front: ReconcileIO { id: ReconciliationId(377), op: ExtentClose { repair_id: ReconciliationId(377), extent_id: 88 }, state: ClientData([New, New, New]) }
43573 Sep 22 23:15:10.958 INFO Sent repair work, now wait for resp
43574 Sep 22 23:15:10.958 INFO [0] received reconcile message
43575 Sep 22 23:15:10.958 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(377), op: ExtentClose { repair_id: ReconciliationId(377), extent_id: 88 }, state: ClientData([InProgress, New, New]) }, : downstairs
43576 Sep 22 23:15:10.958 INFO [0] client ExtentClose { repair_id: ReconciliationId(377), extent_id: 88 }
43577 Sep 22 23:15:10.958 INFO [1] received reconcile message
43578 Sep 22 23:15:10.958 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(377), op: ExtentClose { repair_id: ReconciliationId(377), extent_id: 88 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43579 Sep 22 23:15:10.958 INFO [1] client ExtentClose { repair_id: ReconciliationId(377), extent_id: 88 }
43580 Sep 22 23:15:10.958 INFO [2] received reconcile message
43581 Sep 22 23:15:10.958 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(377), op: ExtentClose { repair_id: ReconciliationId(377), extent_id: 88 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43582 Sep 22 23:15:10.958 INFO [2] client ExtentClose { repair_id: ReconciliationId(377), extent_id: 88 }
43583 Sep 22 23:15:10.959 DEBG 377 Close extent 88
43584 Sep 22 23:15:10.959 DEBG 377 Close extent 88
43585 Sep 22 23:15:10.959 DEBG 377 Close extent 88
43586 Sep 22 23:15:10.959 DEBG [2] It's time to notify for 377
43587 Sep 22 23:15:10.959 INFO Completion from [2] id:377 status:true
43588 Sep 22 23:15:10.960 INFO [378/752] Repair commands completed
43589 Sep 22 23:15:10.960 INFO Pop front: ReconcileIO { id: ReconciliationId(378), op: ExtentRepair { repair_id: ReconciliationId(378), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43590 Sep 22 23:15:10.960 INFO Sent repair work, now wait for resp
43591 Sep 22 23:15:10.960 INFO [0] received reconcile message
43592 Sep 22 23:15:10.960 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(378), op: ExtentRepair { repair_id: ReconciliationId(378), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43593 Sep 22 23:15:10.960 INFO [0] client ExtentRepair { repair_id: ReconciliationId(378), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43594 Sep 22 23:15:10.960 INFO [0] Sending repair request ReconciliationId(378)
43595 Sep 22 23:15:10.960 INFO [1] received reconcile message
43596 Sep 22 23:15:10.960 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(378), op: ExtentRepair { repair_id: ReconciliationId(378), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43597 Sep 22 23:15:10.960 INFO [1] client ExtentRepair { repair_id: ReconciliationId(378), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43598 Sep 22 23:15:10.960 INFO [1] No action required ReconciliationId(378)
43599 Sep 22 23:15:10.960 INFO [2] received reconcile message
43600 Sep 22 23:15:10.960 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(378), op: ExtentRepair { repair_id: ReconciliationId(378), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43601 Sep 22 23:15:10.960 INFO [2] client ExtentRepair { repair_id: ReconciliationId(378), extent_id: 88, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43602 Sep 22 23:15:10.960 INFO [2] No action required ReconciliationId(378)
43603 Sep 22 23:15:10.960 DEBG 378 Repair extent 88 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
43604 Sep 22 23:15:10.960 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/058.copy"
43605 Sep 22 23:15:10.993 DEBG up_ds_listen checked 1 jobs, back to waiting
43606 Sep 22 23:15:11.025 INFO accepted connection, remote_addr: 127.0.0.1:56140, local_addr: 127.0.0.1:46213, task: repair
43607 Sep 22 23:15:11.026 TRCE incoming request, uri: /extent/88/files, method: GET, req_id: 3e9ff68a-6866-4f07-99fa-9bc4ecbb0071, remote_addr: 127.0.0.1:56140, local_addr: 127.0.0.1:46213, task: repair
43608 Sep 22 23:15:11.026 INFO request completed, latency_us: 262, response_code: 200, uri: /extent/88/files, method: GET, req_id: 3e9ff68a-6866-4f07-99fa-9bc4ecbb0071, remote_addr: 127.0.0.1:56140, local_addr: 127.0.0.1:46213, task: repair
43609 Sep 22 23:15:11.026 INFO eid:88 Found repair files: ["058", "058.db"]
43610 Sep 22 23:15:11.027 TRCE incoming request, uri: /newextent/88/data, method: GET, req_id: 8ba18d7d-ab82-4522-8f19-171ca18ab892, remote_addr: 127.0.0.1:56140, local_addr: 127.0.0.1:46213, task: repair
43611 Sep 22 23:15:11.027 INFO request completed, latency_us: 342, response_code: 200, uri: /newextent/88/data, method: GET, req_id: 8ba18d7d-ab82-4522-8f19-171ca18ab892, remote_addr: 127.0.0.1:56140, local_addr: 127.0.0.1:46213, task: repair
43612 Sep 22 23:15:11.032 TRCE incoming request, uri: /newextent/88/db, method: GET, req_id: 2f697082-c02d-4298-8fdb-3a4fd5e21750, remote_addr: 127.0.0.1:56140, local_addr: 127.0.0.1:46213, task: repair
43613 Sep 22 23:15:11.032 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/88/db, method: GET, req_id: 2f697082-c02d-4298-8fdb-3a4fd5e21750, remote_addr: 127.0.0.1:56140, local_addr: 127.0.0.1:46213, task: repair
43614 Sep 22 23:15:11.033 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/058.copy" to "/tmp/downstairs-vrx8aK6L/00/000/058.replace"
43615 Sep 22 23:15:11.034 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43616 Sep 22 23:15:11.035 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/058.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
43617 Sep 22 23:15:11.035 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/058"
43618 Sep 22 23:15:11.035 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/058.db"
43619 Sep 22 23:15:11.035 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43620 Sep 22 23:15:11.035 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/058.replace" to "/tmp/downstairs-vrx8aK6L/00/000/058.completed"
43621 Sep 22 23:15:11.035 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43622 Sep 22 23:15:11.035 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43623 Sep 22 23:15:11.035 DEBG [0] It's time to notify for 378
43624 Sep 22 23:15:11.035 INFO Completion from [0] id:378 status:true
43625 Sep 22 23:15:11.035 INFO [379/752] Repair commands completed
43626 Sep 22 23:15:11.035 INFO Pop front: ReconcileIO { id: ReconciliationId(379), op: ExtentReopen { repair_id: ReconciliationId(379), extent_id: 88 }, state: ClientData([New, New, New]) }
43627 Sep 22 23:15:11.036 INFO Sent repair work, now wait for resp
43628 Sep 22 23:15:11.036 INFO [0] received reconcile message
43629 Sep 22 23:15:11.036 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(379), op: ExtentReopen { repair_id: ReconciliationId(379), extent_id: 88 }, state: ClientData([InProgress, New, New]) }, : downstairs
43630 Sep 22 23:15:11.036 INFO [0] client ExtentReopen { repair_id: ReconciliationId(379), extent_id: 88 }
43631 Sep 22 23:15:11.036 INFO [1] received reconcile message
43632 Sep 22 23:15:11.036 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(379), op: ExtentReopen { repair_id: ReconciliationId(379), extent_id: 88 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43633 Sep 22 23:15:11.036 INFO [1] client ExtentReopen { repair_id: ReconciliationId(379), extent_id: 88 }
43634 Sep 22 23:15:11.036 INFO [2] received reconcile message
43635 Sep 22 23:15:11.036 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(379), op: ExtentReopen { repair_id: ReconciliationId(379), extent_id: 88 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43636 Sep 22 23:15:11.036 INFO [2] client ExtentReopen { repair_id: ReconciliationId(379), extent_id: 88 }
43637 Sep 22 23:15:11.036 DEBG 379 Reopen extent 88
43638 Sep 22 23:15:11.037 DEBG 379 Reopen extent 88
43639 Sep 22 23:15:11.037 DEBG 379 Reopen extent 88
43640 Sep 22 23:15:11.038 DEBG [2] It's time to notify for 379
43641 Sep 22 23:15:11.038 INFO Completion from [2] id:379 status:true
43642 Sep 22 23:15:11.038 INFO [380/752] Repair commands completed
43643 Sep 22 23:15:11.038 INFO Pop front: ReconcileIO { id: ReconciliationId(380), op: ExtentFlush { repair_id: ReconciliationId(380), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43644 Sep 22 23:15:11.038 INFO Sent repair work, now wait for resp
43645 Sep 22 23:15:11.038 INFO [0] received reconcile message
43646 Sep 22 23:15:11.038 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(380), op: ExtentFlush { repair_id: ReconciliationId(380), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43647 Sep 22 23:15:11.038 INFO [0] client ExtentFlush { repair_id: ReconciliationId(380), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43648 Sep 22 23:15:11.038 INFO [1] received reconcile message
43649 Sep 22 23:15:11.038 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(380), op: ExtentFlush { repair_id: ReconciliationId(380), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43650 Sep 22 23:15:11.038 INFO [1] client ExtentFlush { repair_id: ReconciliationId(380), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43651 Sep 22 23:15:11.038 INFO [2] received reconcile message
43652 Sep 22 23:15:11.038 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(380), op: ExtentFlush { repair_id: ReconciliationId(380), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43653 Sep 22 23:15:11.038 INFO [2] client ExtentFlush { repair_id: ReconciliationId(380), extent_id: 82, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43654 Sep 22 23:15:11.038 DEBG 380 Flush extent 82 with f:2 g:2
43655 Sep 22 23:15:11.038 DEBG Flush just extent 82 with f:2 and g:2
43656 Sep 22 23:15:11.038 DEBG [1] It's time to notify for 380
43657 Sep 22 23:15:11.038 INFO Completion from [1] id:380 status:true
43658 Sep 22 23:15:11.038 INFO [381/752] Repair commands completed
43659 Sep 22 23:15:11.038 INFO Pop front: ReconcileIO { id: ReconciliationId(381), op: ExtentClose { repair_id: ReconciliationId(381), extent_id: 82 }, state: ClientData([New, New, New]) }
43660 Sep 22 23:15:11.039 INFO Sent repair work, now wait for resp
43661 Sep 22 23:15:11.039 INFO [0] received reconcile message
43662 Sep 22 23:15:11.039 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(381), op: ExtentClose { repair_id: ReconciliationId(381), extent_id: 82 }, state: ClientData([InProgress, New, New]) }, : downstairs
43663 Sep 22 23:15:11.039 INFO [0] client ExtentClose { repair_id: ReconciliationId(381), extent_id: 82 }
43664 Sep 22 23:15:11.039 INFO [1] received reconcile message
43665 Sep 22 23:15:11.039 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(381), op: ExtentClose { repair_id: ReconciliationId(381), extent_id: 82 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43666 Sep 22 23:15:11.039 INFO [1] client ExtentClose { repair_id: ReconciliationId(381), extent_id: 82 }
43667 Sep 22 23:15:11.039 INFO [2] received reconcile message
43668 Sep 22 23:15:11.039 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(381), op: ExtentClose { repair_id: ReconciliationId(381), extent_id: 82 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43669 Sep 22 23:15:11.039 INFO [2] client ExtentClose { repair_id: ReconciliationId(381), extent_id: 82 }
43670 Sep 22 23:15:11.039 DEBG 381 Close extent 82
43671 Sep 22 23:15:11.039 DEBG 381 Close extent 82
43672 Sep 22 23:15:11.039 DEBG 381 Close extent 82
43673 Sep 22 23:15:11.040 DEBG [2] It's time to notify for 381
43674 Sep 22 23:15:11.040 INFO Completion from [2] id:381 status:true
43675 Sep 22 23:15:11.040 INFO [382/752] Repair commands completed
43676 Sep 22 23:15:11.040 INFO Pop front: ReconcileIO { id: ReconciliationId(382), op: ExtentRepair { repair_id: ReconciliationId(382), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43677 Sep 22 23:15:11.040 INFO Sent repair work, now wait for resp
43678 Sep 22 23:15:11.040 INFO [0] received reconcile message
43679 Sep 22 23:15:11.040 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(382), op: ExtentRepair { repair_id: ReconciliationId(382), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43680 Sep 22 23:15:11.040 INFO [0] client ExtentRepair { repair_id: ReconciliationId(382), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43681 Sep 22 23:15:11.040 INFO [0] Sending repair request ReconciliationId(382)
43682 Sep 22 23:15:11.040 INFO [1] received reconcile message
43683 Sep 22 23:15:11.040 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(382), op: ExtentRepair { repair_id: ReconciliationId(382), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43684 Sep 22 23:15:11.040 INFO [1] client ExtentRepair { repair_id: ReconciliationId(382), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43685 Sep 22 23:15:11.040 INFO [1] No action required ReconciliationId(382)
43686 Sep 22 23:15:11.040 INFO [2] received reconcile message
43687 Sep 22 23:15:11.040 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(382), op: ExtentRepair { repair_id: ReconciliationId(382), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43688 Sep 22 23:15:11.040 INFO [2] client ExtentRepair { repair_id: ReconciliationId(382), extent_id: 82, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43689 Sep 22 23:15:11.040 INFO [2] No action required ReconciliationId(382)
43690 Sep 22 23:15:11.040 DEBG 382 Repair extent 82 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
43691 Sep 22 23:15:11.040 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/052.copy"
43692 Sep 22 23:15:11.103 INFO accepted connection, remote_addr: 127.0.0.1:43157, local_addr: 127.0.0.1:46213, task: repair
43693 Sep 22 23:15:11.103 TRCE incoming request, uri: /extent/82/files, method: GET, req_id: 29a729e2-51fa-47c7-bac6-926c59854eda, remote_addr: 127.0.0.1:43157, local_addr: 127.0.0.1:46213, task: repair
43694 Sep 22 23:15:11.103 INFO request completed, latency_us: 213, response_code: 200, uri: /extent/82/files, method: GET, req_id: 29a729e2-51fa-47c7-bac6-926c59854eda, remote_addr: 127.0.0.1:43157, local_addr: 127.0.0.1:46213, task: repair
43695 Sep 22 23:15:11.104 INFO eid:82 Found repair files: ["052", "052.db"]
43696 Sep 22 23:15:11.104 TRCE incoming request, uri: /newextent/82/data, method: GET, req_id: 0cc2d882-1c0d-4d89-a4a2-aa63b291d923, remote_addr: 127.0.0.1:43157, local_addr: 127.0.0.1:46213, task: repair
43697 Sep 22 23:15:11.104 INFO request completed, latency_us: 337, response_code: 200, uri: /newextent/82/data, method: GET, req_id: 0cc2d882-1c0d-4d89-a4a2-aa63b291d923, remote_addr: 127.0.0.1:43157, local_addr: 127.0.0.1:46213, task: repair
43698 Sep 22 23:15:11.109 TRCE incoming request, uri: /newextent/82/db, method: GET, req_id: e8d9fe7f-f633-4461-ac56-fec2a6ac9a82, remote_addr: 127.0.0.1:43157, local_addr: 127.0.0.1:46213, task: repair
43699 Sep 22 23:15:11.110 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/82/db, method: GET, req_id: e8d9fe7f-f633-4461-ac56-fec2a6ac9a82, remote_addr: 127.0.0.1:43157, local_addr: 127.0.0.1:46213, task: repair
43700 Sep 22 23:15:11.111 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/052.copy" to "/tmp/downstairs-vrx8aK6L/00/000/052.replace"
43701 Sep 22 23:15:11.111 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43702 Sep 22 23:15:11.112 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/052.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
43703 Sep 22 23:15:11.112 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/052"
43704 Sep 22 23:15:11.112 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/052.db"
43705 Sep 22 23:15:11.112 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43706 Sep 22 23:15:11.112 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/052.replace" to "/tmp/downstairs-vrx8aK6L/00/000/052.completed"
43707 Sep 22 23:15:11.112 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43708 Sep 22 23:15:11.112 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43709 Sep 22 23:15:11.112 DEBG [0] It's time to notify for 382
43710 Sep 22 23:15:11.112 INFO Completion from [0] id:382 status:true
43711 Sep 22 23:15:11.112 INFO [383/752] Repair commands completed
43712 Sep 22 23:15:11.113 INFO Pop front: ReconcileIO { id: ReconciliationId(383), op: ExtentReopen { repair_id: ReconciliationId(383), extent_id: 82 }, state: ClientData([New, New, New]) }
43713 Sep 22 23:15:11.113 INFO Sent repair work, now wait for resp
43714 Sep 22 23:15:11.113 INFO [0] received reconcile message
43715 Sep 22 23:15:11.113 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(383), op: ExtentReopen { repair_id: ReconciliationId(383), extent_id: 82 }, state: ClientData([InProgress, New, New]) }, : downstairs
43716 Sep 22 23:15:11.113 INFO [0] client ExtentReopen { repair_id: ReconciliationId(383), extent_id: 82 }
43717 Sep 22 23:15:11.113 INFO [1] received reconcile message
43718 Sep 22 23:15:11.113 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(383), op: ExtentReopen { repair_id: ReconciliationId(383), extent_id: 82 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43719 Sep 22 23:15:11.113 INFO [1] client ExtentReopen { repair_id: ReconciliationId(383), extent_id: 82 }
43720 Sep 22 23:15:11.113 INFO [2] received reconcile message
43721 Sep 22 23:15:11.113 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(383), op: ExtentReopen { repair_id: ReconciliationId(383), extent_id: 82 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43722 Sep 22 23:15:11.113 INFO [2] client ExtentReopen { repair_id: ReconciliationId(383), extent_id: 82 }
43723 Sep 22 23:15:11.113 DEBG 383 Reopen extent 82
43724 Sep 22 23:15:11.114 DEBG 383 Reopen extent 82
43725 Sep 22 23:15:11.114 DEBG 383 Reopen extent 82
43726 Sep 22 23:15:11.115 DEBG [2] It's time to notify for 383
43727 Sep 22 23:15:11.115 INFO Completion from [2] id:383 status:true
43728 Sep 22 23:15:11.115 INFO [384/752] Repair commands completed
43729 Sep 22 23:15:11.115 INFO Pop front: ReconcileIO { id: ReconciliationId(384), op: ExtentFlush { repair_id: ReconciliationId(384), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43730 Sep 22 23:15:11.115 INFO Sent repair work, now wait for resp
43731 Sep 22 23:15:11.115 INFO [0] received reconcile message
43732 Sep 22 23:15:11.115 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(384), op: ExtentFlush { repair_id: ReconciliationId(384), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43733 Sep 22 23:15:11.115 INFO [0] client ExtentFlush { repair_id: ReconciliationId(384), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43734 Sep 22 23:15:11.115 INFO [1] received reconcile message
43735 Sep 22 23:15:11.115 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(384), op: ExtentFlush { repair_id: ReconciliationId(384), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43736 Sep 22 23:15:11.115 INFO [1] client ExtentFlush { repair_id: ReconciliationId(384), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43737 Sep 22 23:15:11.115 INFO [2] received reconcile message
43738 Sep 22 23:15:11.115 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(384), op: ExtentFlush { repair_id: ReconciliationId(384), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43739 Sep 22 23:15:11.115 INFO [2] client ExtentFlush { repair_id: ReconciliationId(384), extent_id: 86, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43740 Sep 22 23:15:11.115 DEBG 384 Flush extent 86 with f:2 g:2
43741 Sep 22 23:15:11.115 DEBG Flush just extent 86 with f:2 and g:2
43742 Sep 22 23:15:11.115 DEBG [1] It's time to notify for 384
43743 Sep 22 23:15:11.115 INFO Completion from [1] id:384 status:true
43744 Sep 22 23:15:11.115 INFO [385/752] Repair commands completed
43745 Sep 22 23:15:11.115 INFO Pop front: ReconcileIO { id: ReconciliationId(385), op: ExtentClose { repair_id: ReconciliationId(385), extent_id: 86 }, state: ClientData([New, New, New]) }
43746 Sep 22 23:15:11.115 INFO Sent repair work, now wait for resp
43747 Sep 22 23:15:11.115 INFO [0] received reconcile message
43748 Sep 22 23:15:11.115 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(385), op: ExtentClose { repair_id: ReconciliationId(385), extent_id: 86 }, state: ClientData([InProgress, New, New]) }, : downstairs
43749 Sep 22 23:15:11.115 INFO [0] client ExtentClose { repair_id: ReconciliationId(385), extent_id: 86 }
43750 Sep 22 23:15:11.115 INFO [1] received reconcile message
43751 Sep 22 23:15:11.115 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(385), op: ExtentClose { repair_id: ReconciliationId(385), extent_id: 86 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43752 Sep 22 23:15:11.115 INFO [1] client ExtentClose { repair_id: ReconciliationId(385), extent_id: 86 }
43753 Sep 22 23:15:11.115 INFO [2] received reconcile message
43754 Sep 22 23:15:11.116 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(385), op: ExtentClose { repair_id: ReconciliationId(385), extent_id: 86 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43755 Sep 22 23:15:11.116 INFO [2] client ExtentClose { repair_id: ReconciliationId(385), extent_id: 86 }
43756 Sep 22 23:15:11.116 DEBG 385 Close extent 86
43757 Sep 22 23:15:11.116 DEBG 385 Close extent 86
43758 Sep 22 23:15:11.116 DEBG 385 Close extent 86
43759 Sep 22 23:15:11.117 DEBG [2] It's time to notify for 385
43760 Sep 22 23:15:11.117 INFO Completion from [2] id:385 status:true
43761 Sep 22 23:15:11.117 INFO [386/752] Repair commands completed
43762 Sep 22 23:15:11.117 INFO Pop front: ReconcileIO { id: ReconciliationId(386), op: ExtentRepair { repair_id: ReconciliationId(386), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43763 Sep 22 23:15:11.117 INFO Sent repair work, now wait for resp
43764 Sep 22 23:15:11.117 INFO [0] received reconcile message
43765 Sep 22 23:15:11.117 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(386), op: ExtentRepair { repair_id: ReconciliationId(386), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43766 Sep 22 23:15:11.117 INFO [0] client ExtentRepair { repair_id: ReconciliationId(386), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43767 Sep 22 23:15:11.117 INFO [0] Sending repair request ReconciliationId(386)
43768 Sep 22 23:15:11.117 INFO [1] received reconcile message
43769 Sep 22 23:15:11.117 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(386), op: ExtentRepair { repair_id: ReconciliationId(386), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43770 Sep 22 23:15:11.117 INFO [1] client ExtentRepair { repair_id: ReconciliationId(386), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43771 Sep 22 23:15:11.117 INFO [1] No action required ReconciliationId(386)
43772 Sep 22 23:15:11.117 INFO [2] received reconcile message
43773 Sep 22 23:15:11.117 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(386), op: ExtentRepair { repair_id: ReconciliationId(386), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43774 Sep 22 23:15:11.117 INFO [2] client ExtentRepair { repair_id: ReconciliationId(386), extent_id: 86, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43775 Sep 22 23:15:11.117 INFO [2] No action required ReconciliationId(386)
43776 Sep 22 23:15:11.117 DEBG 386 Repair extent 86 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
43777 Sep 22 23:15:11.117 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/056.copy"
43778 Sep 22 23:15:11.181 INFO accepted connection, remote_addr: 127.0.0.1:49281, local_addr: 127.0.0.1:46213, task: repair
43779 Sep 22 23:15:11.181 TRCE incoming request, uri: /extent/86/files, method: GET, req_id: 28b29fe2-23ca-4872-95e1-b6cd82a70413, remote_addr: 127.0.0.1:49281, local_addr: 127.0.0.1:46213, task: repair
43780 Sep 22 23:15:11.181 INFO request completed, latency_us: 204, response_code: 200, uri: /extent/86/files, method: GET, req_id: 28b29fe2-23ca-4872-95e1-b6cd82a70413, remote_addr: 127.0.0.1:49281, local_addr: 127.0.0.1:46213, task: repair
43781 Sep 22 23:15:11.182 INFO eid:86 Found repair files: ["056", "056.db"]
43782 Sep 22 23:15:11.182 TRCE incoming request, uri: /newextent/86/data, method: GET, req_id: e22ad7ab-0baf-4d2f-8f29-0c5c4eb3c278, remote_addr: 127.0.0.1:49281, local_addr: 127.0.0.1:46213, task: repair
43783 Sep 22 23:15:11.182 INFO request completed, latency_us: 251, response_code: 200, uri: /newextent/86/data, method: GET, req_id: e22ad7ab-0baf-4d2f-8f29-0c5c4eb3c278, remote_addr: 127.0.0.1:49281, local_addr: 127.0.0.1:46213, task: repair
43784 Sep 22 23:15:11.187 TRCE incoming request, uri: /newextent/86/db, method: GET, req_id: f123ad6f-ec93-45af-ac28-ceb1400879f2, remote_addr: 127.0.0.1:49281, local_addr: 127.0.0.1:46213, task: repair
43785 Sep 22 23:15:11.187 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/86/db, method: GET, req_id: f123ad6f-ec93-45af-ac28-ceb1400879f2, remote_addr: 127.0.0.1:49281, local_addr: 127.0.0.1:46213, task: repair
43786 Sep 22 23:15:11.189 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/056.copy" to "/tmp/downstairs-vrx8aK6L/00/000/056.replace"
43787 Sep 22 23:15:11.189 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43788 Sep 22 23:15:11.189 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/056.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
43789 Sep 22 23:15:11.190 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/056"
43790 Sep 22 23:15:11.190 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/056.db"
43791 Sep 22 23:15:11.190 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43792 Sep 22 23:15:11.190 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/056.replace" to "/tmp/downstairs-vrx8aK6L/00/000/056.completed"
43793 Sep 22 23:15:11.190 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43794 Sep 22 23:15:11.190 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43795 Sep 22 23:15:11.190 DEBG [0] It's time to notify for 386
43796 Sep 22 23:15:11.190 INFO Completion from [0] id:386 status:true
43797 Sep 22 23:15:11.190 INFO [387/752] Repair commands completed
43798 Sep 22 23:15:11.190 INFO Pop front: ReconcileIO { id: ReconciliationId(387), op: ExtentReopen { repair_id: ReconciliationId(387), extent_id: 86 }, state: ClientData([New, New, New]) }
43799 Sep 22 23:15:11.190 INFO Sent repair work, now wait for resp
43800 Sep 22 23:15:11.190 INFO [0] received reconcile message
43801 Sep 22 23:15:11.190 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(387), op: ExtentReopen { repair_id: ReconciliationId(387), extent_id: 86 }, state: ClientData([InProgress, New, New]) }, : downstairs
43802 Sep 22 23:15:11.190 INFO [0] client ExtentReopen { repair_id: ReconciliationId(387), extent_id: 86 }
43803 Sep 22 23:15:11.190 INFO [1] received reconcile message
43804 Sep 22 23:15:11.190 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(387), op: ExtentReopen { repair_id: ReconciliationId(387), extent_id: 86 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43805 Sep 22 23:15:11.190 INFO [1] client ExtentReopen { repair_id: ReconciliationId(387), extent_id: 86 }
43806 Sep 22 23:15:11.191 INFO [2] received reconcile message
43807 Sep 22 23:15:11.191 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(387), op: ExtentReopen { repair_id: ReconciliationId(387), extent_id: 86 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43808 Sep 22 23:15:11.191 INFO [2] client ExtentReopen { repair_id: ReconciliationId(387), extent_id: 86 }
43809 Sep 22 23:15:11.191 DEBG 387 Reopen extent 86
43810 Sep 22 23:15:11.191 DEBG 387 Reopen extent 86
43811 Sep 22 23:15:11.192 DEBG 387 Reopen extent 86
43812 Sep 22 23:15:11.192 DEBG [2] It's time to notify for 387
43813 Sep 22 23:15:11.192 INFO Completion from [2] id:387 status:true
43814 Sep 22 23:15:11.192 INFO [388/752] Repair commands completed
43815 Sep 22 23:15:11.192 INFO Pop front: ReconcileIO { id: ReconciliationId(388), op: ExtentFlush { repair_id: ReconciliationId(388), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43816 Sep 22 23:15:11.192 INFO Sent repair work, now wait for resp
43817 Sep 22 23:15:11.193 INFO [0] received reconcile message
43818 Sep 22 23:15:11.193 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(388), op: ExtentFlush { repair_id: ReconciliationId(388), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43819 Sep 22 23:15:11.193 INFO [0] client ExtentFlush { repair_id: ReconciliationId(388), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43820 Sep 22 23:15:11.193 INFO [1] received reconcile message
43821 Sep 22 23:15:11.193 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(388), op: ExtentFlush { repair_id: ReconciliationId(388), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43822 Sep 22 23:15:11.193 INFO [1] client ExtentFlush { repair_id: ReconciliationId(388), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43823 Sep 22 23:15:11.193 INFO [2] received reconcile message
43824 Sep 22 23:15:11.193 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(388), op: ExtentFlush { repair_id: ReconciliationId(388), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43825 Sep 22 23:15:11.193 INFO [2] client ExtentFlush { repair_id: ReconciliationId(388), extent_id: 94, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43826 Sep 22 23:15:11.193 DEBG 388 Flush extent 94 with f:2 g:2
43827 Sep 22 23:15:11.193 DEBG Flush just extent 94 with f:2 and g:2
43828 Sep 22 23:15:11.193 DEBG [1] It's time to notify for 388
43829 Sep 22 23:15:11.193 INFO Completion from [1] id:388 status:true
43830 Sep 22 23:15:11.193 INFO [389/752] Repair commands completed
43831 Sep 22 23:15:11.193 INFO Pop front: ReconcileIO { id: ReconciliationId(389), op: ExtentClose { repair_id: ReconciliationId(389), extent_id: 94 }, state: ClientData([New, New, New]) }
43832 Sep 22 23:15:11.193 INFO Sent repair work, now wait for resp
43833 Sep 22 23:15:11.193 INFO [0] received reconcile message
43834 Sep 22 23:15:11.193 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(389), op: ExtentClose { repair_id: ReconciliationId(389), extent_id: 94 }, state: ClientData([InProgress, New, New]) }, : downstairs
43835 Sep 22 23:15:11.193 INFO [0] client ExtentClose { repair_id: ReconciliationId(389), extent_id: 94 }
43836 Sep 22 23:15:11.193 INFO [1] received reconcile message
43837 Sep 22 23:15:11.193 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(389), op: ExtentClose { repair_id: ReconciliationId(389), extent_id: 94 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43838 Sep 22 23:15:11.193 INFO [1] client ExtentClose { repair_id: ReconciliationId(389), extent_id: 94 }
43839 Sep 22 23:15:11.193 INFO [2] received reconcile message
43840 Sep 22 23:15:11.193 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(389), op: ExtentClose { repair_id: ReconciliationId(389), extent_id: 94 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43841 Sep 22 23:15:11.193 INFO [2] client ExtentClose { repair_id: ReconciliationId(389), extent_id: 94 }
43842 Sep 22 23:15:11.193 DEBG 389 Close extent 94
43843 Sep 22 23:15:11.194 DEBG 389 Close extent 94
43844 Sep 22 23:15:11.194 DEBG 389 Close extent 94
43845 Sep 22 23:15:11.194 DEBG [2] It's time to notify for 389
43846 Sep 22 23:15:11.194 INFO Completion from [2] id:389 status:true
43847 Sep 22 23:15:11.194 INFO [390/752] Repair commands completed
43848 Sep 22 23:15:11.194 INFO Pop front: ReconcileIO { id: ReconciliationId(390), op: ExtentRepair { repair_id: ReconciliationId(390), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43849 Sep 22 23:15:11.194 INFO Sent repair work, now wait for resp
43850 Sep 22 23:15:11.194 INFO [0] received reconcile message
43851 Sep 22 23:15:11.194 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(390), op: ExtentRepair { repair_id: ReconciliationId(390), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43852 Sep 22 23:15:11.195 INFO [0] client ExtentRepair { repair_id: ReconciliationId(390), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43853 Sep 22 23:15:11.195 INFO [0] Sending repair request ReconciliationId(390)
43854 Sep 22 23:15:11.195 INFO [1] received reconcile message
43855 Sep 22 23:15:11.195 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(390), op: ExtentRepair { repair_id: ReconciliationId(390), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43856 Sep 22 23:15:11.195 INFO [1] client ExtentRepair { repair_id: ReconciliationId(390), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43857 Sep 22 23:15:11.195 INFO [1] No action required ReconciliationId(390)
43858 Sep 22 23:15:11.195 INFO [2] received reconcile message
43859 Sep 22 23:15:11.195 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(390), op: ExtentRepair { repair_id: ReconciliationId(390), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43860 Sep 22 23:15:11.195 INFO [2] client ExtentRepair { repair_id: ReconciliationId(390), extent_id: 94, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43861 Sep 22 23:15:11.195 INFO [2] No action required ReconciliationId(390)
43862 Sep 22 23:15:11.195 DEBG 390 Repair extent 94 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
43863 Sep 22 23:15:11.195 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/05E.copy"
43864 Sep 22 23:15:11.258 INFO accepted connection, remote_addr: 127.0.0.1:52448, local_addr: 127.0.0.1:46213, task: repair
43865 Sep 22 23:15:11.259 TRCE incoming request, uri: /extent/94/files, method: GET, req_id: 69627f5d-a5af-4826-95fc-a878ab23d9d5, remote_addr: 127.0.0.1:52448, local_addr: 127.0.0.1:46213, task: repair
43866 Sep 22 23:15:11.259 INFO request completed, latency_us: 193, response_code: 200, uri: /extent/94/files, method: GET, req_id: 69627f5d-a5af-4826-95fc-a878ab23d9d5, remote_addr: 127.0.0.1:52448, local_addr: 127.0.0.1:46213, task: repair
43867 Sep 22 23:15:11.259 INFO eid:94 Found repair files: ["05E", "05E.db"]
43868 Sep 22 23:15:11.259 TRCE incoming request, uri: /newextent/94/data, method: GET, req_id: 5bfb62ea-9451-4ec0-a544-a54522c10853, remote_addr: 127.0.0.1:52448, local_addr: 127.0.0.1:46213, task: repair
43869 Sep 22 23:15:11.260 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/94/data, method: GET, req_id: 5bfb62ea-9451-4ec0-a544-a54522c10853, remote_addr: 127.0.0.1:52448, local_addr: 127.0.0.1:46213, task: repair
43870 Sep 22 23:15:11.265 TRCE incoming request, uri: /newextent/94/db, method: GET, req_id: 61f60823-9fd7-4462-a4df-3bc313fc8e6c, remote_addr: 127.0.0.1:52448, local_addr: 127.0.0.1:46213, task: repair
43871 Sep 22 23:15:11.265 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/94/db, method: GET, req_id: 61f60823-9fd7-4462-a4df-3bc313fc8e6c, remote_addr: 127.0.0.1:52448, local_addr: 127.0.0.1:46213, task: repair
43872 Sep 22 23:15:11.266 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/05E.copy" to "/tmp/downstairs-vrx8aK6L/00/000/05E.replace"
43873 Sep 22 23:15:11.266 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43874 Sep 22 23:15:11.267 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/05E.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
43875 Sep 22 23:15:11.267 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/05E"
43876 Sep 22 23:15:11.267 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/05E.db"
43877 Sep 22 23:15:11.267 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43878 Sep 22 23:15:11.267 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/05E.replace" to "/tmp/downstairs-vrx8aK6L/00/000/05E.completed"
43879 Sep 22 23:15:11.267 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43880 Sep 22 23:15:11.267 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43881 Sep 22 23:15:11.267 DEBG [0] It's time to notify for 390
43882 Sep 22 23:15:11.268 INFO Completion from [0] id:390 status:true
43883 Sep 22 23:15:11.268 INFO [391/752] Repair commands completed
43884 Sep 22 23:15:11.268 INFO Pop front: ReconcileIO { id: ReconciliationId(391), op: ExtentReopen { repair_id: ReconciliationId(391), extent_id: 94 }, state: ClientData([New, New, New]) }
43885 Sep 22 23:15:11.268 INFO Sent repair work, now wait for resp
43886 Sep 22 23:15:11.268 INFO [0] received reconcile message
43887 Sep 22 23:15:11.268 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(391), op: ExtentReopen { repair_id: ReconciliationId(391), extent_id: 94 }, state: ClientData([InProgress, New, New]) }, : downstairs
43888 Sep 22 23:15:11.268 INFO [0] client ExtentReopen { repair_id: ReconciliationId(391), extent_id: 94 }
43889 Sep 22 23:15:11.268 INFO [1] received reconcile message
43890 Sep 22 23:15:11.268 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(391), op: ExtentReopen { repair_id: ReconciliationId(391), extent_id: 94 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43891 Sep 22 23:15:11.268 INFO [1] client ExtentReopen { repair_id: ReconciliationId(391), extent_id: 94 }
43892 Sep 22 23:15:11.268 INFO [2] received reconcile message
43893 Sep 22 23:15:11.268 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(391), op: ExtentReopen { repair_id: ReconciliationId(391), extent_id: 94 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43894 Sep 22 23:15:11.268 INFO [2] client ExtentReopen { repair_id: ReconciliationId(391), extent_id: 94 }
43895 Sep 22 23:15:11.268 DEBG 391 Reopen extent 94
43896 Sep 22 23:15:11.269 DEBG 391 Reopen extent 94
43897 Sep 22 23:15:11.269 DEBG 391 Reopen extent 94
43898 Sep 22 23:15:11.270 DEBG [2] It's time to notify for 391
43899 Sep 22 23:15:11.270 INFO Completion from [2] id:391 status:true
43900 Sep 22 23:15:11.270 INFO [392/752] Repair commands completed
43901 Sep 22 23:15:11.270 INFO Pop front: ReconcileIO { id: ReconciliationId(392), op: ExtentFlush { repair_id: ReconciliationId(392), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43902 Sep 22 23:15:11.270 INFO Sent repair work, now wait for resp
43903 Sep 22 23:15:11.270 INFO [0] received reconcile message
43904 Sep 22 23:15:11.270 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(392), op: ExtentFlush { repair_id: ReconciliationId(392), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43905 Sep 22 23:15:11.270 INFO [0] client ExtentFlush { repair_id: ReconciliationId(392), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43906 Sep 22 23:15:11.270 INFO [1] received reconcile message
43907 Sep 22 23:15:11.270 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(392), op: ExtentFlush { repair_id: ReconciliationId(392), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43908 Sep 22 23:15:11.270 INFO [1] client ExtentFlush { repair_id: ReconciliationId(392), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43909 Sep 22 23:15:11.270 INFO [2] received reconcile message
43910 Sep 22 23:15:11.270 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(392), op: ExtentFlush { repair_id: ReconciliationId(392), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43911 Sep 22 23:15:11.270 INFO [2] client ExtentFlush { repair_id: ReconciliationId(392), extent_id: 145, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43912 Sep 22 23:15:11.270 DEBG 392 Flush extent 145 with f:2 g:2
43913 Sep 22 23:15:11.270 DEBG Flush just extent 145 with f:2 and g:2
43914 Sep 22 23:15:11.270 DEBG [1] It's time to notify for 392
43915 Sep 22 23:15:11.270 INFO Completion from [1] id:392 status:true
43916 Sep 22 23:15:11.270 INFO [393/752] Repair commands completed
43917 Sep 22 23:15:11.270 INFO Pop front: ReconcileIO { id: ReconciliationId(393), op: ExtentClose { repair_id: ReconciliationId(393), extent_id: 145 }, state: ClientData([New, New, New]) }
43918 Sep 22 23:15:11.270 INFO Sent repair work, now wait for resp
43919 Sep 22 23:15:11.270 INFO [0] received reconcile message
43920 Sep 22 23:15:11.270 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(393), op: ExtentClose { repair_id: ReconciliationId(393), extent_id: 145 }, state: ClientData([InProgress, New, New]) }, : downstairs
43921 Sep 22 23:15:11.270 INFO [0] client ExtentClose { repair_id: ReconciliationId(393), extent_id: 145 }
43922 Sep 22 23:15:11.270 INFO [1] received reconcile message
43923 Sep 22 23:15:11.270 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(393), op: ExtentClose { repair_id: ReconciliationId(393), extent_id: 145 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43924 Sep 22 23:15:11.270 INFO [1] client ExtentClose { repair_id: ReconciliationId(393), extent_id: 145 }
43925 Sep 22 23:15:11.271 INFO [2] received reconcile message
43926 Sep 22 23:15:11.271 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(393), op: ExtentClose { repair_id: ReconciliationId(393), extent_id: 145 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43927 Sep 22 23:15:11.271 INFO [2] client ExtentClose { repair_id: ReconciliationId(393), extent_id: 145 }
43928 Sep 22 23:15:11.271 DEBG 393 Close extent 145
43929 Sep 22 23:15:11.271 DEBG 393 Close extent 145
43930 Sep 22 23:15:11.271 DEBG 393 Close extent 145
43931 Sep 22 23:15:11.272 DEBG [2] It's time to notify for 393
43932 Sep 22 23:15:11.272 INFO Completion from [2] id:393 status:true
43933 Sep 22 23:15:11.272 INFO [394/752] Repair commands completed
43934 Sep 22 23:15:11.272 INFO Pop front: ReconcileIO { id: ReconciliationId(394), op: ExtentRepair { repair_id: ReconciliationId(394), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
43935 Sep 22 23:15:11.272 INFO Sent repair work, now wait for resp
43936 Sep 22 23:15:11.272 INFO [0] received reconcile message
43937 Sep 22 23:15:11.272 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(394), op: ExtentRepair { repair_id: ReconciliationId(394), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
43938 Sep 22 23:15:11.272 INFO [0] client ExtentRepair { repair_id: ReconciliationId(394), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43939 Sep 22 23:15:11.272 INFO [0] Sending repair request ReconciliationId(394)
43940 Sep 22 23:15:11.272 INFO [1] received reconcile message
43941 Sep 22 23:15:11.272 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(394), op: ExtentRepair { repair_id: ReconciliationId(394), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43942 Sep 22 23:15:11.272 INFO [1] client ExtentRepair { repair_id: ReconciliationId(394), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43943 Sep 22 23:15:11.272 INFO [1] No action required ReconciliationId(394)
43944 Sep 22 23:15:11.272 INFO [2] received reconcile message
43945 Sep 22 23:15:11.272 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(394), op: ExtentRepair { repair_id: ReconciliationId(394), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
43946 Sep 22 23:15:11.272 INFO [2] client ExtentRepair { repair_id: ReconciliationId(394), extent_id: 145, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
43947 Sep 22 23:15:11.272 INFO [2] No action required ReconciliationId(394)
43948 Sep 22 23:15:11.272 DEBG 394 Repair extent 145 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
43949 Sep 22 23:15:11.272 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/091.copy"
43950 Sep 22 23:15:11.337 INFO accepted connection, remote_addr: 127.0.0.1:62775, local_addr: 127.0.0.1:46213, task: repair
43951 Sep 22 23:15:11.337 TRCE incoming request, uri: /extent/145/files, method: GET, req_id: e921412f-6fd6-44eb-b757-74d15bcae950, remote_addr: 127.0.0.1:62775, local_addr: 127.0.0.1:46213, task: repair
43952 Sep 22 23:15:11.337 INFO request completed, latency_us: 236, response_code: 200, uri: /extent/145/files, method: GET, req_id: e921412f-6fd6-44eb-b757-74d15bcae950, remote_addr: 127.0.0.1:62775, local_addr: 127.0.0.1:46213, task: repair
43953 Sep 22 23:15:11.338 INFO eid:145 Found repair files: ["091", "091.db"]
43954 Sep 22 23:15:11.338 TRCE incoming request, uri: /newextent/145/data, method: GET, req_id: 2df0297d-8ed1-4248-ba06-a68f8b38460f, remote_addr: 127.0.0.1:62775, local_addr: 127.0.0.1:46213, task: repair
43955 Sep 22 23:15:11.338 INFO request completed, latency_us: 348, response_code: 200, uri: /newextent/145/data, method: GET, req_id: 2df0297d-8ed1-4248-ba06-a68f8b38460f, remote_addr: 127.0.0.1:62775, local_addr: 127.0.0.1:46213, task: repair
43956 Sep 22 23:15:11.343 TRCE incoming request, uri: /newextent/145/db, method: GET, req_id: 5d806757-8399-463a-9bfd-8c0ac9810c1f, remote_addr: 127.0.0.1:62775, local_addr: 127.0.0.1:46213, task: repair
43957 Sep 22 23:15:11.344 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/145/db, method: GET, req_id: 5d806757-8399-463a-9bfd-8c0ac9810c1f, remote_addr: 127.0.0.1:62775, local_addr: 127.0.0.1:46213, task: repair
43958 Sep 22 23:15:11.345 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/091.copy" to "/tmp/downstairs-vrx8aK6L/00/000/091.replace"
43959 Sep 22 23:15:11.345 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43960 Sep 22 23:15:11.346 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/091.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
43961 Sep 22 23:15:11.346 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/091"
43962 Sep 22 23:15:11.346 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/091.db"
43963 Sep 22 23:15:11.346 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43964 Sep 22 23:15:11.346 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/091.replace" to "/tmp/downstairs-vrx8aK6L/00/000/091.completed"
43965 Sep 22 23:15:11.346 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43966 Sep 22 23:15:11.346 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
43967 Sep 22 23:15:11.346 DEBG [0] It's time to notify for 394
43968 Sep 22 23:15:11.347 INFO Completion from [0] id:394 status:true
43969 Sep 22 23:15:11.347 INFO [395/752] Repair commands completed
43970 Sep 22 23:15:11.347 INFO Pop front: ReconcileIO { id: ReconciliationId(395), op: ExtentReopen { repair_id: ReconciliationId(395), extent_id: 145 }, state: ClientData([New, New, New]) }
43971 Sep 22 23:15:11.347 INFO Sent repair work, now wait for resp
43972 Sep 22 23:15:11.347 INFO [0] received reconcile message
43973 Sep 22 23:15:11.347 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(395), op: ExtentReopen { repair_id: ReconciliationId(395), extent_id: 145 }, state: ClientData([InProgress, New, New]) }, : downstairs
43974 Sep 22 23:15:11.347 INFO [0] client ExtentReopen { repair_id: ReconciliationId(395), extent_id: 145 }
43975 Sep 22 23:15:11.347 INFO [1] received reconcile message
43976 Sep 22 23:15:11.347 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(395), op: ExtentReopen { repair_id: ReconciliationId(395), extent_id: 145 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
43977 Sep 22 23:15:11.347 INFO [1] client ExtentReopen { repair_id: ReconciliationId(395), extent_id: 145 }
43978 Sep 22 23:15:11.347 INFO [2] received reconcile message
43979 Sep 22 23:15:11.347 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(395), op: ExtentReopen { repair_id: ReconciliationId(395), extent_id: 145 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
43980 Sep 22 23:15:11.347 INFO [2] client ExtentReopen { repair_id: ReconciliationId(395), extent_id: 145 }
43981 Sep 22 23:15:11.347 DEBG 395 Reopen extent 145
43982 Sep 22 23:15:11.348 DEBG 395 Reopen extent 145
43983 Sep 22 23:15:11.348 DEBG 395 Reopen extent 145
43984 Sep 22 23:15:11.349 DEBG [2] It's time to notify for 395
43985 Sep 22 23:15:11.349 INFO Completion from [2] id:395 status:true
43986 Sep 22 23:15:11.349 INFO [396/752] Repair commands completed
43987 Sep 22 23:15:11.349 INFO Pop front: ReconcileIO { id: ReconciliationId(396), op: ExtentFlush { repair_id: ReconciliationId(396), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
43988 Sep 22 23:15:11.349 INFO Sent repair work, now wait for resp
43989 Sep 22 23:15:11.349 INFO [0] received reconcile message
43990 Sep 22 23:15:11.349 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(396), op: ExtentFlush { repair_id: ReconciliationId(396), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
43991 Sep 22 23:15:11.349 INFO [0] client ExtentFlush { repair_id: ReconciliationId(396), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43992 Sep 22 23:15:11.349 INFO [1] received reconcile message
43993 Sep 22 23:15:11.349 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(396), op: ExtentFlush { repair_id: ReconciliationId(396), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
43994 Sep 22 23:15:11.349 INFO [1] client ExtentFlush { repair_id: ReconciliationId(396), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43995 Sep 22 23:15:11.349 INFO [2] received reconcile message
43996 Sep 22 23:15:11.349 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(396), op: ExtentFlush { repair_id: ReconciliationId(396), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
43997 Sep 22 23:15:11.349 INFO [2] client ExtentFlush { repair_id: ReconciliationId(396), extent_id: 168, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
43998 Sep 22 23:15:11.349 DEBG 396 Flush extent 168 with f:2 g:2
43999 Sep 22 23:15:11.349 DEBG Flush just extent 168 with f:2 and g:2
44000 Sep 22 23:15:11.349 DEBG [1] It's time to notify for 396
44001 Sep 22 23:15:11.349 INFO Completion from [1] id:396 status:true
44002 Sep 22 23:15:11.349 INFO [397/752] Repair commands completed
44003 Sep 22 23:15:11.349 INFO Pop front: ReconcileIO { id: ReconciliationId(397), op: ExtentClose { repair_id: ReconciliationId(397), extent_id: 168 }, state: ClientData([New, New, New]) }
44004 Sep 22 23:15:11.350 INFO Sent repair work, now wait for resp
44005 Sep 22 23:15:11.350 INFO [0] received reconcile message
44006 Sep 22 23:15:11.350 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(397), op: ExtentClose { repair_id: ReconciliationId(397), extent_id: 168 }, state: ClientData([InProgress, New, New]) }, : downstairs
44007 Sep 22 23:15:11.350 INFO [0] client ExtentClose { repair_id: ReconciliationId(397), extent_id: 168 }
44008 Sep 22 23:15:11.350 INFO [1] received reconcile message
44009 Sep 22 23:15:11.350 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(397), op: ExtentClose { repair_id: ReconciliationId(397), extent_id: 168 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44010 Sep 22 23:15:11.350 INFO [1] client ExtentClose { repair_id: ReconciliationId(397), extent_id: 168 }
44011 Sep 22 23:15:11.350 INFO [2] received reconcile message
44012 Sep 22 23:15:11.350 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(397), op: ExtentClose { repair_id: ReconciliationId(397), extent_id: 168 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44013 Sep 22 23:15:11.350 INFO [2] client ExtentClose { repair_id: ReconciliationId(397), extent_id: 168 }
44014 Sep 22 23:15:11.350 DEBG 397 Close extent 168
44015 Sep 22 23:15:11.350 DEBG 397 Close extent 168
44016 Sep 22 23:15:11.350 DEBG 397 Close extent 168
44017 Sep 22 23:15:11.351 DEBG [2] It's time to notify for 397
44018 Sep 22 23:15:11.351 INFO Completion from [2] id:397 status:true
44019 Sep 22 23:15:11.351 INFO [398/752] Repair commands completed
44020 Sep 22 23:15:11.351 INFO Pop front: ReconcileIO { id: ReconciliationId(398), op: ExtentRepair { repair_id: ReconciliationId(398), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44021 Sep 22 23:15:11.351 INFO Sent repair work, now wait for resp
44022 Sep 22 23:15:11.351 INFO [0] received reconcile message
44023 Sep 22 23:15:11.351 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(398), op: ExtentRepair { repair_id: ReconciliationId(398), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44024 Sep 22 23:15:11.351 INFO [0] client ExtentRepair { repair_id: ReconciliationId(398), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44025 Sep 22 23:15:11.351 INFO [0] Sending repair request ReconciliationId(398)
44026 Sep 22 23:15:11.351 INFO [1] received reconcile message
44027 Sep 22 23:15:11.351 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(398), op: ExtentRepair { repair_id: ReconciliationId(398), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44028 Sep 22 23:15:11.351 INFO [1] client ExtentRepair { repair_id: ReconciliationId(398), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44029 Sep 22 23:15:11.351 INFO [1] No action required ReconciliationId(398)
44030 Sep 22 23:15:11.351 INFO [2] received reconcile message
44031 Sep 22 23:15:11.351 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(398), op: ExtentRepair { repair_id: ReconciliationId(398), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44032 Sep 22 23:15:11.351 INFO [2] client ExtentRepair { repair_id: ReconciliationId(398), extent_id: 168, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44033 Sep 22 23:15:11.351 INFO [2] No action required ReconciliationId(398)
44034 Sep 22 23:15:11.351 DEBG 398 Repair extent 168 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
44035 Sep 22 23:15:11.351 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0A8.copy"
44036 Sep 22 23:15:11.375 ERRO [0] job id 1083 saw error GenericError("test error")
44037 Sep 22 23:15:11.375 ERRO [0] job id 1083 saw error GenericError("test error")
44038 Sep 22 23:15:11.375 DEBG up_ds_listen was notified
44039 Sep 22 23:15:11.375 DEBG up_ds_listen process 1083
44040 Sep 22 23:15:11.375 DEBG [A] ack job 1083:84, : downstairs
44041 Sep 22 23:15:11.375 DEBG up_ds_listen checked 1 jobs, back to waiting
44042 Sep 22 23:15:11.381 DEBG Read :1082 deps:[JobId(1081)] res:true
44043 Sep 22 23:15:11.403 DEBG IO Read 1084 has deps [JobId(1083)]
44044 Sep 22 23:15:11.414 INFO accepted connection, remote_addr: 127.0.0.1:33934, local_addr: 127.0.0.1:46213, task: repair
44045 Sep 22 23:15:11.415 TRCE incoming request, uri: /extent/168/files, method: GET, req_id: 183eb21a-6502-4d6f-884f-e9b87d04cf5d, remote_addr: 127.0.0.1:33934, local_addr: 127.0.0.1:46213, task: repair
44046 Sep 22 23:15:11.415 INFO request completed, latency_us: 223, response_code: 200, uri: /extent/168/files, method: GET, req_id: 183eb21a-6502-4d6f-884f-e9b87d04cf5d, remote_addr: 127.0.0.1:33934, local_addr: 127.0.0.1:46213, task: repair
44047 Sep 22 23:15:11.415 INFO eid:168 Found repair files: ["0A8", "0A8.db"]
44048 Sep 22 23:15:11.415 TRCE incoming request, uri: /newextent/168/data, method: GET, req_id: 42229d6f-34f6-488e-8762-2277951d1815, remote_addr: 127.0.0.1:33934, local_addr: 127.0.0.1:46213, task: repair
44049 Sep 22 23:15:11.416 INFO request completed, latency_us: 322, response_code: 200, uri: /newextent/168/data, method: GET, req_id: 42229d6f-34f6-488e-8762-2277951d1815, remote_addr: 127.0.0.1:33934, local_addr: 127.0.0.1:46213, task: repair
44050 Sep 22 23:15:11.417 INFO [lossy] skipping 1083
44051 Sep 22 23:15:11.417 DEBG Flush :1083 extent_limit None deps:[JobId(1082), JobId(1081)] res:true f:31 g:1
44052 Sep 22 23:15:11.417 INFO [lossy] skipping 1084
44053 Sep 22 23:15:11.417 INFO [lossy] skipping 1084
44054 Sep 22 23:15:11.417 INFO [lossy] skipping 1084
44055 Sep 22 23:15:11.421 TRCE incoming request, uri: /newextent/168/db, method: GET, req_id: a5b7274c-d630-476d-8703-1dedd48c7b78, remote_addr: 127.0.0.1:33934, local_addr: 127.0.0.1:46213, task: repair
44056 Sep 22 23:15:11.421 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/168/db, method: GET, req_id: a5b7274c-d630-476d-8703-1dedd48c7b78, remote_addr: 127.0.0.1:33934, local_addr: 127.0.0.1:46213, task: repair
44057 Sep 22 23:15:11.422 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0A8.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0A8.replace"
44058 Sep 22 23:15:11.422 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44059 Sep 22 23:15:11.423 DEBG Read :1084 deps:[JobId(1083)] res:true
44060 Sep 22 23:15:11.423 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0A8.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
44061 Sep 22 23:15:11.424 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A8"
44062 Sep 22 23:15:11.424 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A8.db"
44063 Sep 22 23:15:11.424 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44064 Sep 22 23:15:11.424 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0A8.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0A8.completed"
44065 Sep 22 23:15:11.424 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44066 Sep 22 23:15:11.424 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44067 Sep 22 23:15:11.424 DEBG [0] It's time to notify for 398
44068 Sep 22 23:15:11.424 INFO Completion from [0] id:398 status:true
44069 Sep 22 23:15:11.424 INFO [399/752] Repair commands completed
44070 Sep 22 23:15:11.424 INFO Pop front: ReconcileIO { id: ReconciliationId(399), op: ExtentReopen { repair_id: ReconciliationId(399), extent_id: 168 }, state: ClientData([New, New, New]) }
44071 Sep 22 23:15:11.424 INFO Sent repair work, now wait for resp
44072 Sep 22 23:15:11.424 INFO [0] received reconcile message
44073 Sep 22 23:15:11.424 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(399), op: ExtentReopen { repair_id: ReconciliationId(399), extent_id: 168 }, state: ClientData([InProgress, New, New]) }, : downstairs
44074 Sep 22 23:15:11.424 INFO [0] client ExtentReopen { repair_id: ReconciliationId(399), extent_id: 168 }
44075 Sep 22 23:15:11.424 INFO [1] received reconcile message
44076 Sep 22 23:15:11.424 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(399), op: ExtentReopen { repair_id: ReconciliationId(399), extent_id: 168 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44077 Sep 22 23:15:11.424 INFO [1] client ExtentReopen { repair_id: ReconciliationId(399), extent_id: 168 }
44078 Sep 22 23:15:11.424 INFO [2] received reconcile message
44079 Sep 22 23:15:11.424 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(399), op: ExtentReopen { repair_id: ReconciliationId(399), extent_id: 168 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44080 Sep 22 23:15:11.424 INFO [2] client ExtentReopen { repair_id: ReconciliationId(399), extent_id: 168 }
44081 Sep 22 23:15:11.425 DEBG 399 Reopen extent 168
44082 Sep 22 23:15:11.425 DEBG 399 Reopen extent 168
44083 Sep 22 23:15:11.426 DEBG 399 Reopen extent 168
44084 Sep 22 23:15:11.426 DEBG [2] It's time to notify for 399
44085 Sep 22 23:15:11.426 INFO Completion from [2] id:399 status:true
44086 Sep 22 23:15:11.426 INFO [400/752] Repair commands completed
44087 Sep 22 23:15:11.426 INFO Pop front: ReconcileIO { id: ReconciliationId(400), op: ExtentFlush { repair_id: ReconciliationId(400), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44088 Sep 22 23:15:11.426 INFO Sent repair work, now wait for resp
44089 Sep 22 23:15:11.426 INFO [0] received reconcile message
44090 Sep 22 23:15:11.427 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(400), op: ExtentFlush { repair_id: ReconciliationId(400), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44091 Sep 22 23:15:11.427 INFO [0] client ExtentFlush { repair_id: ReconciliationId(400), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44092 Sep 22 23:15:11.427 INFO [1] received reconcile message
44093 Sep 22 23:15:11.427 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(400), op: ExtentFlush { repair_id: ReconciliationId(400), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44094 Sep 22 23:15:11.427 INFO [1] client ExtentFlush { repair_id: ReconciliationId(400), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44095 Sep 22 23:15:11.427 INFO [2] received reconcile message
44096 Sep 22 23:15:11.427 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(400), op: ExtentFlush { repair_id: ReconciliationId(400), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44097 Sep 22 23:15:11.427 INFO [2] client ExtentFlush { repair_id: ReconciliationId(400), extent_id: 127, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44098 Sep 22 23:15:11.427 DEBG 400 Flush extent 127 with f:2 g:2
44099 Sep 22 23:15:11.427 DEBG Flush just extent 127 with f:2 and g:2
44100 Sep 22 23:15:11.427 DEBG [1] It's time to notify for 400
44101 Sep 22 23:15:11.427 INFO Completion from [1] id:400 status:true
44102 Sep 22 23:15:11.427 INFO [401/752] Repair commands completed
44103 Sep 22 23:15:11.427 INFO Pop front: ReconcileIO { id: ReconciliationId(401), op: ExtentClose { repair_id: ReconciliationId(401), extent_id: 127 }, state: ClientData([New, New, New]) }
44104 Sep 22 23:15:11.427 INFO Sent repair work, now wait for resp
44105 Sep 22 23:15:11.427 INFO [0] received reconcile message
44106 Sep 22 23:15:11.427 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(401), op: ExtentClose { repair_id: ReconciliationId(401), extent_id: 127 }, state: ClientData([InProgress, New, New]) }, : downstairs
44107 Sep 22 23:15:11.427 INFO [0] client ExtentClose { repair_id: ReconciliationId(401), extent_id: 127 }
44108 Sep 22 23:15:11.427 INFO [1] received reconcile message
44109 Sep 22 23:15:11.427 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(401), op: ExtentClose { repair_id: ReconciliationId(401), extent_id: 127 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44110 Sep 22 23:15:11.427 INFO [1] client ExtentClose { repair_id: ReconciliationId(401), extent_id: 127 }
44111 Sep 22 23:15:11.427 INFO [2] received reconcile message
44112 Sep 22 23:15:11.427 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(401), op: ExtentClose { repair_id: ReconciliationId(401), extent_id: 127 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44113 Sep 22 23:15:11.427 INFO [2] client ExtentClose { repair_id: ReconciliationId(401), extent_id: 127 }
44114 Sep 22 23:15:11.427 DEBG 401 Close extent 127
44115 Sep 22 23:15:11.428 DEBG 401 Close extent 127
44116 Sep 22 23:15:11.428 DEBG 401 Close extent 127
44117 Sep 22 23:15:11.428 DEBG [2] It's time to notify for 401
44118 Sep 22 23:15:11.428 INFO Completion from [2] id:401 status:true
44119 Sep 22 23:15:11.428 INFO [402/752] Repair commands completed
44120 Sep 22 23:15:11.428 INFO Pop front: ReconcileIO { id: ReconciliationId(402), op: ExtentRepair { repair_id: ReconciliationId(402), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44121 Sep 22 23:15:11.428 INFO Sent repair work, now wait for resp
44122 Sep 22 23:15:11.428 INFO [0] received reconcile message
44123 Sep 22 23:15:11.428 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(402), op: ExtentRepair { repair_id: ReconciliationId(402), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44124 Sep 22 23:15:11.428 INFO [0] client ExtentRepair { repair_id: ReconciliationId(402), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44125 Sep 22 23:15:11.429 INFO [0] Sending repair request ReconciliationId(402)
44126 Sep 22 23:15:11.429 INFO [1] received reconcile message
44127 Sep 22 23:15:11.429 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(402), op: ExtentRepair { repair_id: ReconciliationId(402), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44128 Sep 22 23:15:11.429 INFO [1] client ExtentRepair { repair_id: ReconciliationId(402), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44129 Sep 22 23:15:11.429 INFO [1] No action required ReconciliationId(402)
44130 Sep 22 23:15:11.429 INFO [2] received reconcile message
44131 Sep 22 23:15:11.429 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(402), op: ExtentRepair { repair_id: ReconciliationId(402), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44132 Sep 22 23:15:11.429 INFO [2] client ExtentRepair { repair_id: ReconciliationId(402), extent_id: 127, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44133 Sep 22 23:15:11.429 INFO [2] No action required ReconciliationId(402)
44134 Sep 22 23:15:11.429 DEBG 402 Repair extent 127 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
44135 Sep 22 23:15:11.429 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/07F.copy"
44136 Sep 22 23:15:11.493 INFO accepted connection, remote_addr: 127.0.0.1:33658, local_addr: 127.0.0.1:46213, task: repair
44137 Sep 22 23:15:11.493 TRCE incoming request, uri: /extent/127/files, method: GET, req_id: 4a2b9aee-da6e-4e30-97db-d6a5edc6b8d5, remote_addr: 127.0.0.1:33658, local_addr: 127.0.0.1:46213, task: repair
44138 Sep 22 23:15:11.493 INFO request completed, latency_us: 216, response_code: 200, uri: /extent/127/files, method: GET, req_id: 4a2b9aee-da6e-4e30-97db-d6a5edc6b8d5, remote_addr: 127.0.0.1:33658, local_addr: 127.0.0.1:46213, task: repair
44139 Sep 22 23:15:11.494 INFO eid:127 Found repair files: ["07F", "07F.db"]
44140 Sep 22 23:15:11.494 TRCE incoming request, uri: /newextent/127/data, method: GET, req_id: 81c1bbc9-076f-401d-b3cf-732a797ebf19, remote_addr: 127.0.0.1:33658, local_addr: 127.0.0.1:46213, task: repair
44141 Sep 22 23:15:11.494 INFO request completed, latency_us: 257, response_code: 200, uri: /newextent/127/data, method: GET, req_id: 81c1bbc9-076f-401d-b3cf-732a797ebf19, remote_addr: 127.0.0.1:33658, local_addr: 127.0.0.1:46213, task: repair
44142 Sep 22 23:15:11.499 TRCE incoming request, uri: /newextent/127/db, method: GET, req_id: 546a330f-36b7-4640-9219-ed90178f13d3, remote_addr: 127.0.0.1:33658, local_addr: 127.0.0.1:46213, task: repair
44143 Sep 22 23:15:11.500 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/127/db, method: GET, req_id: 546a330f-36b7-4640-9219-ed90178f13d3, remote_addr: 127.0.0.1:33658, local_addr: 127.0.0.1:46213, task: repair
44144 Sep 22 23:15:11.501 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/07F.copy" to "/tmp/downstairs-vrx8aK6L/00/000/07F.replace"
44145 Sep 22 23:15:11.501 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44146 Sep 22 23:15:11.502 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/07F.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
44147 Sep 22 23:15:11.502 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/07F"
44148 Sep 22 23:15:11.502 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/07F.db"
44149 Sep 22 23:15:11.502 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44150 Sep 22 23:15:11.502 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/07F.replace" to "/tmp/downstairs-vrx8aK6L/00/000/07F.completed"
44151 Sep 22 23:15:11.502 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44152 Sep 22 23:15:11.502 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44153 Sep 22 23:15:11.502 DEBG [0] It's time to notify for 402
44154 Sep 22 23:15:11.502 INFO Completion from [0] id:402 status:true
44155 Sep 22 23:15:11.502 INFO [403/752] Repair commands completed
44156 Sep 22 23:15:11.502 INFO Pop front: ReconcileIO { id: ReconciliationId(403), op: ExtentReopen { repair_id: ReconciliationId(403), extent_id: 127 }, state: ClientData([New, New, New]) }
44157 Sep 22 23:15:11.502 INFO Sent repair work, now wait for resp
44158 Sep 22 23:15:11.503 INFO [0] received reconcile message
44159 Sep 22 23:15:11.503 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(403), op: ExtentReopen { repair_id: ReconciliationId(403), extent_id: 127 }, state: ClientData([InProgress, New, New]) }, : downstairs
44160 Sep 22 23:15:11.503 INFO [0] client ExtentReopen { repair_id: ReconciliationId(403), extent_id: 127 }
44161 Sep 22 23:15:11.503 INFO [1] received reconcile message
44162 Sep 22 23:15:11.503 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(403), op: ExtentReopen { repair_id: ReconciliationId(403), extent_id: 127 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44163 Sep 22 23:15:11.503 INFO [1] client ExtentReopen { repair_id: ReconciliationId(403), extent_id: 127 }
44164 Sep 22 23:15:11.503 INFO [2] received reconcile message
44165 Sep 22 23:15:11.503 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(403), op: ExtentReopen { repair_id: ReconciliationId(403), extent_id: 127 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44166 Sep 22 23:15:11.503 INFO [2] client ExtentReopen { repair_id: ReconciliationId(403), extent_id: 127 }
44167 Sep 22 23:15:11.503 DEBG 403 Reopen extent 127
44168 Sep 22 23:15:11.504 DEBG 403 Reopen extent 127
44169 Sep 22 23:15:11.504 DEBG 403 Reopen extent 127
44170 Sep 22 23:15:11.505 DEBG [2] It's time to notify for 403
44171 Sep 22 23:15:11.505 INFO Completion from [2] id:403 status:true
44172 Sep 22 23:15:11.505 INFO [404/752] Repair commands completed
44173 Sep 22 23:15:11.505 INFO Pop front: ReconcileIO { id: ReconciliationId(404), op: ExtentFlush { repair_id: ReconciliationId(404), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44174 Sep 22 23:15:11.505 INFO Sent repair work, now wait for resp
44175 Sep 22 23:15:11.505 INFO [0] received reconcile message
44176 Sep 22 23:15:11.505 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(404), op: ExtentFlush { repair_id: ReconciliationId(404), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44177 Sep 22 23:15:11.505 INFO [0] client ExtentFlush { repair_id: ReconciliationId(404), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44178 Sep 22 23:15:11.505 INFO [1] received reconcile message
44179 Sep 22 23:15:11.505 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(404), op: ExtentFlush { repair_id: ReconciliationId(404), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44180 Sep 22 23:15:11.505 INFO [1] client ExtentFlush { repair_id: ReconciliationId(404), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44181 Sep 22 23:15:11.505 INFO [2] received reconcile message
44182 Sep 22 23:15:11.505 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(404), op: ExtentFlush { repair_id: ReconciliationId(404), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44183 Sep 22 23:15:11.505 INFO [2] client ExtentFlush { repair_id: ReconciliationId(404), extent_id: 26, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44184 Sep 22 23:15:11.505 DEBG 404 Flush extent 26 with f:2 g:2
44185 Sep 22 23:15:11.505 DEBG Flush just extent 26 with f:2 and g:2
44186 Sep 22 23:15:11.505 DEBG [1] It's time to notify for 404
44187 Sep 22 23:15:11.505 INFO Completion from [1] id:404 status:true
44188 Sep 22 23:15:11.505 INFO [405/752] Repair commands completed
44189 Sep 22 23:15:11.505 INFO Pop front: ReconcileIO { id: ReconciliationId(405), op: ExtentClose { repair_id: ReconciliationId(405), extent_id: 26 }, state: ClientData([New, New, New]) }
44190 Sep 22 23:15:11.505 INFO Sent repair work, now wait for resp
44191 Sep 22 23:15:11.505 INFO [0] received reconcile message
44192 Sep 22 23:15:11.505 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(405), op: ExtentClose { repair_id: ReconciliationId(405), extent_id: 26 }, state: ClientData([InProgress, New, New]) }, : downstairs
44193 Sep 22 23:15:11.505 INFO [0] client ExtentClose { repair_id: ReconciliationId(405), extent_id: 26 }
44194 Sep 22 23:15:11.505 INFO [1] received reconcile message
44195 Sep 22 23:15:11.505 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(405), op: ExtentClose { repair_id: ReconciliationId(405), extent_id: 26 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44196 Sep 22 23:15:11.505 INFO [1] client ExtentClose { repair_id: ReconciliationId(405), extent_id: 26 }
44197 Sep 22 23:15:11.505 INFO [2] received reconcile message
44198 Sep 22 23:15:11.505 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(405), op: ExtentClose { repair_id: ReconciliationId(405), extent_id: 26 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44199 Sep 22 23:15:11.506 INFO [2] client ExtentClose { repair_id: ReconciliationId(405), extent_id: 26 }
44200 Sep 22 23:15:11.506 DEBG 405 Close extent 26
44201 Sep 22 23:15:11.506 DEBG 405 Close extent 26
44202 Sep 22 23:15:11.506 DEBG 405 Close extent 26
44203 Sep 22 23:15:11.507 DEBG [2] It's time to notify for 405
44204 Sep 22 23:15:11.507 INFO Completion from [2] id:405 status:true
44205 Sep 22 23:15:11.507 INFO [406/752] Repair commands completed
44206 Sep 22 23:15:11.507 INFO Pop front: ReconcileIO { id: ReconciliationId(406), op: ExtentRepair { repair_id: ReconciliationId(406), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44207 Sep 22 23:15:11.507 INFO Sent repair work, now wait for resp
44208 Sep 22 23:15:11.507 INFO [0] received reconcile message
44209 Sep 22 23:15:11.507 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(406), op: ExtentRepair { repair_id: ReconciliationId(406), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44210 Sep 22 23:15:11.507 INFO [0] client ExtentRepair { repair_id: ReconciliationId(406), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44211 Sep 22 23:15:11.507 INFO [0] Sending repair request ReconciliationId(406)
44212 Sep 22 23:15:11.507 INFO [1] received reconcile message
44213 Sep 22 23:15:11.507 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(406), op: ExtentRepair { repair_id: ReconciliationId(406), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44214 Sep 22 23:15:11.507 INFO [1] client ExtentRepair { repair_id: ReconciliationId(406), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44215 Sep 22 23:15:11.507 INFO [1] No action required ReconciliationId(406)
44216 Sep 22 23:15:11.507 INFO [2] received reconcile message
44217 Sep 22 23:15:11.507 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(406), op: ExtentRepair { repair_id: ReconciliationId(406), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44218 Sep 22 23:15:11.507 INFO [2] client ExtentRepair { repair_id: ReconciliationId(406), extent_id: 26, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44219 Sep 22 23:15:11.507 INFO [2] No action required ReconciliationId(406)
44220 Sep 22 23:15:11.507 DEBG 406 Repair extent 26 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
44221 Sep 22 23:15:11.507 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/01A.copy"
44222 Sep 22 23:15:11.571 INFO accepted connection, remote_addr: 127.0.0.1:42529, local_addr: 127.0.0.1:46213, task: repair
44223 Sep 22 23:15:11.571 TRCE incoming request, uri: /extent/26/files, method: GET, req_id: 1b9b10e1-8af0-42d6-9203-ae346e2b9b0a, remote_addr: 127.0.0.1:42529, local_addr: 127.0.0.1:46213, task: repair
44224 Sep 22 23:15:11.572 INFO request completed, latency_us: 229, response_code: 200, uri: /extent/26/files, method: GET, req_id: 1b9b10e1-8af0-42d6-9203-ae346e2b9b0a, remote_addr: 127.0.0.1:42529, local_addr: 127.0.0.1:46213, task: repair
44225 Sep 22 23:15:11.572 INFO eid:26 Found repair files: ["01A", "01A.db"]
44226 Sep 22 23:15:11.572 TRCE incoming request, uri: /newextent/26/data, method: GET, req_id: 54ec14e7-3b7e-4e0d-81d6-0cbb2dacba46, remote_addr: 127.0.0.1:42529, local_addr: 127.0.0.1:46213, task: repair
44227 Sep 22 23:15:11.572 INFO request completed, latency_us: 343, response_code: 200, uri: /newextent/26/data, method: GET, req_id: 54ec14e7-3b7e-4e0d-81d6-0cbb2dacba46, remote_addr: 127.0.0.1:42529, local_addr: 127.0.0.1:46213, task: repair
44228 Sep 22 23:15:11.578 TRCE incoming request, uri: /newextent/26/db, method: GET, req_id: ea27c0d5-8311-4275-8b6d-d2f5d0e5a286, remote_addr: 127.0.0.1:42529, local_addr: 127.0.0.1:46213, task: repair
44229 Sep 22 23:15:11.578 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/26/db, method: GET, req_id: ea27c0d5-8311-4275-8b6d-d2f5d0e5a286, remote_addr: 127.0.0.1:42529, local_addr: 127.0.0.1:46213, task: repair
44230 Sep 22 23:15:11.579 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/01A.copy" to "/tmp/downstairs-vrx8aK6L/00/000/01A.replace"
44231 Sep 22 23:15:11.579 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44232 Sep 22 23:15:11.580 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/01A.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
44233 Sep 22 23:15:11.580 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/01A"
44234 Sep 22 23:15:11.580 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/01A.db"
44235 Sep 22 23:15:11.580 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44236 Sep 22 23:15:11.580 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/01A.replace" to "/tmp/downstairs-vrx8aK6L/00/000/01A.completed"
44237 Sep 22 23:15:11.580 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44238 Sep 22 23:15:11.581 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44239 Sep 22 23:15:11.581 DEBG [0] It's time to notify for 406
44240 Sep 22 23:15:11.581 INFO Completion from [0] id:406 status:true
44241 Sep 22 23:15:11.581 INFO [407/752] Repair commands completed
44242 Sep 22 23:15:11.581 INFO Pop front: ReconcileIO { id: ReconciliationId(407), op: ExtentReopen { repair_id: ReconciliationId(407), extent_id: 26 }, state: ClientData([New, New, New]) }
44243 Sep 22 23:15:11.581 INFO Sent repair work, now wait for resp
44244 Sep 22 23:15:11.581 INFO [0] received reconcile message
44245 Sep 22 23:15:11.581 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(407), op: ExtentReopen { repair_id: ReconciliationId(407), extent_id: 26 }, state: ClientData([InProgress, New, New]) }, : downstairs
44246 Sep 22 23:15:11.581 INFO [0] client ExtentReopen { repair_id: ReconciliationId(407), extent_id: 26 }
44247 Sep 22 23:15:11.581 INFO [1] received reconcile message
44248 Sep 22 23:15:11.581 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(407), op: ExtentReopen { repair_id: ReconciliationId(407), extent_id: 26 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44249 Sep 22 23:15:11.581 INFO [1] client ExtentReopen { repair_id: ReconciliationId(407), extent_id: 26 }
44250 Sep 22 23:15:11.581 INFO [2] received reconcile message
44251 Sep 22 23:15:11.581 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(407), op: ExtentReopen { repair_id: ReconciliationId(407), extent_id: 26 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44252 Sep 22 23:15:11.581 INFO [2] client ExtentReopen { repair_id: ReconciliationId(407), extent_id: 26 }
44253 Sep 22 23:15:11.581 DEBG 407 Reopen extent 26
44254 Sep 22 23:15:11.582 DEBG 407 Reopen extent 26
44255 Sep 22 23:15:11.583 DEBG 407 Reopen extent 26
44256 Sep 22 23:15:11.583 DEBG [2] It's time to notify for 407
44257 Sep 22 23:15:11.583 INFO Completion from [2] id:407 status:true
44258 Sep 22 23:15:11.583 INFO [408/752] Repair commands completed
44259 Sep 22 23:15:11.583 INFO Pop front: ReconcileIO { id: ReconciliationId(408), op: ExtentFlush { repair_id: ReconciliationId(408), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44260 Sep 22 23:15:11.583 INFO Sent repair work, now wait for resp
44261 Sep 22 23:15:11.583 INFO [0] received reconcile message
44262 Sep 22 23:15:11.583 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(408), op: ExtentFlush { repair_id: ReconciliationId(408), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44263 Sep 22 23:15:11.583 INFO [0] client ExtentFlush { repair_id: ReconciliationId(408), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44264 Sep 22 23:15:11.583 INFO [1] received reconcile message
44265 Sep 22 23:15:11.583 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(408), op: ExtentFlush { repair_id: ReconciliationId(408), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44266 Sep 22 23:15:11.583 INFO [1] client ExtentFlush { repair_id: ReconciliationId(408), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44267 Sep 22 23:15:11.583 INFO [2] received reconcile message
44268 Sep 22 23:15:11.583 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(408), op: ExtentFlush { repair_id: ReconciliationId(408), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44269 Sep 22 23:15:11.583 INFO [2] client ExtentFlush { repair_id: ReconciliationId(408), extent_id: 34, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44270 Sep 22 23:15:11.584 DEBG 408 Flush extent 34 with f:2 g:2
44271 Sep 22 23:15:11.584 DEBG Flush just extent 34 with f:2 and g:2
44272 Sep 22 23:15:11.584 DEBG [1] It's time to notify for 408
44273 Sep 22 23:15:11.584 INFO Completion from [1] id:408 status:true
44274 Sep 22 23:15:11.584 INFO [409/752] Repair commands completed
44275 Sep 22 23:15:11.584 INFO Pop front: ReconcileIO { id: ReconciliationId(409), op: ExtentClose { repair_id: ReconciliationId(409), extent_id: 34 }, state: ClientData([New, New, New]) }
44276 Sep 22 23:15:11.584 INFO Sent repair work, now wait for resp
44277 Sep 22 23:15:11.584 INFO [0] received reconcile message
44278 Sep 22 23:15:11.584 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(409), op: ExtentClose { repair_id: ReconciliationId(409), extent_id: 34 }, state: ClientData([InProgress, New, New]) }, : downstairs
44279 Sep 22 23:15:11.584 INFO [0] client ExtentClose { repair_id: ReconciliationId(409), extent_id: 34 }
44280 Sep 22 23:15:11.584 INFO [1] received reconcile message
44281 Sep 22 23:15:11.584 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(409), op: ExtentClose { repair_id: ReconciliationId(409), extent_id: 34 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44282 Sep 22 23:15:11.584 INFO [1] client ExtentClose { repair_id: ReconciliationId(409), extent_id: 34 }
44283 Sep 22 23:15:11.584 INFO [2] received reconcile message
44284 Sep 22 23:15:11.584 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(409), op: ExtentClose { repair_id: ReconciliationId(409), extent_id: 34 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44285 Sep 22 23:15:11.584 INFO [2] client ExtentClose { repair_id: ReconciliationId(409), extent_id: 34 }
44286 Sep 22 23:15:11.584 DEBG 409 Close extent 34
44287 Sep 22 23:15:11.584 DEBG 409 Close extent 34
44288 Sep 22 23:15:11.585 DEBG 409 Close extent 34
44289 Sep 22 23:15:11.585 DEBG [2] It's time to notify for 409
44290 Sep 22 23:15:11.585 INFO Completion from [2] id:409 status:true
44291 Sep 22 23:15:11.585 INFO [410/752] Repair commands completed
44292 Sep 22 23:15:11.585 INFO Pop front: ReconcileIO { id: ReconciliationId(410), op: ExtentRepair { repair_id: ReconciliationId(410), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44293 Sep 22 23:15:11.585 INFO Sent repair work, now wait for resp
44294 Sep 22 23:15:11.585 INFO [0] received reconcile message
44295 Sep 22 23:15:11.585 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(410), op: ExtentRepair { repair_id: ReconciliationId(410), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44296 Sep 22 23:15:11.585 INFO [0] client ExtentRepair { repair_id: ReconciliationId(410), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44297 Sep 22 23:15:11.585 INFO [0] Sending repair request ReconciliationId(410)
44298 Sep 22 23:15:11.585 INFO [1] received reconcile message
44299 Sep 22 23:15:11.585 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(410), op: ExtentRepair { repair_id: ReconciliationId(410), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44300 Sep 22 23:15:11.585 INFO [1] client ExtentRepair { repair_id: ReconciliationId(410), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44301 Sep 22 23:15:11.585 INFO [1] No action required ReconciliationId(410)
44302 Sep 22 23:15:11.585 INFO [2] received reconcile message
44303 Sep 22 23:15:11.585 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(410), op: ExtentRepair { repair_id: ReconciliationId(410), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44304 Sep 22 23:15:11.585 INFO [2] client ExtentRepair { repair_id: ReconciliationId(410), extent_id: 34, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44305 Sep 22 23:15:11.585 INFO [2] No action required ReconciliationId(410)
44306 Sep 22 23:15:11.586 DEBG 410 Repair extent 34 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
44307 Sep 22 23:15:11.586 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/022.copy"
44308 Sep 22 23:15:11.650 INFO accepted connection, remote_addr: 127.0.0.1:34604, local_addr: 127.0.0.1:46213, task: repair
44309 Sep 22 23:15:11.650 TRCE incoming request, uri: /extent/34/files, method: GET, req_id: 75a10963-4838-4311-b957-e4167bac9ada, remote_addr: 127.0.0.1:34604, local_addr: 127.0.0.1:46213, task: repair
44310 Sep 22 23:15:11.650 INFO request completed, latency_us: 197, response_code: 200, uri: /extent/34/files, method: GET, req_id: 75a10963-4838-4311-b957-e4167bac9ada, remote_addr: 127.0.0.1:34604, local_addr: 127.0.0.1:46213, task: repair
44311 Sep 22 23:15:11.651 INFO eid:34 Found repair files: ["022", "022.db"]
44312 Sep 22 23:15:11.651 TRCE incoming request, uri: /newextent/34/data, method: GET, req_id: e6cbc666-4bc9-4395-8a26-30e718184c95, remote_addr: 127.0.0.1:34604, local_addr: 127.0.0.1:46213, task: repair
44313 Sep 22 23:15:11.651 INFO request completed, latency_us: 316, response_code: 200, uri: /newextent/34/data, method: GET, req_id: e6cbc666-4bc9-4395-8a26-30e718184c95, remote_addr: 127.0.0.1:34604, local_addr: 127.0.0.1:46213, task: repair
44314 Sep 22 23:15:11.656 TRCE incoming request, uri: /newextent/34/db, method: GET, req_id: ee3d11e4-e9d8-449d-a1ac-6bd04655168d, remote_addr: 127.0.0.1:34604, local_addr: 127.0.0.1:46213, task: repair
44315 Sep 22 23:15:11.657 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/34/db, method: GET, req_id: ee3d11e4-e9d8-449d-a1ac-6bd04655168d, remote_addr: 127.0.0.1:34604, local_addr: 127.0.0.1:46213, task: repair
44316 Sep 22 23:15:11.658 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/022.copy" to "/tmp/downstairs-vrx8aK6L/00/000/022.replace"
44317 Sep 22 23:15:11.658 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44318 Sep 22 23:15:11.659 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/022.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
44319 Sep 22 23:15:11.659 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/022"
44320 Sep 22 23:15:11.659 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/022.db"
44321 Sep 22 23:15:11.659 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44322 Sep 22 23:15:11.659 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/022.replace" to "/tmp/downstairs-vrx8aK6L/00/000/022.completed"
44323 Sep 22 23:15:11.659 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44324 Sep 22 23:15:11.659 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44325 Sep 22 23:15:11.659 DEBG [0] It's time to notify for 410
44326 Sep 22 23:15:11.659 INFO Completion from [0] id:410 status:true
44327 Sep 22 23:15:11.659 INFO [411/752] Repair commands completed
44328 Sep 22 23:15:11.659 INFO Pop front: ReconcileIO { id: ReconciliationId(411), op: ExtentReopen { repair_id: ReconciliationId(411), extent_id: 34 }, state: ClientData([New, New, New]) }
44329 Sep 22 23:15:11.659 INFO Sent repair work, now wait for resp
44330 Sep 22 23:15:11.660 INFO [0] received reconcile message
44331 Sep 22 23:15:11.660 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(411), op: ExtentReopen { repair_id: ReconciliationId(411), extent_id: 34 }, state: ClientData([InProgress, New, New]) }, : downstairs
44332 Sep 22 23:15:11.660 INFO [0] client ExtentReopen { repair_id: ReconciliationId(411), extent_id: 34 }
44333 Sep 22 23:15:11.660 INFO [1] received reconcile message
44334 Sep 22 23:15:11.660 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(411), op: ExtentReopen { repair_id: ReconciliationId(411), extent_id: 34 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44335 Sep 22 23:15:11.660 INFO [1] client ExtentReopen { repair_id: ReconciliationId(411), extent_id: 34 }
44336 Sep 22 23:15:11.660 INFO [2] received reconcile message
44337 Sep 22 23:15:11.660 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(411), op: ExtentReopen { repair_id: ReconciliationId(411), extent_id: 34 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44338 Sep 22 23:15:11.660 INFO [2] client ExtentReopen { repair_id: ReconciliationId(411), extent_id: 34 }
44339 Sep 22 23:15:11.660 DEBG 411 Reopen extent 34
44340 Sep 22 23:15:11.660 DEBG 411 Reopen extent 34
44341 Sep 22 23:15:11.661 DEBG 411 Reopen extent 34
44342 Sep 22 23:15:11.661 DEBG [2] It's time to notify for 411
44343 Sep 22 23:15:11.662 INFO Completion from [2] id:411 status:true
44344 Sep 22 23:15:11.662 INFO [412/752] Repair commands completed
44345 Sep 22 23:15:11.662 INFO Pop front: ReconcileIO { id: ReconciliationId(412), op: ExtentFlush { repair_id: ReconciliationId(412), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44346 Sep 22 23:15:11.662 INFO Sent repair work, now wait for resp
44347 Sep 22 23:15:11.662 INFO [0] received reconcile message
44348 Sep 22 23:15:11.662 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(412), op: ExtentFlush { repair_id: ReconciliationId(412), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44349 Sep 22 23:15:11.662 INFO [0] client ExtentFlush { repair_id: ReconciliationId(412), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44350 Sep 22 23:15:11.662 INFO [1] received reconcile message
44351 Sep 22 23:15:11.662 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(412), op: ExtentFlush { repair_id: ReconciliationId(412), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44352 Sep 22 23:15:11.662 INFO [1] client ExtentFlush { repair_id: ReconciliationId(412), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44353 Sep 22 23:15:11.662 INFO [2] received reconcile message
44354 Sep 22 23:15:11.662 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(412), op: ExtentFlush { repair_id: ReconciliationId(412), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44355 Sep 22 23:15:11.662 INFO [2] client ExtentFlush { repair_id: ReconciliationId(412), extent_id: 12, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44356 Sep 22 23:15:11.662 DEBG 412 Flush extent 12 with f:2 g:2
44357 Sep 22 23:15:11.662 DEBG Flush just extent 12 with f:2 and g:2
44358 Sep 22 23:15:11.662 DEBG [1] It's time to notify for 412
44359 Sep 22 23:15:11.662 INFO Completion from [1] id:412 status:true
44360 Sep 22 23:15:11.662 INFO [413/752] Repair commands completed
44361 Sep 22 23:15:11.662 INFO Pop front: ReconcileIO { id: ReconciliationId(413), op: ExtentClose { repair_id: ReconciliationId(413), extent_id: 12 }, state: ClientData([New, New, New]) }
44362 Sep 22 23:15:11.662 INFO Sent repair work, now wait for resp
44363 Sep 22 23:15:11.662 INFO [0] received reconcile message
44364 Sep 22 23:15:11.662 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(413), op: ExtentClose { repair_id: ReconciliationId(413), extent_id: 12 }, state: ClientData([InProgress, New, New]) }, : downstairs
44365 Sep 22 23:15:11.662 INFO [0] client ExtentClose { repair_id: ReconciliationId(413), extent_id: 12 }
44366 Sep 22 23:15:11.662 INFO [1] received reconcile message
44367 Sep 22 23:15:11.662 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(413), op: ExtentClose { repair_id: ReconciliationId(413), extent_id: 12 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44368 Sep 22 23:15:11.662 INFO [1] client ExtentClose { repair_id: ReconciliationId(413), extent_id: 12 }
44369 Sep 22 23:15:11.662 INFO [2] received reconcile message
44370 Sep 22 23:15:11.662 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(413), op: ExtentClose { repair_id: ReconciliationId(413), extent_id: 12 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44371 Sep 22 23:15:11.662 INFO [2] client ExtentClose { repair_id: ReconciliationId(413), extent_id: 12 }
44372 Sep 22 23:15:11.663 DEBG 413 Close extent 12
44373 Sep 22 23:15:11.663 DEBG 413 Close extent 12
44374 Sep 22 23:15:11.663 DEBG 413 Close extent 12
44375 Sep 22 23:15:11.663 DEBG [2] It's time to notify for 413
44376 Sep 22 23:15:11.663 INFO Completion from [2] id:413 status:true
44377 Sep 22 23:15:11.664 INFO [414/752] Repair commands completed
44378 Sep 22 23:15:11.664 INFO Pop front: ReconcileIO { id: ReconciliationId(414), op: ExtentRepair { repair_id: ReconciliationId(414), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44379 Sep 22 23:15:11.664 INFO Sent repair work, now wait for resp
44380 Sep 22 23:15:11.664 INFO [0] received reconcile message
44381 Sep 22 23:15:11.664 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(414), op: ExtentRepair { repair_id: ReconciliationId(414), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44382 Sep 22 23:15:11.664 INFO [0] client ExtentRepair { repair_id: ReconciliationId(414), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44383 Sep 22 23:15:11.664 INFO [0] Sending repair request ReconciliationId(414)
44384 Sep 22 23:15:11.664 INFO [1] received reconcile message
44385 Sep 22 23:15:11.664 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(414), op: ExtentRepair { repair_id: ReconciliationId(414), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44386 Sep 22 23:15:11.664 INFO [1] client ExtentRepair { repair_id: ReconciliationId(414), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44387 Sep 22 23:15:11.664 INFO [1] No action required ReconciliationId(414)
44388 Sep 22 23:15:11.664 INFO [2] received reconcile message
44389 Sep 22 23:15:11.664 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(414), op: ExtentRepair { repair_id: ReconciliationId(414), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44390 Sep 22 23:15:11.664 INFO [2] client ExtentRepair { repair_id: ReconciliationId(414), extent_id: 12, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44391 Sep 22 23:15:11.664 INFO [2] No action required ReconciliationId(414)
44392 Sep 22 23:15:11.664 DEBG 414 Repair extent 12 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
44393 Sep 22 23:15:11.664 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/00C.copy"
44394 Sep 22 23:15:11.726 INFO accepted connection, remote_addr: 127.0.0.1:62790, local_addr: 127.0.0.1:46213, task: repair
44395 Sep 22 23:15:11.727 TRCE incoming request, uri: /extent/12/files, method: GET, req_id: 07f68c1f-ee80-4eaf-af07-5c54812f0f8a, remote_addr: 127.0.0.1:62790, local_addr: 127.0.0.1:46213, task: repair
44396 Sep 22 23:15:11.727 INFO request completed, latency_us: 246, response_code: 200, uri: /extent/12/files, method: GET, req_id: 07f68c1f-ee80-4eaf-af07-5c54812f0f8a, remote_addr: 127.0.0.1:62790, local_addr: 127.0.0.1:46213, task: repair
44397 Sep 22 23:15:11.727 INFO eid:12 Found repair files: ["00C", "00C.db"]
44398 Sep 22 23:15:11.727 TRCE incoming request, uri: /newextent/12/data, method: GET, req_id: c5019864-7b10-448c-89e6-d000ae79eccc, remote_addr: 127.0.0.1:62790, local_addr: 127.0.0.1:46213, task: repair
44399 Sep 22 23:15:11.728 INFO request completed, latency_us: 340, response_code: 200, uri: /newextent/12/data, method: GET, req_id: c5019864-7b10-448c-89e6-d000ae79eccc, remote_addr: 127.0.0.1:62790, local_addr: 127.0.0.1:46213, task: repair
44400 Sep 22 23:15:11.733 TRCE incoming request, uri: /newextent/12/db, method: GET, req_id: 5a5ebc74-abd4-4638-b647-a81d3527c464, remote_addr: 127.0.0.1:62790, local_addr: 127.0.0.1:46213, task: repair
44401 Sep 22 23:15:11.733 INFO request completed, latency_us: 351, response_code: 200, uri: /newextent/12/db, method: GET, req_id: 5a5ebc74-abd4-4638-b647-a81d3527c464, remote_addr: 127.0.0.1:62790, local_addr: 127.0.0.1:46213, task: repair
44402 Sep 22 23:15:11.735 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/00C.copy" to "/tmp/downstairs-vrx8aK6L/00/000/00C.replace"
44403 Sep 22 23:15:11.735 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44404 Sep 22 23:15:11.736 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/00C.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
44405 Sep 22 23:15:11.736 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/00C"
44406 Sep 22 23:15:11.736 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/00C.db"
44407 Sep 22 23:15:11.736 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44408 Sep 22 23:15:11.736 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/00C.replace" to "/tmp/downstairs-vrx8aK6L/00/000/00C.completed"
44409 Sep 22 23:15:11.736 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44410 Sep 22 23:15:11.736 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44411 Sep 22 23:15:11.737 DEBG [0] It's time to notify for 414
44412 Sep 22 23:15:11.737 INFO Completion from [0] id:414 status:true
44413 Sep 22 23:15:11.737 INFO [415/752] Repair commands completed
44414 Sep 22 23:15:11.737 INFO Pop front: ReconcileIO { id: ReconciliationId(415), op: ExtentReopen { repair_id: ReconciliationId(415), extent_id: 12 }, state: ClientData([New, New, New]) }
44415 Sep 22 23:15:11.737 INFO Sent repair work, now wait for resp
44416 Sep 22 23:15:11.737 INFO [0] received reconcile message
44417 Sep 22 23:15:11.737 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(415), op: ExtentReopen { repair_id: ReconciliationId(415), extent_id: 12 }, state: ClientData([InProgress, New, New]) }, : downstairs
44418 Sep 22 23:15:11.737 INFO [0] client ExtentReopen { repair_id: ReconciliationId(415), extent_id: 12 }
44419 Sep 22 23:15:11.737 INFO [1] received reconcile message
44420 Sep 22 23:15:11.737 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(415), op: ExtentReopen { repair_id: ReconciliationId(415), extent_id: 12 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44421 Sep 22 23:15:11.737 INFO [1] client ExtentReopen { repair_id: ReconciliationId(415), extent_id: 12 }
44422 Sep 22 23:15:11.737 INFO [2] received reconcile message
44423 Sep 22 23:15:11.737 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(415), op: ExtentReopen { repair_id: ReconciliationId(415), extent_id: 12 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44424 Sep 22 23:15:11.737 INFO [2] client ExtentReopen { repair_id: ReconciliationId(415), extent_id: 12 }
44425 Sep 22 23:15:11.737 DEBG 415 Reopen extent 12
44426 Sep 22 23:15:11.738 DEBG 415 Reopen extent 12
44427 Sep 22 23:15:11.739 DEBG 415 Reopen extent 12
44428 Sep 22 23:15:11.739 DEBG [2] It's time to notify for 415
44429 Sep 22 23:15:11.739 INFO Completion from [2] id:415 status:true
44430 Sep 22 23:15:11.739 INFO [416/752] Repair commands completed
44431 Sep 22 23:15:11.739 INFO Pop front: ReconcileIO { id: ReconciliationId(416), op: ExtentFlush { repair_id: ReconciliationId(416), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44432 Sep 22 23:15:11.739 INFO Sent repair work, now wait for resp
44433 Sep 22 23:15:11.739 INFO [0] received reconcile message
44434 Sep 22 23:15:11.739 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(416), op: ExtentFlush { repair_id: ReconciliationId(416), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44435 Sep 22 23:15:11.739 INFO [0] client ExtentFlush { repair_id: ReconciliationId(416), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44436 Sep 22 23:15:11.740 INFO [1] received reconcile message
44437 Sep 22 23:15:11.740 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(416), op: ExtentFlush { repair_id: ReconciliationId(416), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44438 Sep 22 23:15:11.740 INFO [1] client ExtentFlush { repair_id: ReconciliationId(416), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44439 Sep 22 23:15:11.740 INFO [2] received reconcile message
44440 Sep 22 23:15:11.740 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(416), op: ExtentFlush { repair_id: ReconciliationId(416), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44441 Sep 22 23:15:11.740 INFO [2] client ExtentFlush { repair_id: ReconciliationId(416), extent_id: 172, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44442 Sep 22 23:15:11.740 DEBG 416 Flush extent 172 with f:2 g:2
44443 Sep 22 23:15:11.740 DEBG Flush just extent 172 with f:2 and g:2
44444 Sep 22 23:15:11.740 DEBG [1] It's time to notify for 416
44445 Sep 22 23:15:11.740 INFO Completion from [1] id:416 status:true
44446 Sep 22 23:15:11.740 INFO [417/752] Repair commands completed
44447 Sep 22 23:15:11.740 INFO Pop front: ReconcileIO { id: ReconciliationId(417), op: ExtentClose { repair_id: ReconciliationId(417), extent_id: 172 }, state: ClientData([New, New, New]) }
44448 Sep 22 23:15:11.740 INFO Sent repair work, now wait for resp
44449 Sep 22 23:15:11.740 INFO [0] received reconcile message
44450 Sep 22 23:15:11.740 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(417), op: ExtentClose { repair_id: ReconciliationId(417), extent_id: 172 }, state: ClientData([InProgress, New, New]) }, : downstairs
44451 Sep 22 23:15:11.740 INFO [0] client ExtentClose { repair_id: ReconciliationId(417), extent_id: 172 }
44452 Sep 22 23:15:11.740 INFO [1] received reconcile message
44453 Sep 22 23:15:11.740 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(417), op: ExtentClose { repair_id: ReconciliationId(417), extent_id: 172 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44454 Sep 22 23:15:11.740 INFO [1] client ExtentClose { repair_id: ReconciliationId(417), extent_id: 172 }
44455 Sep 22 23:15:11.740 INFO [2] received reconcile message
44456 Sep 22 23:15:11.740 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(417), op: ExtentClose { repair_id: ReconciliationId(417), extent_id: 172 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44457 Sep 22 23:15:11.740 INFO [2] client ExtentClose { repair_id: ReconciliationId(417), extent_id: 172 }
44458 Sep 22 23:15:11.740 DEBG 417 Close extent 172
44459 Sep 22 23:15:11.741 DEBG 417 Close extent 172
44460 Sep 22 23:15:11.741 DEBG 417 Close extent 172
44461 Sep 22 23:15:11.741 DEBG [2] It's time to notify for 417
44462 Sep 22 23:15:11.741 INFO Completion from [2] id:417 status:true
44463 Sep 22 23:15:11.741 INFO [418/752] Repair commands completed
44464 Sep 22 23:15:11.741 INFO Pop front: ReconcileIO { id: ReconciliationId(418), op: ExtentRepair { repair_id: ReconciliationId(418), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44465 Sep 22 23:15:11.741 INFO Sent repair work, now wait for resp
44466 Sep 22 23:15:11.741 INFO [0] received reconcile message
44467 Sep 22 23:15:11.741 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(418), op: ExtentRepair { repair_id: ReconciliationId(418), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44468 Sep 22 23:15:11.741 INFO [0] client ExtentRepair { repair_id: ReconciliationId(418), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44469 Sep 22 23:15:11.742 INFO [0] Sending repair request ReconciliationId(418)
44470 Sep 22 23:15:11.742 INFO [1] received reconcile message
44471 Sep 22 23:15:11.742 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(418), op: ExtentRepair { repair_id: ReconciliationId(418), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44472 Sep 22 23:15:11.742 INFO [1] client ExtentRepair { repair_id: ReconciliationId(418), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44473 Sep 22 23:15:11.742 INFO [1] No action required ReconciliationId(418)
44474 Sep 22 23:15:11.742 INFO [2] received reconcile message
44475 Sep 22 23:15:11.742 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(418), op: ExtentRepair { repair_id: ReconciliationId(418), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44476 Sep 22 23:15:11.742 INFO [2] client ExtentRepair { repair_id: ReconciliationId(418), extent_id: 172, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44477 Sep 22 23:15:11.742 INFO [2] No action required ReconciliationId(418)
44478 Sep 22 23:15:11.742 DEBG 418 Repair extent 172 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
44479 Sep 22 23:15:11.742 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0AC.copy"
44480 Sep 22 23:15:11.804 INFO accepted connection, remote_addr: 127.0.0.1:37584, local_addr: 127.0.0.1:46213, task: repair
44481 Sep 22 23:15:11.805 TRCE incoming request, uri: /extent/172/files, method: GET, req_id: 6d40fc74-e2c7-47a0-afb4-8db6534fbcd7, remote_addr: 127.0.0.1:37584, local_addr: 127.0.0.1:46213, task: repair
44482 Sep 22 23:15:11.805 INFO request completed, latency_us: 243, response_code: 200, uri: /extent/172/files, method: GET, req_id: 6d40fc74-e2c7-47a0-afb4-8db6534fbcd7, remote_addr: 127.0.0.1:37584, local_addr: 127.0.0.1:46213, task: repair
44483 Sep 22 23:15:11.805 INFO eid:172 Found repair files: ["0AC", "0AC.db"]
44484 Sep 22 23:15:11.806 TRCE incoming request, uri: /newextent/172/data, method: GET, req_id: e195532e-1c5e-4006-af9e-7d704a7e663a, remote_addr: 127.0.0.1:37584, local_addr: 127.0.0.1:46213, task: repair
44485 Sep 22 23:15:11.806 INFO request completed, latency_us: 347, response_code: 200, uri: /newextent/172/data, method: GET, req_id: e195532e-1c5e-4006-af9e-7d704a7e663a, remote_addr: 127.0.0.1:37584, local_addr: 127.0.0.1:46213, task: repair
44486 Sep 22 23:15:11.811 TRCE incoming request, uri: /newextent/172/db, method: GET, req_id: 284e8531-3182-4ac8-9d72-b75dc82d53c3, remote_addr: 127.0.0.1:37584, local_addr: 127.0.0.1:46213, task: repair
44487 Sep 22 23:15:11.811 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/172/db, method: GET, req_id: 284e8531-3182-4ac8-9d72-b75dc82d53c3, remote_addr: 127.0.0.1:37584, local_addr: 127.0.0.1:46213, task: repair
44488 Sep 22 23:15:11.813 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0AC.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0AC.replace"
44489 Sep 22 23:15:11.813 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44490 Sep 22 23:15:11.814 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0AC.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
44491 Sep 22 23:15:11.814 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0AC"
44492 Sep 22 23:15:11.814 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0AC.db"
44493 Sep 22 23:15:11.814 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44494 Sep 22 23:15:11.814 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0AC.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0AC.completed"
44495 Sep 22 23:15:11.814 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44496 Sep 22 23:15:11.814 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44497 Sep 22 23:15:11.814 DEBG [0] It's time to notify for 418
44498 Sep 22 23:15:11.815 INFO Completion from [0] id:418 status:true
44499 Sep 22 23:15:11.815 INFO [419/752] Repair commands completed
44500 Sep 22 23:15:11.815 INFO Pop front: ReconcileIO { id: ReconciliationId(419), op: ExtentReopen { repair_id: ReconciliationId(419), extent_id: 172 }, state: ClientData([New, New, New]) }
44501 Sep 22 23:15:11.815 INFO Sent repair work, now wait for resp
44502 Sep 22 23:15:11.815 INFO [0] received reconcile message
44503 Sep 22 23:15:11.815 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(419), op: ExtentReopen { repair_id: ReconciliationId(419), extent_id: 172 }, state: ClientData([InProgress, New, New]) }, : downstairs
44504 Sep 22 23:15:11.815 INFO [0] client ExtentReopen { repair_id: ReconciliationId(419), extent_id: 172 }
44505 Sep 22 23:15:11.815 INFO [1] received reconcile message
44506 Sep 22 23:15:11.815 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(419), op: ExtentReopen { repair_id: ReconciliationId(419), extent_id: 172 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44507 Sep 22 23:15:11.815 INFO [1] client ExtentReopen { repair_id: ReconciliationId(419), extent_id: 172 }
44508 Sep 22 23:15:11.815 INFO [2] received reconcile message
44509 Sep 22 23:15:11.815 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(419), op: ExtentReopen { repair_id: ReconciliationId(419), extent_id: 172 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44510 Sep 22 23:15:11.815 INFO [2] client ExtentReopen { repair_id: ReconciliationId(419), extent_id: 172 }
44511 Sep 22 23:15:11.815 DEBG 419 Reopen extent 172
44512 Sep 22 23:15:11.816 DEBG 419 Reopen extent 172
44513 Sep 22 23:15:11.816 DEBG 419 Reopen extent 172
44514 Sep 22 23:15:11.817 DEBG [2] It's time to notify for 419
44515 Sep 22 23:15:11.817 INFO Completion from [2] id:419 status:true
44516 Sep 22 23:15:11.817 INFO [420/752] Repair commands completed
44517 Sep 22 23:15:11.817 INFO Pop front: ReconcileIO { id: ReconciliationId(420), op: ExtentFlush { repair_id: ReconciliationId(420), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44518 Sep 22 23:15:11.817 INFO Sent repair work, now wait for resp
44519 Sep 22 23:15:11.817 INFO [0] received reconcile message
44520 Sep 22 23:15:11.817 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(420), op: ExtentFlush { repair_id: ReconciliationId(420), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44521 Sep 22 23:15:11.817 INFO [0] client ExtentFlush { repair_id: ReconciliationId(420), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44522 Sep 22 23:15:11.817 INFO [1] received reconcile message
44523 Sep 22 23:15:11.817 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(420), op: ExtentFlush { repair_id: ReconciliationId(420), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44524 Sep 22 23:15:11.817 INFO [1] client ExtentFlush { repair_id: ReconciliationId(420), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44525 Sep 22 23:15:11.817 INFO [2] received reconcile message
44526 Sep 22 23:15:11.817 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(420), op: ExtentFlush { repair_id: ReconciliationId(420), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44527 Sep 22 23:15:11.817 INFO [2] client ExtentFlush { repair_id: ReconciliationId(420), extent_id: 53, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44528 Sep 22 23:15:11.817 DEBG 420 Flush extent 53 with f:2 g:2
44529 Sep 22 23:15:11.817 DEBG Flush just extent 53 with f:2 and g:2
44530 Sep 22 23:15:11.817 DEBG [1] It's time to notify for 420
44531 Sep 22 23:15:11.817 INFO Completion from [1] id:420 status:true
44532 Sep 22 23:15:11.817 INFO [421/752] Repair commands completed
44533 Sep 22 23:15:11.817 INFO Pop front: ReconcileIO { id: ReconciliationId(421), op: ExtentClose { repair_id: ReconciliationId(421), extent_id: 53 }, state: ClientData([New, New, New]) }
44534 Sep 22 23:15:11.818 INFO Sent repair work, now wait for resp
44535 Sep 22 23:15:11.818 INFO [0] received reconcile message
44536 Sep 22 23:15:11.818 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(421), op: ExtentClose { repair_id: ReconciliationId(421), extent_id: 53 }, state: ClientData([InProgress, New, New]) }, : downstairs
44537 Sep 22 23:15:11.818 INFO [0] client ExtentClose { repair_id: ReconciliationId(421), extent_id: 53 }
44538 Sep 22 23:15:11.818 INFO [1] received reconcile message
44539 Sep 22 23:15:11.818 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(421), op: ExtentClose { repair_id: ReconciliationId(421), extent_id: 53 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44540 Sep 22 23:15:11.818 INFO [1] client ExtentClose { repair_id: ReconciliationId(421), extent_id: 53 }
44541 Sep 22 23:15:11.818 INFO [2] received reconcile message
44542 Sep 22 23:15:11.818 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(421), op: ExtentClose { repair_id: ReconciliationId(421), extent_id: 53 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44543 Sep 22 23:15:11.818 INFO [2] client ExtentClose { repair_id: ReconciliationId(421), extent_id: 53 }
44544 Sep 22 23:15:11.818 DEBG 421 Close extent 53
44545 Sep 22 23:15:11.818 DEBG 421 Close extent 53
44546 Sep 22 23:15:11.818 DEBG 421 Close extent 53
44547 Sep 22 23:15:11.819 DEBG [2] It's time to notify for 421
44548 Sep 22 23:15:11.819 INFO Completion from [2] id:421 status:true
44549 Sep 22 23:15:11.819 INFO [422/752] Repair commands completed
44550 Sep 22 23:15:11.819 INFO Pop front: ReconcileIO { id: ReconciliationId(422), op: ExtentRepair { repair_id: ReconciliationId(422), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44551 Sep 22 23:15:11.819 INFO Sent repair work, now wait for resp
44552 Sep 22 23:15:11.819 INFO [0] received reconcile message
44553 Sep 22 23:15:11.819 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(422), op: ExtentRepair { repair_id: ReconciliationId(422), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44554 Sep 22 23:15:11.819 INFO [0] client ExtentRepair { repair_id: ReconciliationId(422), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44555 Sep 22 23:15:11.819 INFO [0] Sending repair request ReconciliationId(422)
44556 Sep 22 23:15:11.819 INFO [1] received reconcile message
44557 Sep 22 23:15:11.819 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(422), op: ExtentRepair { repair_id: ReconciliationId(422), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44558 Sep 22 23:15:11.819 INFO [1] client ExtentRepair { repair_id: ReconciliationId(422), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44559 Sep 22 23:15:11.819 INFO [1] No action required ReconciliationId(422)
44560 Sep 22 23:15:11.819 INFO [2] received reconcile message
44561 Sep 22 23:15:11.819 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(422), op: ExtentRepair { repair_id: ReconciliationId(422), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44562 Sep 22 23:15:11.819 INFO [2] client ExtentRepair { repair_id: ReconciliationId(422), extent_id: 53, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44563 Sep 22 23:15:11.819 INFO [2] No action required ReconciliationId(422)
44564 Sep 22 23:15:11.819 DEBG 422 Repair extent 53 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
44565 Sep 22 23:15:11.819 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/035.copy"
44566 Sep 22 23:15:11.825 DEBG [rc] retire 1083 clears [JobId(1082), JobId(1083)], : downstairs
44567 Sep 22 23:15:11.825 DEBG IO Flush 1085 has deps [JobId(1084)]
44568 Sep 22 23:15:11.831 DEBG Read :1084 deps:[JobId(1083)] res:true
44569 Sep 22 23:15:11.859 DEBG Read :1084 deps:[JobId(1083)] res:true
44570 Sep 22 23:15:11.882 INFO accepted connection, remote_addr: 127.0.0.1:49693, local_addr: 127.0.0.1:46213, task: repair
44571 Sep 22 23:15:11.883 TRCE incoming request, uri: /extent/53/files, method: GET, req_id: e9c5ddb7-bff8-43e1-a5c1-d10cc24b99ea, remote_addr: 127.0.0.1:49693, local_addr: 127.0.0.1:46213, task: repair
44572 Sep 22 23:15:11.883 INFO request completed, latency_us: 225, response_code: 200, uri: /extent/53/files, method: GET, req_id: e9c5ddb7-bff8-43e1-a5c1-d10cc24b99ea, remote_addr: 127.0.0.1:49693, local_addr: 127.0.0.1:46213, task: repair
44573 Sep 22 23:15:11.883 INFO eid:53 Found repair files: ["035", "035.db"]
44574 Sep 22 23:15:11.883 TRCE incoming request, uri: /newextent/53/data, method: GET, req_id: 62b63dc1-f589-4f65-b006-ea840af99148, remote_addr: 127.0.0.1:49693, local_addr: 127.0.0.1:46213, task: repair
44575 Sep 22 23:15:11.884 DEBG Flush :1085 extent_limit None deps:[JobId(1084)] res:true f:32 g:1
44576 Sep 22 23:15:11.884 INFO [lossy] sleeping 1 second
44577 Sep 22 23:15:11.884 INFO request completed, latency_us: 321, response_code: 200, uri: /newextent/53/data, method: GET, req_id: 62b63dc1-f589-4f65-b006-ea840af99148, remote_addr: 127.0.0.1:49693, local_addr: 127.0.0.1:46213, task: repair
44578 Sep 22 23:15:11.889 TRCE incoming request, uri: /newextent/53/db, method: GET, req_id: 0cb534ce-2ded-4bb8-84d1-33ab70990a7d, remote_addr: 127.0.0.1:49693, local_addr: 127.0.0.1:46213, task: repair
44579 Sep 22 23:15:11.889 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/53/db, method: GET, req_id: 0cb534ce-2ded-4bb8-84d1-33ab70990a7d, remote_addr: 127.0.0.1:49693, local_addr: 127.0.0.1:46213, task: repair
44580 Sep 22 23:15:11.890 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/035.copy" to "/tmp/downstairs-vrx8aK6L/00/000/035.replace"
44581 Sep 22 23:15:11.890 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44582 Sep 22 23:15:11.891 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/035.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
44583 Sep 22 23:15:11.892 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/035"
44584 Sep 22 23:15:11.892 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/035.db"
44585 Sep 22 23:15:11.892 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44586 Sep 22 23:15:11.892 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/035.replace" to "/tmp/downstairs-vrx8aK6L/00/000/035.completed"
44587 Sep 22 23:15:11.892 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44588 Sep 22 23:15:11.892 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44589 Sep 22 23:15:11.892 DEBG [0] It's time to notify for 422
44590 Sep 22 23:15:11.892 INFO Completion from [0] id:422 status:true
44591 Sep 22 23:15:11.892 INFO [423/752] Repair commands completed
44592 Sep 22 23:15:11.892 INFO Pop front: ReconcileIO { id: ReconciliationId(423), op: ExtentReopen { repair_id: ReconciliationId(423), extent_id: 53 }, state: ClientData([New, New, New]) }
44593 Sep 22 23:15:11.892 INFO Sent repair work, now wait for resp
44594 Sep 22 23:15:11.892 INFO [0] received reconcile message
44595 Sep 22 23:15:11.892 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(423), op: ExtentReopen { repair_id: ReconciliationId(423), extent_id: 53 }, state: ClientData([InProgress, New, New]) }, : downstairs
44596 Sep 22 23:15:11.892 INFO [0] client ExtentReopen { repair_id: ReconciliationId(423), extent_id: 53 }
44597 Sep 22 23:15:11.892 INFO [1] received reconcile message
44598 Sep 22 23:15:11.892 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(423), op: ExtentReopen { repair_id: ReconciliationId(423), extent_id: 53 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44599 Sep 22 23:15:11.892 INFO [1] client ExtentReopen { repair_id: ReconciliationId(423), extent_id: 53 }
44600 Sep 22 23:15:11.892 INFO [2] received reconcile message
44601 Sep 22 23:15:11.892 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(423), op: ExtentReopen { repair_id: ReconciliationId(423), extent_id: 53 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44602 Sep 22 23:15:11.892 INFO [2] client ExtentReopen { repair_id: ReconciliationId(423), extent_id: 53 }
44603 Sep 22 23:15:11.893 DEBG 423 Reopen extent 53
44604 Sep 22 23:15:11.893 DEBG 423 Reopen extent 53
44605 Sep 22 23:15:11.894 DEBG 423 Reopen extent 53
44606 Sep 22 23:15:11.894 DEBG [2] It's time to notify for 423
44607 Sep 22 23:15:11.894 INFO Completion from [2] id:423 status:true
44608 Sep 22 23:15:11.894 INFO [424/752] Repair commands completed
44609 Sep 22 23:15:11.895 INFO Pop front: ReconcileIO { id: ReconciliationId(424), op: ExtentFlush { repair_id: ReconciliationId(424), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44610 Sep 22 23:15:11.895 INFO Sent repair work, now wait for resp
44611 Sep 22 23:15:11.895 INFO [0] received reconcile message
44612 Sep 22 23:15:11.895 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(424), op: ExtentFlush { repair_id: ReconciliationId(424), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44613 Sep 22 23:15:11.895 INFO [0] client ExtentFlush { repair_id: ReconciliationId(424), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44614 Sep 22 23:15:11.895 INFO [1] received reconcile message
44615 Sep 22 23:15:11.895 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(424), op: ExtentFlush { repair_id: ReconciliationId(424), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44616 Sep 22 23:15:11.895 INFO [1] client ExtentFlush { repair_id: ReconciliationId(424), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44617 Sep 22 23:15:11.895 INFO [2] received reconcile message
44618 Sep 22 23:15:11.895 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(424), op: ExtentFlush { repair_id: ReconciliationId(424), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44619 Sep 22 23:15:11.895 INFO [2] client ExtentFlush { repair_id: ReconciliationId(424), extent_id: 175, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44620 Sep 22 23:15:11.895 DEBG 424 Flush extent 175 with f:2 g:2
44621 Sep 22 23:15:11.895 DEBG Flush just extent 175 with f:2 and g:2
44622 Sep 22 23:15:11.895 DEBG [1] It's time to notify for 424
44623 Sep 22 23:15:11.895 INFO Completion from [1] id:424 status:true
44624 Sep 22 23:15:11.895 INFO [425/752] Repair commands completed
44625 Sep 22 23:15:11.895 INFO Pop front: ReconcileIO { id: ReconciliationId(425), op: ExtentClose { repair_id: ReconciliationId(425), extent_id: 175 }, state: ClientData([New, New, New]) }
44626 Sep 22 23:15:11.895 INFO Sent repair work, now wait for resp
44627 Sep 22 23:15:11.895 INFO [0] received reconcile message
44628 Sep 22 23:15:11.895 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(425), op: ExtentClose { repair_id: ReconciliationId(425), extent_id: 175 }, state: ClientData([InProgress, New, New]) }, : downstairs
44629 Sep 22 23:15:11.895 INFO [0] client ExtentClose { repair_id: ReconciliationId(425), extent_id: 175 }
44630 Sep 22 23:15:11.895 INFO [1] received reconcile message
44631 Sep 22 23:15:11.895 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(425), op: ExtentClose { repair_id: ReconciliationId(425), extent_id: 175 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44632 Sep 22 23:15:11.895 INFO [1] client ExtentClose { repair_id: ReconciliationId(425), extent_id: 175 }
44633 Sep 22 23:15:11.895 INFO [2] received reconcile message
44634 Sep 22 23:15:11.895 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(425), op: ExtentClose { repair_id: ReconciliationId(425), extent_id: 175 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44635 Sep 22 23:15:11.895 INFO [2] client ExtentClose { repair_id: ReconciliationId(425), extent_id: 175 }
44636 Sep 22 23:15:11.895 DEBG 425 Close extent 175
44637 Sep 22 23:15:11.896 DEBG 425 Close extent 175
44638 Sep 22 23:15:11.896 DEBG 425 Close extent 175
44639 Sep 22 23:15:11.896 DEBG [2] It's time to notify for 425
44640 Sep 22 23:15:11.896 INFO Completion from [2] id:425 status:true
44641 Sep 22 23:15:11.896 INFO [426/752] Repair commands completed
44642 Sep 22 23:15:11.896 INFO Pop front: ReconcileIO { id: ReconciliationId(426), op: ExtentRepair { repair_id: ReconciliationId(426), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44643 Sep 22 23:15:11.897 INFO Sent repair work, now wait for resp
44644 Sep 22 23:15:11.897 INFO [0] received reconcile message
44645 Sep 22 23:15:11.897 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(426), op: ExtentRepair { repair_id: ReconciliationId(426), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44646 Sep 22 23:15:11.897 INFO [0] client ExtentRepair { repair_id: ReconciliationId(426), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44647 Sep 22 23:15:11.897 INFO [0] Sending repair request ReconciliationId(426)
44648 Sep 22 23:15:11.897 INFO [1] received reconcile message
44649 Sep 22 23:15:11.897 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(426), op: ExtentRepair { repair_id: ReconciliationId(426), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44650 Sep 22 23:15:11.897 INFO [1] client ExtentRepair { repair_id: ReconciliationId(426), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44651 Sep 22 23:15:11.897 INFO [1] No action required ReconciliationId(426)
44652 Sep 22 23:15:11.897 INFO [2] received reconcile message
44653 Sep 22 23:15:11.897 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(426), op: ExtentRepair { repair_id: ReconciliationId(426), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44654 Sep 22 23:15:11.897 INFO [2] client ExtentRepair { repair_id: ReconciliationId(426), extent_id: 175, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44655 Sep 22 23:15:11.897 INFO [2] No action required ReconciliationId(426)
44656 Sep 22 23:15:11.897 DEBG 426 Repair extent 175 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
44657 Sep 22 23:15:11.897 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0AF.copy"
44658 Sep 22 23:15:11.932 INFO [lossy] sleeping 1 second
44659 Sep 22 23:15:11.960 INFO accepted connection, remote_addr: 127.0.0.1:39881, local_addr: 127.0.0.1:46213, task: repair
44660 Sep 22 23:15:11.960 TRCE incoming request, uri: /extent/175/files, method: GET, req_id: f4bf0792-7ea4-4ec7-b29a-516bd932aef2, remote_addr: 127.0.0.1:39881, local_addr: 127.0.0.1:46213, task: repair
44661 Sep 22 23:15:11.960 INFO request completed, latency_us: 195, response_code: 200, uri: /extent/175/files, method: GET, req_id: f4bf0792-7ea4-4ec7-b29a-516bd932aef2, remote_addr: 127.0.0.1:39881, local_addr: 127.0.0.1:46213, task: repair
44662 Sep 22 23:15:11.961 INFO eid:175 Found repair files: ["0AF", "0AF.db"]
44663 Sep 22 23:15:11.961 TRCE incoming request, uri: /newextent/175/data, method: GET, req_id: a908de21-a3fc-47ae-a6a1-958435575886, remote_addr: 127.0.0.1:39881, local_addr: 127.0.0.1:46213, task: repair
44664 Sep 22 23:15:11.961 INFO request completed, latency_us: 335, response_code: 200, uri: /newextent/175/data, method: GET, req_id: a908de21-a3fc-47ae-a6a1-958435575886, remote_addr: 127.0.0.1:39881, local_addr: 127.0.0.1:46213, task: repair
44665 Sep 22 23:15:11.966 TRCE incoming request, uri: /newextent/175/db, method: GET, req_id: ab42246d-b87d-4fac-ad4b-447e11bdd0b5, remote_addr: 127.0.0.1:39881, local_addr: 127.0.0.1:46213, task: repair
44666 Sep 22 23:15:11.967 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/175/db, method: GET, req_id: ab42246d-b87d-4fac-ad4b-447e11bdd0b5, remote_addr: 127.0.0.1:39881, local_addr: 127.0.0.1:46213, task: repair
44667 Sep 22 23:15:11.968 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0AF.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0AF.replace"
44668 Sep 22 23:15:11.968 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44669 Sep 22 23:15:11.969 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0AF.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
44670 Sep 22 23:15:11.969 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0AF"
44671 Sep 22 23:15:11.969 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0AF.db"
44672 Sep 22 23:15:11.969 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44673 Sep 22 23:15:11.969 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0AF.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0AF.completed"
44674 Sep 22 23:15:11.969 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44675 Sep 22 23:15:11.969 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44676 Sep 22 23:15:11.969 DEBG [0] It's time to notify for 426
44677 Sep 22 23:15:11.970 INFO Completion from [0] id:426 status:true
44678 Sep 22 23:15:11.970 INFO [427/752] Repair commands completed
44679 Sep 22 23:15:11.970 INFO Pop front: ReconcileIO { id: ReconciliationId(427), op: ExtentReopen { repair_id: ReconciliationId(427), extent_id: 175 }, state: ClientData([New, New, New]) }
44680 Sep 22 23:15:11.970 INFO Sent repair work, now wait for resp
44681 Sep 22 23:15:11.970 INFO [0] received reconcile message
44682 Sep 22 23:15:11.970 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(427), op: ExtentReopen { repair_id: ReconciliationId(427), extent_id: 175 }, state: ClientData([InProgress, New, New]) }, : downstairs
44683 Sep 22 23:15:11.970 INFO [0] client ExtentReopen { repair_id: ReconciliationId(427), extent_id: 175 }
44684 Sep 22 23:15:11.970 INFO [1] received reconcile message
44685 Sep 22 23:15:11.970 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(427), op: ExtentReopen { repair_id: ReconciliationId(427), extent_id: 175 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44686 Sep 22 23:15:11.970 INFO [1] client ExtentReopen { repair_id: ReconciliationId(427), extent_id: 175 }
44687 Sep 22 23:15:11.970 INFO [2] received reconcile message
44688 Sep 22 23:15:11.970 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(427), op: ExtentReopen { repair_id: ReconciliationId(427), extent_id: 175 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44689 Sep 22 23:15:11.970 INFO [2] client ExtentReopen { repair_id: ReconciliationId(427), extent_id: 175 }
44690 Sep 22 23:15:11.970 DEBG 427 Reopen extent 175
44691 Sep 22 23:15:11.971 DEBG 427 Reopen extent 175
44692 Sep 22 23:15:11.971 DEBG 427 Reopen extent 175
44693 Sep 22 23:15:11.972 DEBG [2] It's time to notify for 427
44694 Sep 22 23:15:11.972 INFO Completion from [2] id:427 status:true
44695 Sep 22 23:15:11.972 INFO [428/752] Repair commands completed
44696 Sep 22 23:15:11.972 INFO Pop front: ReconcileIO { id: ReconciliationId(428), op: ExtentFlush { repair_id: ReconciliationId(428), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44697 Sep 22 23:15:11.972 INFO Sent repair work, now wait for resp
44698 Sep 22 23:15:11.972 INFO [0] received reconcile message
44699 Sep 22 23:15:11.972 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(428), op: ExtentFlush { repair_id: ReconciliationId(428), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44700 Sep 22 23:15:11.972 INFO [0] client ExtentFlush { repair_id: ReconciliationId(428), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44701 Sep 22 23:15:11.972 INFO [1] received reconcile message
44702 Sep 22 23:15:11.972 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(428), op: ExtentFlush { repair_id: ReconciliationId(428), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44703 Sep 22 23:15:11.972 INFO [1] client ExtentFlush { repair_id: ReconciliationId(428), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44704 Sep 22 23:15:11.972 INFO [2] received reconcile message
44705 Sep 22 23:15:11.972 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(428), op: ExtentFlush { repair_id: ReconciliationId(428), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44706 Sep 22 23:15:11.972 INFO [2] client ExtentFlush { repair_id: ReconciliationId(428), extent_id: 140, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44707 Sep 22 23:15:11.972 DEBG 428 Flush extent 140 with f:2 g:2
44708 Sep 22 23:15:11.972 DEBG Flush just extent 140 with f:2 and g:2
44709 Sep 22 23:15:11.972 DEBG [1] It's time to notify for 428
44710 Sep 22 23:15:11.972 INFO Completion from [1] id:428 status:true
44711 Sep 22 23:15:11.972 INFO [429/752] Repair commands completed
44712 Sep 22 23:15:11.972 INFO Pop front: ReconcileIO { id: ReconciliationId(429), op: ExtentClose { repair_id: ReconciliationId(429), extent_id: 140 }, state: ClientData([New, New, New]) }
44713 Sep 22 23:15:11.972 INFO Sent repair work, now wait for resp
44714 Sep 22 23:15:11.972 INFO [0] received reconcile message
44715 Sep 22 23:15:11.972 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(429), op: ExtentClose { repair_id: ReconciliationId(429), extent_id: 140 }, state: ClientData([InProgress, New, New]) }, : downstairs
44716 Sep 22 23:15:11.972 INFO [0] client ExtentClose { repair_id: ReconciliationId(429), extent_id: 140 }
44717 Sep 22 23:15:11.972 INFO [1] received reconcile message
44718 Sep 22 23:15:11.972 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(429), op: ExtentClose { repair_id: ReconciliationId(429), extent_id: 140 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44719 Sep 22 23:15:11.972 INFO [1] client ExtentClose { repair_id: ReconciliationId(429), extent_id: 140 }
44720 Sep 22 23:15:11.973 INFO [2] received reconcile message
44721 Sep 22 23:15:11.973 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(429), op: ExtentClose { repair_id: ReconciliationId(429), extent_id: 140 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44722 Sep 22 23:15:11.973 INFO [2] client ExtentClose { repair_id: ReconciliationId(429), extent_id: 140 }
44723 Sep 22 23:15:11.973 DEBG 429 Close extent 140
44724 Sep 22 23:15:11.973 DEBG 429 Close extent 140
44725 Sep 22 23:15:11.973 DEBG 429 Close extent 140
44726 Sep 22 23:15:11.974 DEBG [2] It's time to notify for 429
44727 Sep 22 23:15:11.974 INFO Completion from [2] id:429 status:true
44728 Sep 22 23:15:11.974 INFO [430/752] Repair commands completed
44729 Sep 22 23:15:11.974 INFO Pop front: ReconcileIO { id: ReconciliationId(430), op: ExtentRepair { repair_id: ReconciliationId(430), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44730 Sep 22 23:15:11.974 INFO Sent repair work, now wait for resp
44731 Sep 22 23:15:11.974 INFO [0] received reconcile message
44732 Sep 22 23:15:11.974 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(430), op: ExtentRepair { repair_id: ReconciliationId(430), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44733 Sep 22 23:15:11.974 INFO [0] client ExtentRepair { repair_id: ReconciliationId(430), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44734 Sep 22 23:15:11.974 INFO [0] Sending repair request ReconciliationId(430)
44735 Sep 22 23:15:11.974 INFO [1] received reconcile message
44736 Sep 22 23:15:11.974 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(430), op: ExtentRepair { repair_id: ReconciliationId(430), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44737 Sep 22 23:15:11.974 INFO [1] client ExtentRepair { repair_id: ReconciliationId(430), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44738 Sep 22 23:15:11.974 INFO [1] No action required ReconciliationId(430)
44739 Sep 22 23:15:11.974 INFO [2] received reconcile message
44740 Sep 22 23:15:11.974 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(430), op: ExtentRepair { repair_id: ReconciliationId(430), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44741 Sep 22 23:15:11.974 INFO [2] client ExtentRepair { repair_id: ReconciliationId(430), extent_id: 140, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44742 Sep 22 23:15:11.974 INFO [2] No action required ReconciliationId(430)
44743 Sep 22 23:15:11.974 DEBG 430 Repair extent 140 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
44744 Sep 22 23:15:11.974 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/08C.copy"
44745 Sep 22 23:15:12.037 INFO accepted connection, remote_addr: 127.0.0.1:52379, local_addr: 127.0.0.1:46213, task: repair
44746 Sep 22 23:15:12.037 TRCE incoming request, uri: /extent/140/files, method: GET, req_id: c9d4aaa1-796b-40ba-8a27-2fa3aacc8b8f, remote_addr: 127.0.0.1:52379, local_addr: 127.0.0.1:46213, task: repair
44747 Sep 22 23:15:12.038 INFO request completed, latency_us: 240, response_code: 200, uri: /extent/140/files, method: GET, req_id: c9d4aaa1-796b-40ba-8a27-2fa3aacc8b8f, remote_addr: 127.0.0.1:52379, local_addr: 127.0.0.1:46213, task: repair
44748 Sep 22 23:15:12.038 INFO eid:140 Found repair files: ["08C", "08C.db"]
44749 Sep 22 23:15:12.038 TRCE incoming request, uri: /newextent/140/data, method: GET, req_id: 20261ec3-4d50-49e9-87a4-c75652cfcfed, remote_addr: 127.0.0.1:52379, local_addr: 127.0.0.1:46213, task: repair
44750 Sep 22 23:15:12.039 INFO request completed, latency_us: 331, response_code: 200, uri: /newextent/140/data, method: GET, req_id: 20261ec3-4d50-49e9-87a4-c75652cfcfed, remote_addr: 127.0.0.1:52379, local_addr: 127.0.0.1:46213, task: repair
44751 Sep 22 23:15:12.044 TRCE incoming request, uri: /newextent/140/db, method: GET, req_id: 9792fdb7-1d39-4c86-843d-c56796939b30, remote_addr: 127.0.0.1:52379, local_addr: 127.0.0.1:46213, task: repair
44752 Sep 22 23:15:12.044 INFO request completed, latency_us: 306, response_code: 200, uri: /newextent/140/db, method: GET, req_id: 9792fdb7-1d39-4c86-843d-c56796939b30, remote_addr: 127.0.0.1:52379, local_addr: 127.0.0.1:46213, task: repair
44753 Sep 22 23:15:12.045 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/08C.copy" to "/tmp/downstairs-vrx8aK6L/00/000/08C.replace"
44754 Sep 22 23:15:12.045 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44755 Sep 22 23:15:12.046 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/08C.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
44756 Sep 22 23:15:12.047 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/08C"
44757 Sep 22 23:15:12.047 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/08C.db"
44758 Sep 22 23:15:12.047 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44759 Sep 22 23:15:12.047 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/08C.replace" to "/tmp/downstairs-vrx8aK6L/00/000/08C.completed"
44760 Sep 22 23:15:12.047 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44761 Sep 22 23:15:12.047 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44762 Sep 22 23:15:12.047 DEBG [0] It's time to notify for 430
44763 Sep 22 23:15:12.047 INFO Completion from [0] id:430 status:true
44764 Sep 22 23:15:12.047 INFO [431/752] Repair commands completed
44765 Sep 22 23:15:12.047 INFO Pop front: ReconcileIO { id: ReconciliationId(431), op: ExtentReopen { repair_id: ReconciliationId(431), extent_id: 140 }, state: ClientData([New, New, New]) }
44766 Sep 22 23:15:12.047 INFO Sent repair work, now wait for resp
44767 Sep 22 23:15:12.047 INFO [0] received reconcile message
44768 Sep 22 23:15:12.047 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(431), op: ExtentReopen { repair_id: ReconciliationId(431), extent_id: 140 }, state: ClientData([InProgress, New, New]) }, : downstairs
44769 Sep 22 23:15:12.047 INFO [0] client ExtentReopen { repair_id: ReconciliationId(431), extent_id: 140 }
44770 Sep 22 23:15:12.047 INFO [1] received reconcile message
44771 Sep 22 23:15:12.047 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(431), op: ExtentReopen { repair_id: ReconciliationId(431), extent_id: 140 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44772 Sep 22 23:15:12.047 INFO [1] client ExtentReopen { repair_id: ReconciliationId(431), extent_id: 140 }
44773 Sep 22 23:15:12.048 INFO [2] received reconcile message
44774 Sep 22 23:15:12.048 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(431), op: ExtentReopen { repair_id: ReconciliationId(431), extent_id: 140 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44775 Sep 22 23:15:12.048 INFO [2] client ExtentReopen { repair_id: ReconciliationId(431), extent_id: 140 }
44776 Sep 22 23:15:12.048 DEBG 431 Reopen extent 140
44777 Sep 22 23:15:12.048 DEBG 431 Reopen extent 140
44778 Sep 22 23:15:12.049 DEBG 431 Reopen extent 140
44779 Sep 22 23:15:12.050 DEBG [2] It's time to notify for 431
44780 Sep 22 23:15:12.050 INFO Completion from [2] id:431 status:true
44781 Sep 22 23:15:12.050 INFO [432/752] Repair commands completed
44782 Sep 22 23:15:12.050 INFO Pop front: ReconcileIO { id: ReconciliationId(432), op: ExtentFlush { repair_id: ReconciliationId(432), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44783 Sep 22 23:15:12.050 INFO Sent repair work, now wait for resp
44784 Sep 22 23:15:12.050 INFO [0] received reconcile message
44785 Sep 22 23:15:12.050 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(432), op: ExtentFlush { repair_id: ReconciliationId(432), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44786 Sep 22 23:15:12.050 INFO [0] client ExtentFlush { repair_id: ReconciliationId(432), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44787 Sep 22 23:15:12.050 INFO [1] received reconcile message
44788 Sep 22 23:15:12.050 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(432), op: ExtentFlush { repair_id: ReconciliationId(432), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44789 Sep 22 23:15:12.050 INFO [1] client ExtentFlush { repair_id: ReconciliationId(432), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44790 Sep 22 23:15:12.050 INFO [2] received reconcile message
44791 Sep 22 23:15:12.050 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(432), op: ExtentFlush { repair_id: ReconciliationId(432), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44792 Sep 22 23:15:12.050 INFO [2] client ExtentFlush { repair_id: ReconciliationId(432), extent_id: 167, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44793 Sep 22 23:15:12.050 DEBG 432 Flush extent 167 with f:2 g:2
44794 Sep 22 23:15:12.050 DEBG Flush just extent 167 with f:2 and g:2
44795 Sep 22 23:15:12.050 DEBG [1] It's time to notify for 432
44796 Sep 22 23:15:12.050 INFO Completion from [1] id:432 status:true
44797 Sep 22 23:15:12.050 INFO [433/752] Repair commands completed
44798 Sep 22 23:15:12.050 INFO Pop front: ReconcileIO { id: ReconciliationId(433), op: ExtentClose { repair_id: ReconciliationId(433), extent_id: 167 }, state: ClientData([New, New, New]) }
44799 Sep 22 23:15:12.050 INFO Sent repair work, now wait for resp
44800 Sep 22 23:15:12.050 INFO [0] received reconcile message
44801 Sep 22 23:15:12.050 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(433), op: ExtentClose { repair_id: ReconciliationId(433), extent_id: 167 }, state: ClientData([InProgress, New, New]) }, : downstairs
44802 Sep 22 23:15:12.050 INFO [0] client ExtentClose { repair_id: ReconciliationId(433), extent_id: 167 }
44803 Sep 22 23:15:12.050 INFO [1] received reconcile message
44804 Sep 22 23:15:12.050 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(433), op: ExtentClose { repair_id: ReconciliationId(433), extent_id: 167 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44805 Sep 22 23:15:12.050 INFO [1] client ExtentClose { repair_id: ReconciliationId(433), extent_id: 167 }
44806 Sep 22 23:15:12.050 INFO [2] received reconcile message
44807 Sep 22 23:15:12.050 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(433), op: ExtentClose { repair_id: ReconciliationId(433), extent_id: 167 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44808 Sep 22 23:15:12.050 INFO [2] client ExtentClose { repair_id: ReconciliationId(433), extent_id: 167 }
44809 Sep 22 23:15:12.051 DEBG 433 Close extent 167
44810 Sep 22 23:15:12.051 DEBG 433 Close extent 167
44811 Sep 22 23:15:12.051 DEBG 433 Close extent 167
44812 Sep 22 23:15:12.052 DEBG [2] It's time to notify for 433
44813 Sep 22 23:15:12.052 INFO Completion from [2] id:433 status:true
44814 Sep 22 23:15:12.052 INFO [434/752] Repair commands completed
44815 Sep 22 23:15:12.052 INFO Pop front: ReconcileIO { id: ReconciliationId(434), op: ExtentRepair { repair_id: ReconciliationId(434), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44816 Sep 22 23:15:12.052 INFO Sent repair work, now wait for resp
44817 Sep 22 23:15:12.052 INFO [0] received reconcile message
44818 Sep 22 23:15:12.052 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(434), op: ExtentRepair { repair_id: ReconciliationId(434), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44819 Sep 22 23:15:12.052 INFO [0] client ExtentRepair { repair_id: ReconciliationId(434), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44820 Sep 22 23:15:12.052 INFO [0] Sending repair request ReconciliationId(434)
44821 Sep 22 23:15:12.052 INFO [1] received reconcile message
44822 Sep 22 23:15:12.052 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(434), op: ExtentRepair { repair_id: ReconciliationId(434), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44823 Sep 22 23:15:12.052 INFO [1] client ExtentRepair { repair_id: ReconciliationId(434), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44824 Sep 22 23:15:12.052 INFO [1] No action required ReconciliationId(434)
44825 Sep 22 23:15:12.052 INFO [2] received reconcile message
44826 Sep 22 23:15:12.052 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(434), op: ExtentRepair { repair_id: ReconciliationId(434), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44827 Sep 22 23:15:12.052 INFO [2] client ExtentRepair { repair_id: ReconciliationId(434), extent_id: 167, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44828 Sep 22 23:15:12.052 INFO [2] No action required ReconciliationId(434)
44829 Sep 22 23:15:12.052 DEBG 434 Repair extent 167 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
44830 Sep 22 23:15:12.052 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0A7.copy"
44831 Sep 22 23:15:12.117 INFO accepted connection, remote_addr: 127.0.0.1:62058, local_addr: 127.0.0.1:46213, task: repair
44832 Sep 22 23:15:12.117 TRCE incoming request, uri: /extent/167/files, method: GET, req_id: 5541d023-016e-4024-9ed1-0ee81abc8b66, remote_addr: 127.0.0.1:62058, local_addr: 127.0.0.1:46213, task: repair
44833 Sep 22 23:15:12.117 INFO request completed, latency_us: 191, response_code: 200, uri: /extent/167/files, method: GET, req_id: 5541d023-016e-4024-9ed1-0ee81abc8b66, remote_addr: 127.0.0.1:62058, local_addr: 127.0.0.1:46213, task: repair
44834 Sep 22 23:15:12.118 INFO eid:167 Found repair files: ["0A7", "0A7.db"]
44835 Sep 22 23:15:12.118 TRCE incoming request, uri: /newextent/167/data, method: GET, req_id: f71f775f-fbaa-4297-824c-9cb66eeae8dd, remote_addr: 127.0.0.1:62058, local_addr: 127.0.0.1:46213, task: repair
44836 Sep 22 23:15:12.118 INFO request completed, latency_us: 344, response_code: 200, uri: /newextent/167/data, method: GET, req_id: f71f775f-fbaa-4297-824c-9cb66eeae8dd, remote_addr: 127.0.0.1:62058, local_addr: 127.0.0.1:46213, task: repair
44837 Sep 22 23:15:12.124 TRCE incoming request, uri: /newextent/167/db, method: GET, req_id: d723036b-0083-4e5e-96f8-5164bf9ea101, remote_addr: 127.0.0.1:62058, local_addr: 127.0.0.1:46213, task: repair
44838 Sep 22 23:15:12.124 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/167/db, method: GET, req_id: d723036b-0083-4e5e-96f8-5164bf9ea101, remote_addr: 127.0.0.1:62058, local_addr: 127.0.0.1:46213, task: repair
44839 Sep 22 23:15:12.125 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0A7.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0A7.replace"
44840 Sep 22 23:15:12.125 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44841 Sep 22 23:15:12.126 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0A7.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
44842 Sep 22 23:15:12.126 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A7"
44843 Sep 22 23:15:12.126 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A7.db"
44844 Sep 22 23:15:12.126 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44845 Sep 22 23:15:12.126 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0A7.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0A7.completed"
44846 Sep 22 23:15:12.126 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44847 Sep 22 23:15:12.126 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44848 Sep 22 23:15:12.127 DEBG [0] It's time to notify for 434
44849 Sep 22 23:15:12.127 INFO Completion from [0] id:434 status:true
44850 Sep 22 23:15:12.127 INFO [435/752] Repair commands completed
44851 Sep 22 23:15:12.127 INFO Pop front: ReconcileIO { id: ReconciliationId(435), op: ExtentReopen { repair_id: ReconciliationId(435), extent_id: 167 }, state: ClientData([New, New, New]) }
44852 Sep 22 23:15:12.127 INFO Sent repair work, now wait for resp
44853 Sep 22 23:15:12.127 INFO [0] received reconcile message
44854 Sep 22 23:15:12.127 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(435), op: ExtentReopen { repair_id: ReconciliationId(435), extent_id: 167 }, state: ClientData([InProgress, New, New]) }, : downstairs
44855 Sep 22 23:15:12.127 INFO [0] client ExtentReopen { repair_id: ReconciliationId(435), extent_id: 167 }
44856 Sep 22 23:15:12.127 INFO [1] received reconcile message
44857 Sep 22 23:15:12.127 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(435), op: ExtentReopen { repair_id: ReconciliationId(435), extent_id: 167 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44858 Sep 22 23:15:12.127 INFO [1] client ExtentReopen { repair_id: ReconciliationId(435), extent_id: 167 }
44859 Sep 22 23:15:12.127 INFO [2] received reconcile message
44860 Sep 22 23:15:12.127 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(435), op: ExtentReopen { repair_id: ReconciliationId(435), extent_id: 167 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44861 Sep 22 23:15:12.127 INFO [2] client ExtentReopen { repair_id: ReconciliationId(435), extent_id: 167 }
44862 Sep 22 23:15:12.127 DEBG 435 Reopen extent 167
44863 Sep 22 23:15:12.128 DEBG 435 Reopen extent 167
44864 Sep 22 23:15:12.128 DEBG 435 Reopen extent 167
44865 Sep 22 23:15:12.129 DEBG [2] It's time to notify for 435
44866 Sep 22 23:15:12.129 INFO Completion from [2] id:435 status:true
44867 Sep 22 23:15:12.129 INFO [436/752] Repair commands completed
44868 Sep 22 23:15:12.129 INFO Pop front: ReconcileIO { id: ReconciliationId(436), op: ExtentFlush { repair_id: ReconciliationId(436), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44869 Sep 22 23:15:12.129 INFO Sent repair work, now wait for resp
44870 Sep 22 23:15:12.129 INFO [0] received reconcile message
44871 Sep 22 23:15:12.129 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(436), op: ExtentFlush { repair_id: ReconciliationId(436), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44872 Sep 22 23:15:12.129 INFO [0] client ExtentFlush { repair_id: ReconciliationId(436), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44873 Sep 22 23:15:12.129 INFO [1] received reconcile message
44874 Sep 22 23:15:12.129 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(436), op: ExtentFlush { repair_id: ReconciliationId(436), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44875 Sep 22 23:15:12.129 INFO [1] client ExtentFlush { repair_id: ReconciliationId(436), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44876 Sep 22 23:15:12.129 INFO [2] received reconcile message
44877 Sep 22 23:15:12.129 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(436), op: ExtentFlush { repair_id: ReconciliationId(436), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44878 Sep 22 23:15:12.129 INFO [2] client ExtentFlush { repair_id: ReconciliationId(436), extent_id: 142, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44879 Sep 22 23:15:12.129 DEBG 436 Flush extent 142 with f:2 g:2
44880 Sep 22 23:15:12.129 DEBG Flush just extent 142 with f:2 and g:2
44881 Sep 22 23:15:12.129 DEBG [1] It's time to notify for 436
44882 Sep 22 23:15:12.129 INFO Completion from [1] id:436 status:true
44883 Sep 22 23:15:12.129 INFO [437/752] Repair commands completed
44884 Sep 22 23:15:12.129 INFO Pop front: ReconcileIO { id: ReconciliationId(437), op: ExtentClose { repair_id: ReconciliationId(437), extent_id: 142 }, state: ClientData([New, New, New]) }
44885 Sep 22 23:15:12.130 INFO Sent repair work, now wait for resp
44886 Sep 22 23:15:12.130 INFO [0] received reconcile message
44887 Sep 22 23:15:12.130 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(437), op: ExtentClose { repair_id: ReconciliationId(437), extent_id: 142 }, state: ClientData([InProgress, New, New]) }, : downstairs
44888 Sep 22 23:15:12.130 INFO [0] client ExtentClose { repair_id: ReconciliationId(437), extent_id: 142 }
44889 Sep 22 23:15:12.130 INFO [1] received reconcile message
44890 Sep 22 23:15:12.130 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(437), op: ExtentClose { repair_id: ReconciliationId(437), extent_id: 142 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44891 Sep 22 23:15:12.130 INFO [1] client ExtentClose { repair_id: ReconciliationId(437), extent_id: 142 }
44892 Sep 22 23:15:12.130 INFO [2] received reconcile message
44893 Sep 22 23:15:12.130 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(437), op: ExtentClose { repair_id: ReconciliationId(437), extent_id: 142 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44894 Sep 22 23:15:12.130 INFO [2] client ExtentClose { repair_id: ReconciliationId(437), extent_id: 142 }
44895 Sep 22 23:15:12.130 DEBG 437 Close extent 142
44896 Sep 22 23:15:12.130 DEBG 437 Close extent 142
44897 Sep 22 23:15:12.130 DEBG 437 Close extent 142
44898 Sep 22 23:15:12.131 DEBG [2] It's time to notify for 437
44899 Sep 22 23:15:12.131 INFO Completion from [2] id:437 status:true
44900 Sep 22 23:15:12.131 INFO [438/752] Repair commands completed
44901 Sep 22 23:15:12.131 INFO Pop front: ReconcileIO { id: ReconciliationId(438), op: ExtentRepair { repair_id: ReconciliationId(438), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44902 Sep 22 23:15:12.131 INFO Sent repair work, now wait for resp
44903 Sep 22 23:15:12.131 INFO [0] received reconcile message
44904 Sep 22 23:15:12.131 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(438), op: ExtentRepair { repair_id: ReconciliationId(438), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44905 Sep 22 23:15:12.131 INFO [0] client ExtentRepair { repair_id: ReconciliationId(438), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44906 Sep 22 23:15:12.131 INFO [0] Sending repair request ReconciliationId(438)
44907 Sep 22 23:15:12.131 INFO [1] received reconcile message
44908 Sep 22 23:15:12.131 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(438), op: ExtentRepair { repair_id: ReconciliationId(438), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44909 Sep 22 23:15:12.131 INFO [1] client ExtentRepair { repair_id: ReconciliationId(438), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44910 Sep 22 23:15:12.131 INFO [1] No action required ReconciliationId(438)
44911 Sep 22 23:15:12.131 INFO [2] received reconcile message
44912 Sep 22 23:15:12.131 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(438), op: ExtentRepair { repair_id: ReconciliationId(438), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44913 Sep 22 23:15:12.131 INFO [2] client ExtentRepair { repair_id: ReconciliationId(438), extent_id: 142, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44914 Sep 22 23:15:12.131 INFO [2] No action required ReconciliationId(438)
44915 Sep 22 23:15:12.131 DEBG 438 Repair extent 142 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
44916 Sep 22 23:15:12.131 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/08E.copy"
44917 Sep 22 23:15:12.194 INFO accepted connection, remote_addr: 127.0.0.1:50202, local_addr: 127.0.0.1:46213, task: repair
44918 Sep 22 23:15:12.194 TRCE incoming request, uri: /extent/142/files, method: GET, req_id: 35da7fed-ae8c-4d5a-8cc6-47040391d177, remote_addr: 127.0.0.1:50202, local_addr: 127.0.0.1:46213, task: repair
44919 Sep 22 23:15:12.195 INFO request completed, latency_us: 197, response_code: 200, uri: /extent/142/files, method: GET, req_id: 35da7fed-ae8c-4d5a-8cc6-47040391d177, remote_addr: 127.0.0.1:50202, local_addr: 127.0.0.1:46213, task: repair
44920 Sep 22 23:15:12.195 INFO eid:142 Found repair files: ["08E", "08E.db"]
44921 Sep 22 23:15:12.195 TRCE incoming request, uri: /newextent/142/data, method: GET, req_id: 5e192a6a-1d91-44c3-a632-1db637d7e88f, remote_addr: 127.0.0.1:50202, local_addr: 127.0.0.1:46213, task: repair
44922 Sep 22 23:15:12.195 INFO request completed, latency_us: 325, response_code: 200, uri: /newextent/142/data, method: GET, req_id: 5e192a6a-1d91-44c3-a632-1db637d7e88f, remote_addr: 127.0.0.1:50202, local_addr: 127.0.0.1:46213, task: repair
44923 Sep 22 23:15:12.201 TRCE incoming request, uri: /newextent/142/db, method: GET, req_id: 83dc4f83-ab55-4e3b-9c16-13819b0b5634, remote_addr: 127.0.0.1:50202, local_addr: 127.0.0.1:46213, task: repair
44924 Sep 22 23:15:12.201 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/142/db, method: GET, req_id: 83dc4f83-ab55-4e3b-9c16-13819b0b5634, remote_addr: 127.0.0.1:50202, local_addr: 127.0.0.1:46213, task: repair
44925 Sep 22 23:15:12.202 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/08E.copy" to "/tmp/downstairs-vrx8aK6L/00/000/08E.replace"
44926 Sep 22 23:15:12.202 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44927 Sep 22 23:15:12.203 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/08E.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
44928 Sep 22 23:15:12.203 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/08E"
44929 Sep 22 23:15:12.203 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/08E.db"
44930 Sep 22 23:15:12.203 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44931 Sep 22 23:15:12.203 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/08E.replace" to "/tmp/downstairs-vrx8aK6L/00/000/08E.completed"
44932 Sep 22 23:15:12.203 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44933 Sep 22 23:15:12.203 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
44934 Sep 22 23:15:12.204 DEBG [0] It's time to notify for 438
44935 Sep 22 23:15:12.204 INFO Completion from [0] id:438 status:true
44936 Sep 22 23:15:12.204 INFO [439/752] Repair commands completed
44937 Sep 22 23:15:12.204 INFO Pop front: ReconcileIO { id: ReconciliationId(439), op: ExtentReopen { repair_id: ReconciliationId(439), extent_id: 142 }, state: ClientData([New, New, New]) }
44938 Sep 22 23:15:12.204 INFO Sent repair work, now wait for resp
44939 Sep 22 23:15:12.204 INFO [0] received reconcile message
44940 Sep 22 23:15:12.204 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(439), op: ExtentReopen { repair_id: ReconciliationId(439), extent_id: 142 }, state: ClientData([InProgress, New, New]) }, : downstairs
44941 Sep 22 23:15:12.204 INFO [0] client ExtentReopen { repair_id: ReconciliationId(439), extent_id: 142 }
44942 Sep 22 23:15:12.204 INFO [1] received reconcile message
44943 Sep 22 23:15:12.204 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(439), op: ExtentReopen { repair_id: ReconciliationId(439), extent_id: 142 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44944 Sep 22 23:15:12.204 INFO [1] client ExtentReopen { repair_id: ReconciliationId(439), extent_id: 142 }
44945 Sep 22 23:15:12.204 INFO [2] received reconcile message
44946 Sep 22 23:15:12.204 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(439), op: ExtentReopen { repair_id: ReconciliationId(439), extent_id: 142 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44947 Sep 22 23:15:12.204 INFO [2] client ExtentReopen { repair_id: ReconciliationId(439), extent_id: 142 }
44948 Sep 22 23:15:12.204 DEBG 439 Reopen extent 142
44949 Sep 22 23:15:12.205 DEBG 439 Reopen extent 142
44950 Sep 22 23:15:12.205 DEBG 439 Reopen extent 142
44951 Sep 22 23:15:12.206 DEBG [2] It's time to notify for 439
44952 Sep 22 23:15:12.206 INFO Completion from [2] id:439 status:true
44953 Sep 22 23:15:12.206 INFO [440/752] Repair commands completed
44954 Sep 22 23:15:12.206 INFO Pop front: ReconcileIO { id: ReconciliationId(440), op: ExtentFlush { repair_id: ReconciliationId(440), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
44955 Sep 22 23:15:12.206 INFO Sent repair work, now wait for resp
44956 Sep 22 23:15:12.206 INFO [0] received reconcile message
44957 Sep 22 23:15:12.206 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(440), op: ExtentFlush { repair_id: ReconciliationId(440), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
44958 Sep 22 23:15:12.206 INFO [0] client ExtentFlush { repair_id: ReconciliationId(440), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44959 Sep 22 23:15:12.206 INFO [1] received reconcile message
44960 Sep 22 23:15:12.206 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(440), op: ExtentFlush { repair_id: ReconciliationId(440), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
44961 Sep 22 23:15:12.206 INFO [1] client ExtentFlush { repair_id: ReconciliationId(440), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44962 Sep 22 23:15:12.206 INFO [2] received reconcile message
44963 Sep 22 23:15:12.206 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(440), op: ExtentFlush { repair_id: ReconciliationId(440), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
44964 Sep 22 23:15:12.206 INFO [2] client ExtentFlush { repair_id: ReconciliationId(440), extent_id: 171, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
44965 Sep 22 23:15:12.206 DEBG 440 Flush extent 171 with f:2 g:2
44966 Sep 22 23:15:12.206 DEBG Flush just extent 171 with f:2 and g:2
44967 Sep 22 23:15:12.206 DEBG [1] It's time to notify for 440
44968 Sep 22 23:15:12.207 INFO Completion from [1] id:440 status:true
44969 Sep 22 23:15:12.207 INFO [441/752] Repair commands completed
44970 Sep 22 23:15:12.207 INFO Pop front: ReconcileIO { id: ReconciliationId(441), op: ExtentClose { repair_id: ReconciliationId(441), extent_id: 171 }, state: ClientData([New, New, New]) }
44971 Sep 22 23:15:12.207 INFO Sent repair work, now wait for resp
44972 Sep 22 23:15:12.207 INFO [0] received reconcile message
44973 Sep 22 23:15:12.207 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(441), op: ExtentClose { repair_id: ReconciliationId(441), extent_id: 171 }, state: ClientData([InProgress, New, New]) }, : downstairs
44974 Sep 22 23:15:12.207 INFO [0] client ExtentClose { repair_id: ReconciliationId(441), extent_id: 171 }
44975 Sep 22 23:15:12.207 INFO [1] received reconcile message
44976 Sep 22 23:15:12.207 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(441), op: ExtentClose { repair_id: ReconciliationId(441), extent_id: 171 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44977 Sep 22 23:15:12.207 INFO [1] client ExtentClose { repair_id: ReconciliationId(441), extent_id: 171 }
44978 Sep 22 23:15:12.207 INFO [2] received reconcile message
44979 Sep 22 23:15:12.207 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(441), op: ExtentClose { repair_id: ReconciliationId(441), extent_id: 171 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
44980 Sep 22 23:15:12.207 INFO [2] client ExtentClose { repair_id: ReconciliationId(441), extent_id: 171 }
44981 Sep 22 23:15:12.207 DEBG 441 Close extent 171
44982 Sep 22 23:15:12.207 DEBG 441 Close extent 171
44983 Sep 22 23:15:12.207 DEBG 441 Close extent 171
44984 Sep 22 23:15:12.208 DEBG [2] It's time to notify for 441
44985 Sep 22 23:15:12.208 INFO Completion from [2] id:441 status:true
44986 Sep 22 23:15:12.208 INFO [442/752] Repair commands completed
44987 Sep 22 23:15:12.208 INFO Pop front: ReconcileIO { id: ReconciliationId(442), op: ExtentRepair { repair_id: ReconciliationId(442), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
44988 Sep 22 23:15:12.208 INFO Sent repair work, now wait for resp
44989 Sep 22 23:15:12.208 INFO [0] received reconcile message
44990 Sep 22 23:15:12.208 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(442), op: ExtentRepair { repair_id: ReconciliationId(442), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
44991 Sep 22 23:15:12.208 INFO [0] client ExtentRepair { repair_id: ReconciliationId(442), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44992 Sep 22 23:15:12.208 INFO [0] Sending repair request ReconciliationId(442)
44993 Sep 22 23:15:12.208 INFO [1] received reconcile message
44994 Sep 22 23:15:12.208 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(442), op: ExtentRepair { repair_id: ReconciliationId(442), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
44995 Sep 22 23:15:12.208 INFO [1] client ExtentRepair { repair_id: ReconciliationId(442), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
44996 Sep 22 23:15:12.208 INFO [1] No action required ReconciliationId(442)
44997 Sep 22 23:15:12.208 INFO [2] received reconcile message
44998 Sep 22 23:15:12.208 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(442), op: ExtentRepair { repair_id: ReconciliationId(442), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
44999 Sep 22 23:15:12.208 INFO [2] client ExtentRepair { repair_id: ReconciliationId(442), extent_id: 171, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45000 Sep 22 23:15:12.208 INFO [2] No action required ReconciliationId(442)
45001 Sep 22 23:15:12.208 DEBG 442 Repair extent 171 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
45002 Sep 22 23:15:12.208 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0AB.copy"
45003 Sep 22 23:15:12.261 DEBG [2] Read AckReady 1084, : downstairs
45004 Sep 22 23:15:12.262 DEBG up_ds_listen was notified
45005 Sep 22 23:15:12.262 DEBG up_ds_listen process 1084
45006 Sep 22 23:15:12.262 DEBG [A] ack job 1084:85, : downstairs
45007 Sep 22 23:15:12.272 INFO accepted connection, remote_addr: 127.0.0.1:36145, local_addr: 127.0.0.1:46213, task: repair
45008 Sep 22 23:15:12.272 TRCE incoming request, uri: /extent/171/files, method: GET, req_id: 0384f40a-e41b-4027-9f1c-d85df518581e, remote_addr: 127.0.0.1:36145, local_addr: 127.0.0.1:46213, task: repair
45009 Sep 22 23:15:12.273 INFO request completed, latency_us: 193, response_code: 200, uri: /extent/171/files, method: GET, req_id: 0384f40a-e41b-4027-9f1c-d85df518581e, remote_addr: 127.0.0.1:36145, local_addr: 127.0.0.1:46213, task: repair
45010 Sep 22 23:15:12.273 INFO eid:171 Found repair files: ["0AB", "0AB.db"]
45011 Sep 22 23:15:12.273 TRCE incoming request, uri: /newextent/171/data, method: GET, req_id: 77719b3b-f1ed-42a5-9a14-6e190e6edf99, remote_addr: 127.0.0.1:36145, local_addr: 127.0.0.1:46213, task: repair
45012 Sep 22 23:15:12.273 INFO request completed, latency_us: 319, response_code: 200, uri: /newextent/171/data, method: GET, req_id: 77719b3b-f1ed-42a5-9a14-6e190e6edf99, remote_addr: 127.0.0.1:36145, local_addr: 127.0.0.1:46213, task: repair
45013 Sep 22 23:15:12.279 TRCE incoming request, uri: /newextent/171/db, method: GET, req_id: 16d4f5ca-9b40-42f1-b429-46f94f21af4c, remote_addr: 127.0.0.1:36145, local_addr: 127.0.0.1:46213, task: repair
45014 Sep 22 23:15:12.279 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/171/db, method: GET, req_id: 16d4f5ca-9b40-42f1-b429-46f94f21af4c, remote_addr: 127.0.0.1:36145, local_addr: 127.0.0.1:46213, task: repair
45015 Sep 22 23:15:12.280 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0AB.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0AB.replace"
45016 Sep 22 23:15:12.280 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45017 Sep 22 23:15:12.281 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0AB.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
45018 Sep 22 23:15:12.281 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0AB"
45019 Sep 22 23:15:12.281 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0AB.db"
45020 Sep 22 23:15:12.281 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45021 Sep 22 23:15:12.281 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0AB.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0AB.completed"
45022 Sep 22 23:15:12.281 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45023 Sep 22 23:15:12.281 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45024 Sep 22 23:15:12.282 DEBG [0] It's time to notify for 442
45025 Sep 22 23:15:12.282 INFO Completion from [0] id:442 status:true
45026 Sep 22 23:15:12.282 INFO [443/752] Repair commands completed
45027 Sep 22 23:15:12.282 INFO Pop front: ReconcileIO { id: ReconciliationId(443), op: ExtentReopen { repair_id: ReconciliationId(443), extent_id: 171 }, state: ClientData([New, New, New]) }
45028 Sep 22 23:15:12.282 INFO Sent repair work, now wait for resp
45029 Sep 22 23:15:12.282 INFO [0] received reconcile message
45030 Sep 22 23:15:12.282 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(443), op: ExtentReopen { repair_id: ReconciliationId(443), extent_id: 171 }, state: ClientData([InProgress, New, New]) }, : downstairs
45031 Sep 22 23:15:12.282 INFO [0] client ExtentReopen { repair_id: ReconciliationId(443), extent_id: 171 }
45032 Sep 22 23:15:12.282 INFO [1] received reconcile message
45033 Sep 22 23:15:12.282 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(443), op: ExtentReopen { repair_id: ReconciliationId(443), extent_id: 171 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45034 Sep 22 23:15:12.282 INFO [1] client ExtentReopen { repair_id: ReconciliationId(443), extent_id: 171 }
45035 Sep 22 23:15:12.282 INFO [2] received reconcile message
45036 Sep 22 23:15:12.282 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(443), op: ExtentReopen { repair_id: ReconciliationId(443), extent_id: 171 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45037 Sep 22 23:15:12.282 INFO [2] client ExtentReopen { repair_id: ReconciliationId(443), extent_id: 171 }
45038 Sep 22 23:15:12.282 DEBG 443 Reopen extent 171
45039 Sep 22 23:15:12.283 DEBG 443 Reopen extent 171
45040 Sep 22 23:15:12.283 DEBG 443 Reopen extent 171
45041 Sep 22 23:15:12.284 DEBG [2] It's time to notify for 443
45042 Sep 22 23:15:12.284 INFO Completion from [2] id:443 status:true
45043 Sep 22 23:15:12.284 INFO [444/752] Repair commands completed
45044 Sep 22 23:15:12.284 INFO Pop front: ReconcileIO { id: ReconciliationId(444), op: ExtentFlush { repair_id: ReconciliationId(444), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45045 Sep 22 23:15:12.284 INFO Sent repair work, now wait for resp
45046 Sep 22 23:15:12.284 INFO [0] received reconcile message
45047 Sep 22 23:15:12.284 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(444), op: ExtentFlush { repair_id: ReconciliationId(444), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45048 Sep 22 23:15:12.284 INFO [0] client ExtentFlush { repair_id: ReconciliationId(444), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45049 Sep 22 23:15:12.284 INFO [1] received reconcile message
45050 Sep 22 23:15:12.284 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(444), op: ExtentFlush { repair_id: ReconciliationId(444), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45051 Sep 22 23:15:12.284 INFO [1] client ExtentFlush { repair_id: ReconciliationId(444), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45052 Sep 22 23:15:12.284 INFO [2] received reconcile message
45053 Sep 22 23:15:12.284 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(444), op: ExtentFlush { repair_id: ReconciliationId(444), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45054 Sep 22 23:15:12.284 INFO [2] client ExtentFlush { repair_id: ReconciliationId(444), extent_id: 42, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45055 Sep 22 23:15:12.284 DEBG 444 Flush extent 42 with f:2 g:2
45056 Sep 22 23:15:12.284 DEBG Flush just extent 42 with f:2 and g:2
45057 Sep 22 23:15:12.284 DEBG [1] It's time to notify for 444
45058 Sep 22 23:15:12.284 INFO Completion from [1] id:444 status:true
45059 Sep 22 23:15:12.284 INFO [445/752] Repair commands completed
45060 Sep 22 23:15:12.284 INFO Pop front: ReconcileIO { id: ReconciliationId(445), op: ExtentClose { repair_id: ReconciliationId(445), extent_id: 42 }, state: ClientData([New, New, New]) }
45061 Sep 22 23:15:12.284 INFO Sent repair work, now wait for resp
45062 Sep 22 23:15:12.285 INFO [0] received reconcile message
45063 Sep 22 23:15:12.285 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(445), op: ExtentClose { repair_id: ReconciliationId(445), extent_id: 42 }, state: ClientData([InProgress, New, New]) }, : downstairs
45064 Sep 22 23:15:12.285 INFO [0] client ExtentClose { repair_id: ReconciliationId(445), extent_id: 42 }
45065 Sep 22 23:15:12.285 INFO [1] received reconcile message
45066 Sep 22 23:15:12.285 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(445), op: ExtentClose { repair_id: ReconciliationId(445), extent_id: 42 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45067 Sep 22 23:15:12.285 INFO [1] client ExtentClose { repair_id: ReconciliationId(445), extent_id: 42 }
45068 Sep 22 23:15:12.285 INFO [2] received reconcile message
45069 Sep 22 23:15:12.285 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(445), op: ExtentClose { repair_id: ReconciliationId(445), extent_id: 42 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45070 Sep 22 23:15:12.285 INFO [2] client ExtentClose { repair_id: ReconciliationId(445), extent_id: 42 }
45071 Sep 22 23:15:12.285 DEBG 445 Close extent 42
45072 Sep 22 23:15:12.285 DEBG 445 Close extent 42
45073 Sep 22 23:15:12.285 DEBG 445 Close extent 42
45074 Sep 22 23:15:12.286 DEBG [2] It's time to notify for 445
45075 Sep 22 23:15:12.286 INFO Completion from [2] id:445 status:true
45076 Sep 22 23:15:12.286 INFO [446/752] Repair commands completed
45077 Sep 22 23:15:12.286 INFO Pop front: ReconcileIO { id: ReconciliationId(446), op: ExtentRepair { repair_id: ReconciliationId(446), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45078 Sep 22 23:15:12.286 INFO Sent repair work, now wait for resp
45079 Sep 22 23:15:12.286 INFO [0] received reconcile message
45080 Sep 22 23:15:12.286 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(446), op: ExtentRepair { repair_id: ReconciliationId(446), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45081 Sep 22 23:15:12.286 INFO [0] client ExtentRepair { repair_id: ReconciliationId(446), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45082 Sep 22 23:15:12.286 INFO [0] Sending repair request ReconciliationId(446)
45083 Sep 22 23:15:12.286 INFO [1] received reconcile message
45084 Sep 22 23:15:12.286 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(446), op: ExtentRepair { repair_id: ReconciliationId(446), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45085 Sep 22 23:15:12.286 INFO [1] client ExtentRepair { repair_id: ReconciliationId(446), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45086 Sep 22 23:15:12.286 INFO [1] No action required ReconciliationId(446)
45087 Sep 22 23:15:12.286 INFO [2] received reconcile message
45088 Sep 22 23:15:12.286 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(446), op: ExtentRepair { repair_id: ReconciliationId(446), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45089 Sep 22 23:15:12.286 INFO [2] client ExtentRepair { repair_id: ReconciliationId(446), extent_id: 42, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45090 Sep 22 23:15:12.286 INFO [2] No action required ReconciliationId(446)
45091 Sep 22 23:15:12.286 DEBG 446 Repair extent 42 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
45092 Sep 22 23:15:12.286 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/02A.copy"
45093 Sep 22 23:15:12.315 DEBG up_ds_listen checked 1 jobs, back to waiting
45094 Sep 22 23:15:12.352 INFO accepted connection, remote_addr: 127.0.0.1:45360, local_addr: 127.0.0.1:46213, task: repair
45095 Sep 22 23:15:12.352 TRCE incoming request, uri: /extent/42/files, method: GET, req_id: c2ffd996-3e6f-4578-bf41-dd130016f786, remote_addr: 127.0.0.1:45360, local_addr: 127.0.0.1:46213, task: repair
45096 Sep 22 23:15:12.352 INFO request completed, latency_us: 275, response_code: 200, uri: /extent/42/files, method: GET, req_id: c2ffd996-3e6f-4578-bf41-dd130016f786, remote_addr: 127.0.0.1:45360, local_addr: 127.0.0.1:46213, task: repair
45097 Sep 22 23:15:12.353 INFO eid:42 Found repair files: ["02A", "02A.db"]
45098 Sep 22 23:15:12.353 TRCE incoming request, uri: /newextent/42/data, method: GET, req_id: 3e49bf1a-0e17-49a6-9022-dfbc80a4c546, remote_addr: 127.0.0.1:45360, local_addr: 127.0.0.1:46213, task: repair
45099 Sep 22 23:15:12.353 INFO request completed, latency_us: 368, response_code: 200, uri: /newextent/42/data, method: GET, req_id: 3e49bf1a-0e17-49a6-9022-dfbc80a4c546, remote_addr: 127.0.0.1:45360, local_addr: 127.0.0.1:46213, task: repair
45100 Sep 22 23:15:12.359 TRCE incoming request, uri: /newextent/42/db, method: GET, req_id: 82b06844-40ca-46f1-9a79-af7401d87714, remote_addr: 127.0.0.1:45360, local_addr: 127.0.0.1:46213, task: repair
45101 Sep 22 23:15:12.359 INFO request completed, latency_us: 306, response_code: 200, uri: /newextent/42/db, method: GET, req_id: 82b06844-40ca-46f1-9a79-af7401d87714, remote_addr: 127.0.0.1:45360, local_addr: 127.0.0.1:46213, task: repair
45102 Sep 22 23:15:12.360 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/02A.copy" to "/tmp/downstairs-vrx8aK6L/00/000/02A.replace"
45103 Sep 22 23:15:12.360 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45104 Sep 22 23:15:12.362 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/02A.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
45105 Sep 22 23:15:12.362 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/02A"
45106 Sep 22 23:15:12.362 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/02A.db"
45107 Sep 22 23:15:12.362 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45108 Sep 22 23:15:12.362 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/02A.replace" to "/tmp/downstairs-vrx8aK6L/00/000/02A.completed"
45109 Sep 22 23:15:12.362 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45110 Sep 22 23:15:12.362 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45111 Sep 22 23:15:12.362 DEBG [0] It's time to notify for 446
45112 Sep 22 23:15:12.362 INFO Completion from [0] id:446 status:true
45113 Sep 22 23:15:12.362 INFO [447/752] Repair commands completed
45114 Sep 22 23:15:12.362 INFO Pop front: ReconcileIO { id: ReconciliationId(447), op: ExtentReopen { repair_id: ReconciliationId(447), extent_id: 42 }, state: ClientData([New, New, New]) }
45115 Sep 22 23:15:12.363 INFO Sent repair work, now wait for resp
45116 Sep 22 23:15:12.363 INFO [0] received reconcile message
45117 Sep 22 23:15:12.363 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(447), op: ExtentReopen { repair_id: ReconciliationId(447), extent_id: 42 }, state: ClientData([InProgress, New, New]) }, : downstairs
45118 Sep 22 23:15:12.363 INFO [0] client ExtentReopen { repair_id: ReconciliationId(447), extent_id: 42 }
45119 Sep 22 23:15:12.363 INFO [1] received reconcile message
45120 Sep 22 23:15:12.363 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(447), op: ExtentReopen { repair_id: ReconciliationId(447), extent_id: 42 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45121 Sep 22 23:15:12.363 INFO [1] client ExtentReopen { repair_id: ReconciliationId(447), extent_id: 42 }
45122 Sep 22 23:15:12.363 INFO [2] received reconcile message
45123 Sep 22 23:15:12.363 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(447), op: ExtentReopen { repair_id: ReconciliationId(447), extent_id: 42 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45124 Sep 22 23:15:12.363 INFO [2] client ExtentReopen { repair_id: ReconciliationId(447), extent_id: 42 }
45125 Sep 22 23:15:12.363 DEBG 447 Reopen extent 42
45126 Sep 22 23:15:12.364 DEBG 447 Reopen extent 42
45127 Sep 22 23:15:12.364 DEBG 447 Reopen extent 42
45128 Sep 22 23:15:12.365 DEBG [2] It's time to notify for 447
45129 Sep 22 23:15:12.365 INFO Completion from [2] id:447 status:true
45130 Sep 22 23:15:12.365 INFO [448/752] Repair commands completed
45131 Sep 22 23:15:12.365 INFO Pop front: ReconcileIO { id: ReconciliationId(448), op: ExtentFlush { repair_id: ReconciliationId(448), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45132 Sep 22 23:15:12.365 INFO Sent repair work, now wait for resp
45133 Sep 22 23:15:12.365 INFO [0] received reconcile message
45134 Sep 22 23:15:12.365 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(448), op: ExtentFlush { repair_id: ReconciliationId(448), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45135 Sep 22 23:15:12.365 INFO [0] client ExtentFlush { repair_id: ReconciliationId(448), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45136 Sep 22 23:15:12.365 INFO [1] received reconcile message
45137 Sep 22 23:15:12.365 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(448), op: ExtentFlush { repair_id: ReconciliationId(448), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45138 Sep 22 23:15:12.365 INFO [1] client ExtentFlush { repair_id: ReconciliationId(448), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45139 Sep 22 23:15:12.365 INFO [2] received reconcile message
45140 Sep 22 23:15:12.365 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(448), op: ExtentFlush { repair_id: ReconciliationId(448), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45141 Sep 22 23:15:12.365 INFO [2] client ExtentFlush { repair_id: ReconciliationId(448), extent_id: 43, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45142 Sep 22 23:15:12.365 DEBG 448 Flush extent 43 with f:2 g:2
45143 Sep 22 23:15:12.365 DEBG Flush just extent 43 with f:2 and g:2
45144 Sep 22 23:15:12.366 DEBG [1] It's time to notify for 448
45145 Sep 22 23:15:12.366 INFO Completion from [1] id:448 status:true
45146 Sep 22 23:15:12.366 INFO [449/752] Repair commands completed
45147 Sep 22 23:15:12.366 INFO Pop front: ReconcileIO { id: ReconciliationId(449), op: ExtentClose { repair_id: ReconciliationId(449), extent_id: 43 }, state: ClientData([New, New, New]) }
45148 Sep 22 23:15:12.366 INFO Sent repair work, now wait for resp
45149 Sep 22 23:15:12.366 INFO [0] received reconcile message
45150 Sep 22 23:15:12.366 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(449), op: ExtentClose { repair_id: ReconciliationId(449), extent_id: 43 }, state: ClientData([InProgress, New, New]) }, : downstairs
45151 Sep 22 23:15:12.366 INFO [0] client ExtentClose { repair_id: ReconciliationId(449), extent_id: 43 }
45152 Sep 22 23:15:12.366 INFO [lossy] sleeping 1 second
45153 Sep 22 23:15:12.366 INFO [1] received reconcile message
45154 Sep 22 23:15:12.366 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(449), op: ExtentClose { repair_id: ReconciliationId(449), extent_id: 43 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45155 Sep 22 23:15:12.366 INFO [1] client ExtentClose { repair_id: ReconciliationId(449), extent_id: 43 }
45156 Sep 22 23:15:12.366 INFO [2] received reconcile message
45157 Sep 22 23:15:12.366 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(449), op: ExtentClose { repair_id: ReconciliationId(449), extent_id: 43 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45158 Sep 22 23:15:12.366 INFO [2] client ExtentClose { repair_id: ReconciliationId(449), extent_id: 43 }
45159 Sep 22 23:15:12.366 DEBG 449 Close extent 43
45160 Sep 22 23:15:12.366 DEBG 449 Close extent 43
45161 Sep 22 23:15:12.367 DEBG 449 Close extent 43
45162 Sep 22 23:15:12.367 DEBG [2] It's time to notify for 449
45163 Sep 22 23:15:12.367 INFO Completion from [2] id:449 status:true
45164 Sep 22 23:15:12.367 INFO [450/752] Repair commands completed
45165 Sep 22 23:15:12.367 INFO Pop front: ReconcileIO { id: ReconciliationId(450), op: ExtentRepair { repair_id: ReconciliationId(450), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45166 Sep 22 23:15:12.367 INFO Sent repair work, now wait for resp
45167 Sep 22 23:15:12.367 INFO [0] received reconcile message
45168 Sep 22 23:15:12.367 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(450), op: ExtentRepair { repair_id: ReconciliationId(450), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45169 Sep 22 23:15:12.367 INFO [0] client ExtentRepair { repair_id: ReconciliationId(450), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45170 Sep 22 23:15:12.367 INFO [0] Sending repair request ReconciliationId(450)
45171 Sep 22 23:15:12.367 INFO [1] received reconcile message
45172 Sep 22 23:15:12.367 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(450), op: ExtentRepair { repair_id: ReconciliationId(450), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45173 Sep 22 23:15:12.367 INFO [1] client ExtentRepair { repair_id: ReconciliationId(450), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45174 Sep 22 23:15:12.367 INFO [1] No action required ReconciliationId(450)
45175 Sep 22 23:15:12.367 INFO [2] received reconcile message
45176 Sep 22 23:15:12.367 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(450), op: ExtentRepair { repair_id: ReconciliationId(450), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45177 Sep 22 23:15:12.367 INFO [2] client ExtentRepair { repair_id: ReconciliationId(450), extent_id: 43, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45178 Sep 22 23:15:12.367 INFO [2] No action required ReconciliationId(450)
45179 Sep 22 23:15:12.367 DEBG 450 Repair extent 43 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
45180 Sep 22 23:15:12.368 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/02B.copy"
45181 Sep 22 23:15:12.430 INFO accepted connection, remote_addr: 127.0.0.1:60105, local_addr: 127.0.0.1:46213, task: repair
45182 Sep 22 23:15:12.431 TRCE incoming request, uri: /extent/43/files, method: GET, req_id: 0879bb4d-8f86-4f18-ad7a-4e7cde96d56d, remote_addr: 127.0.0.1:60105, local_addr: 127.0.0.1:46213, task: repair
45183 Sep 22 23:15:12.431 INFO request completed, latency_us: 253, response_code: 200, uri: /extent/43/files, method: GET, req_id: 0879bb4d-8f86-4f18-ad7a-4e7cde96d56d, remote_addr: 127.0.0.1:60105, local_addr: 127.0.0.1:46213, task: repair
45184 Sep 22 23:15:12.431 INFO eid:43 Found repair files: ["02B", "02B.db"]
45185 Sep 22 23:15:12.432 TRCE incoming request, uri: /newextent/43/data, method: GET, req_id: 49fcb992-9aa5-493b-b23c-500294aa746d, remote_addr: 127.0.0.1:60105, local_addr: 127.0.0.1:46213, task: repair
45186 Sep 22 23:15:12.432 INFO request completed, latency_us: 360, response_code: 200, uri: /newextent/43/data, method: GET, req_id: 49fcb992-9aa5-493b-b23c-500294aa746d, remote_addr: 127.0.0.1:60105, local_addr: 127.0.0.1:46213, task: repair
45187 Sep 22 23:15:12.437 TRCE incoming request, uri: /newextent/43/db, method: GET, req_id: caa411d5-6ee2-453e-b17a-79e7764b89ad, remote_addr: 127.0.0.1:60105, local_addr: 127.0.0.1:46213, task: repair
45188 Sep 22 23:15:12.437 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/43/db, method: GET, req_id: caa411d5-6ee2-453e-b17a-79e7764b89ad, remote_addr: 127.0.0.1:60105, local_addr: 127.0.0.1:46213, task: repair
45189 Sep 22 23:15:12.439 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/02B.copy" to "/tmp/downstairs-vrx8aK6L/00/000/02B.replace"
45190 Sep 22 23:15:12.439 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45191 Sep 22 23:15:12.440 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/02B.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
45192 Sep 22 23:15:12.440 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/02B"
45193 Sep 22 23:15:12.440 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/02B.db"
45194 Sep 22 23:15:12.440 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45195 Sep 22 23:15:12.440 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/02B.replace" to "/tmp/downstairs-vrx8aK6L/00/000/02B.completed"
45196 Sep 22 23:15:12.440 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45197 Sep 22 23:15:12.440 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45198 Sep 22 23:15:12.440 DEBG [0] It's time to notify for 450
45199 Sep 22 23:15:12.441 INFO Completion from [0] id:450 status:true
45200 Sep 22 23:15:12.441 INFO [451/752] Repair commands completed
45201 Sep 22 23:15:12.441 INFO Pop front: ReconcileIO { id: ReconciliationId(451), op: ExtentReopen { repair_id: ReconciliationId(451), extent_id: 43 }, state: ClientData([New, New, New]) }
45202 Sep 22 23:15:12.441 INFO Sent repair work, now wait for resp
45203 Sep 22 23:15:12.441 INFO [0] received reconcile message
45204 Sep 22 23:15:12.441 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(451), op: ExtentReopen { repair_id: ReconciliationId(451), extent_id: 43 }, state: ClientData([InProgress, New, New]) }, : downstairs
45205 Sep 22 23:15:12.441 INFO [0] client ExtentReopen { repair_id: ReconciliationId(451), extent_id: 43 }
45206 Sep 22 23:15:12.441 INFO [1] received reconcile message
45207 Sep 22 23:15:12.441 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(451), op: ExtentReopen { repair_id: ReconciliationId(451), extent_id: 43 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45208 Sep 22 23:15:12.441 INFO [1] client ExtentReopen { repair_id: ReconciliationId(451), extent_id: 43 }
45209 Sep 22 23:15:12.441 INFO [2] received reconcile message
45210 Sep 22 23:15:12.441 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(451), op: ExtentReopen { repair_id: ReconciliationId(451), extent_id: 43 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45211 Sep 22 23:15:12.441 INFO [2] client ExtentReopen { repair_id: ReconciliationId(451), extent_id: 43 }
45212 Sep 22 23:15:12.441 DEBG 451 Reopen extent 43
45213 Sep 22 23:15:12.442 DEBG 451 Reopen extent 43
45214 Sep 22 23:15:12.442 DEBG 451 Reopen extent 43
45215 Sep 22 23:15:12.443 DEBG [2] It's time to notify for 451
45216 Sep 22 23:15:12.443 INFO Completion from [2] id:451 status:true
45217 Sep 22 23:15:12.443 INFO [452/752] Repair commands completed
45218 Sep 22 23:15:12.443 INFO Pop front: ReconcileIO { id: ReconciliationId(452), op: ExtentFlush { repair_id: ReconciliationId(452), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45219 Sep 22 23:15:12.443 INFO Sent repair work, now wait for resp
45220 Sep 22 23:15:12.443 INFO [0] received reconcile message
45221 Sep 22 23:15:12.443 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(452), op: ExtentFlush { repair_id: ReconciliationId(452), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45222 Sep 22 23:15:12.443 INFO [0] client ExtentFlush { repair_id: ReconciliationId(452), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45223 Sep 22 23:15:12.443 INFO [1] received reconcile message
45224 Sep 22 23:15:12.443 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(452), op: ExtentFlush { repair_id: ReconciliationId(452), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45225 Sep 22 23:15:12.443 INFO [1] client ExtentFlush { repair_id: ReconciliationId(452), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45226 Sep 22 23:15:12.443 INFO [2] received reconcile message
45227 Sep 22 23:15:12.443 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(452), op: ExtentFlush { repair_id: ReconciliationId(452), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45228 Sep 22 23:15:12.443 INFO [2] client ExtentFlush { repair_id: ReconciliationId(452), extent_id: 179, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45229 Sep 22 23:15:12.443 DEBG 452 Flush extent 179 with f:2 g:2
45230 Sep 22 23:15:12.443 DEBG Flush just extent 179 with f:2 and g:2
45231 Sep 22 23:15:12.444 DEBG [1] It's time to notify for 452
45232 Sep 22 23:15:12.444 INFO Completion from [1] id:452 status:true
45233 Sep 22 23:15:12.444 INFO [453/752] Repair commands completed
45234 Sep 22 23:15:12.444 INFO Pop front: ReconcileIO { id: ReconciliationId(453), op: ExtentClose { repair_id: ReconciliationId(453), extent_id: 179 }, state: ClientData([New, New, New]) }
45235 Sep 22 23:15:12.444 INFO Sent repair work, now wait for resp
45236 Sep 22 23:15:12.444 INFO [0] received reconcile message
45237 Sep 22 23:15:12.444 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(453), op: ExtentClose { repair_id: ReconciliationId(453), extent_id: 179 }, state: ClientData([InProgress, New, New]) }, : downstairs
45238 Sep 22 23:15:12.444 INFO [0] client ExtentClose { repair_id: ReconciliationId(453), extent_id: 179 }
45239 Sep 22 23:15:12.444 INFO [1] received reconcile message
45240 Sep 22 23:15:12.444 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(453), op: ExtentClose { repair_id: ReconciliationId(453), extent_id: 179 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45241 Sep 22 23:15:12.444 INFO [1] client ExtentClose { repair_id: ReconciliationId(453), extent_id: 179 }
45242 Sep 22 23:15:12.444 INFO [2] received reconcile message
45243 Sep 22 23:15:12.444 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(453), op: ExtentClose { repair_id: ReconciliationId(453), extent_id: 179 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45244 Sep 22 23:15:12.444 INFO [2] client ExtentClose { repair_id: ReconciliationId(453), extent_id: 179 }
45245 Sep 22 23:15:12.444 DEBG 453 Close extent 179
45246 Sep 22 23:15:12.444 DEBG 453 Close extent 179
45247 Sep 22 23:15:12.445 DEBG 453 Close extent 179
45248 Sep 22 23:15:12.445 DEBG [2] It's time to notify for 453
45249 Sep 22 23:15:12.445 INFO Completion from [2] id:453 status:true
45250 Sep 22 23:15:12.445 INFO [454/752] Repair commands completed
45251 Sep 22 23:15:12.445 INFO Pop front: ReconcileIO { id: ReconciliationId(454), op: ExtentRepair { repair_id: ReconciliationId(454), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45252 Sep 22 23:15:12.445 INFO Sent repair work, now wait for resp
45253 Sep 22 23:15:12.445 INFO [0] received reconcile message
45254 Sep 22 23:15:12.445 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(454), op: ExtentRepair { repair_id: ReconciliationId(454), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45255 Sep 22 23:15:12.445 INFO [0] client ExtentRepair { repair_id: ReconciliationId(454), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45256 Sep 22 23:15:12.445 INFO [0] Sending repair request ReconciliationId(454)
45257 Sep 22 23:15:12.445 INFO [1] received reconcile message
45258 Sep 22 23:15:12.445 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(454), op: ExtentRepair { repair_id: ReconciliationId(454), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45259 Sep 22 23:15:12.445 INFO [1] client ExtentRepair { repair_id: ReconciliationId(454), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45260 Sep 22 23:15:12.445 INFO [1] No action required ReconciliationId(454)
45261 Sep 22 23:15:12.445 INFO [2] received reconcile message
45262 Sep 22 23:15:12.445 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(454), op: ExtentRepair { repair_id: ReconciliationId(454), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45263 Sep 22 23:15:12.445 INFO [2] client ExtentRepair { repair_id: ReconciliationId(454), extent_id: 179, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45264 Sep 22 23:15:12.445 INFO [2] No action required ReconciliationId(454)
45265 Sep 22 23:15:12.446 DEBG 454 Repair extent 179 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
45266 Sep 22 23:15:12.446 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0B3.copy"
45267 Sep 22 23:15:12.508 INFO accepted connection, remote_addr: 127.0.0.1:60795, local_addr: 127.0.0.1:46213, task: repair
45268 Sep 22 23:15:12.508 TRCE incoming request, uri: /extent/179/files, method: GET, req_id: 878cbee6-1c05-44da-8360-a056c841fce5, remote_addr: 127.0.0.1:60795, local_addr: 127.0.0.1:46213, task: repair
45269 Sep 22 23:15:12.509 INFO request completed, latency_us: 242, response_code: 200, uri: /extent/179/files, method: GET, req_id: 878cbee6-1c05-44da-8360-a056c841fce5, remote_addr: 127.0.0.1:60795, local_addr: 127.0.0.1:46213, task: repair
45270 Sep 22 23:15:12.509 INFO eid:179 Found repair files: ["0B3", "0B3.db"]
45271 Sep 22 23:15:12.509 TRCE incoming request, uri: /newextent/179/data, method: GET, req_id: ae584d82-9e6c-4193-b01d-0959a1a66636, remote_addr: 127.0.0.1:60795, local_addr: 127.0.0.1:46213, task: repair
45272 Sep 22 23:15:12.510 INFO request completed, latency_us: 333, response_code: 200, uri: /newextent/179/data, method: GET, req_id: ae584d82-9e6c-4193-b01d-0959a1a66636, remote_addr: 127.0.0.1:60795, local_addr: 127.0.0.1:46213, task: repair
45273 Sep 22 23:15:12.515 TRCE incoming request, uri: /newextent/179/db, method: GET, req_id: e42a6c40-27c6-40a3-b6ac-30f694ae7c3d, remote_addr: 127.0.0.1:60795, local_addr: 127.0.0.1:46213, task: repair
45274 Sep 22 23:15:12.515 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/179/db, method: GET, req_id: e42a6c40-27c6-40a3-b6ac-30f694ae7c3d, remote_addr: 127.0.0.1:60795, local_addr: 127.0.0.1:46213, task: repair
45275 Sep 22 23:15:12.516 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0B3.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0B3.replace"
45276 Sep 22 23:15:12.516 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45277 Sep 22 23:15:12.517 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0B3.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
45278 Sep 22 23:15:12.518 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B3"
45279 Sep 22 23:15:12.518 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B3.db"
45280 Sep 22 23:15:12.518 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45281 Sep 22 23:15:12.518 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0B3.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0B3.completed"
45282 Sep 22 23:15:12.518 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45283 Sep 22 23:15:12.518 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45284 Sep 22 23:15:12.518 DEBG [0] It's time to notify for 454
45285 Sep 22 23:15:12.518 INFO Completion from [0] id:454 status:true
45286 Sep 22 23:15:12.518 INFO [455/752] Repair commands completed
45287 Sep 22 23:15:12.518 INFO Pop front: ReconcileIO { id: ReconciliationId(455), op: ExtentReopen { repair_id: ReconciliationId(455), extent_id: 179 }, state: ClientData([New, New, New]) }
45288 Sep 22 23:15:12.518 INFO Sent repair work, now wait for resp
45289 Sep 22 23:15:12.518 INFO [0] received reconcile message
45290 Sep 22 23:15:12.518 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(455), op: ExtentReopen { repair_id: ReconciliationId(455), extent_id: 179 }, state: ClientData([InProgress, New, New]) }, : downstairs
45291 Sep 22 23:15:12.518 INFO [0] client ExtentReopen { repair_id: ReconciliationId(455), extent_id: 179 }
45292 Sep 22 23:15:12.519 INFO [1] received reconcile message
45293 Sep 22 23:15:12.519 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(455), op: ExtentReopen { repair_id: ReconciliationId(455), extent_id: 179 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45294 Sep 22 23:15:12.519 INFO [1] client ExtentReopen { repair_id: ReconciliationId(455), extent_id: 179 }
45295 Sep 22 23:15:12.519 INFO [2] received reconcile message
45296 Sep 22 23:15:12.519 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(455), op: ExtentReopen { repair_id: ReconciliationId(455), extent_id: 179 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45297 Sep 22 23:15:12.519 INFO [2] client ExtentReopen { repair_id: ReconciliationId(455), extent_id: 179 }
45298 Sep 22 23:15:12.519 DEBG 455 Reopen extent 179
45299 Sep 22 23:15:12.520 DEBG 455 Reopen extent 179
45300 Sep 22 23:15:12.520 DEBG 455 Reopen extent 179
45301 Sep 22 23:15:12.521 DEBG [2] It's time to notify for 455
45302 Sep 22 23:15:12.521 INFO Completion from [2] id:455 status:true
45303 Sep 22 23:15:12.521 INFO [456/752] Repair commands completed
45304 Sep 22 23:15:12.521 INFO Pop front: ReconcileIO { id: ReconciliationId(456), op: ExtentFlush { repair_id: ReconciliationId(456), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45305 Sep 22 23:15:12.521 INFO Sent repair work, now wait for resp
45306 Sep 22 23:15:12.521 INFO [0] received reconcile message
45307 Sep 22 23:15:12.521 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(456), op: ExtentFlush { repair_id: ReconciliationId(456), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45308 Sep 22 23:15:12.521 INFO [0] client ExtentFlush { repair_id: ReconciliationId(456), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45309 Sep 22 23:15:12.521 INFO [1] received reconcile message
45310 Sep 22 23:15:12.521 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(456), op: ExtentFlush { repair_id: ReconciliationId(456), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45311 Sep 22 23:15:12.521 INFO [1] client ExtentFlush { repair_id: ReconciliationId(456), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45312 Sep 22 23:15:12.521 INFO [2] received reconcile message
45313 Sep 22 23:15:12.521 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(456), op: ExtentFlush { repair_id: ReconciliationId(456), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45314 Sep 22 23:15:12.521 INFO [2] client ExtentFlush { repair_id: ReconciliationId(456), extent_id: 120, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45315 Sep 22 23:15:12.521 DEBG 456 Flush extent 120 with f:2 g:2
45316 Sep 22 23:15:12.521 DEBG Flush just extent 120 with f:2 and g:2
45317 Sep 22 23:15:12.521 DEBG [1] It's time to notify for 456
45318 Sep 22 23:15:12.521 INFO Completion from [1] id:456 status:true
45319 Sep 22 23:15:12.521 INFO [457/752] Repair commands completed
45320 Sep 22 23:15:12.521 INFO Pop front: ReconcileIO { id: ReconciliationId(457), op: ExtentClose { repair_id: ReconciliationId(457), extent_id: 120 }, state: ClientData([New, New, New]) }
45321 Sep 22 23:15:12.521 INFO Sent repair work, now wait for resp
45322 Sep 22 23:15:12.521 INFO [0] received reconcile message
45323 Sep 22 23:15:12.521 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(457), op: ExtentClose { repair_id: ReconciliationId(457), extent_id: 120 }, state: ClientData([InProgress, New, New]) }, : downstairs
45324 Sep 22 23:15:12.521 INFO [0] client ExtentClose { repair_id: ReconciliationId(457), extent_id: 120 }
45325 Sep 22 23:15:12.521 INFO [1] received reconcile message
45326 Sep 22 23:15:12.521 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(457), op: ExtentClose { repair_id: ReconciliationId(457), extent_id: 120 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45327 Sep 22 23:15:12.521 INFO [1] client ExtentClose { repair_id: ReconciliationId(457), extent_id: 120 }
45328 Sep 22 23:15:12.522 INFO [2] received reconcile message
45329 Sep 22 23:15:12.522 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(457), op: ExtentClose { repair_id: ReconciliationId(457), extent_id: 120 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45330 Sep 22 23:15:12.522 INFO [2] client ExtentClose { repair_id: ReconciliationId(457), extent_id: 120 }
45331 Sep 22 23:15:12.522 DEBG 457 Close extent 120
45332 Sep 22 23:15:12.522 DEBG 457 Close extent 120
45333 Sep 22 23:15:12.522 DEBG 457 Close extent 120
45334 Sep 22 23:15:12.523 DEBG [2] It's time to notify for 457
45335 Sep 22 23:15:12.523 INFO Completion from [2] id:457 status:true
45336 Sep 22 23:15:12.523 INFO [458/752] Repair commands completed
45337 Sep 22 23:15:12.523 INFO Pop front: ReconcileIO { id: ReconciliationId(458), op: ExtentRepair { repair_id: ReconciliationId(458), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45338 Sep 22 23:15:12.523 INFO Sent repair work, now wait for resp
45339 Sep 22 23:15:12.523 INFO [0] received reconcile message
45340 Sep 22 23:15:12.523 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(458), op: ExtentRepair { repair_id: ReconciliationId(458), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45341 Sep 22 23:15:12.523 INFO [0] client ExtentRepair { repair_id: ReconciliationId(458), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45342 Sep 22 23:15:12.523 INFO [0] Sending repair request ReconciliationId(458)
45343 Sep 22 23:15:12.523 INFO [1] received reconcile message
45344 Sep 22 23:15:12.523 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(458), op: ExtentRepair { repair_id: ReconciliationId(458), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45345 Sep 22 23:15:12.523 INFO [1] client ExtentRepair { repair_id: ReconciliationId(458), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45346 Sep 22 23:15:12.523 INFO [1] No action required ReconciliationId(458)
45347 Sep 22 23:15:12.523 INFO [2] received reconcile message
45348 Sep 22 23:15:12.523 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(458), op: ExtentRepair { repair_id: ReconciliationId(458), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45349 Sep 22 23:15:12.523 INFO [2] client ExtentRepair { repair_id: ReconciliationId(458), extent_id: 120, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45350 Sep 22 23:15:12.523 INFO [2] No action required ReconciliationId(458)
45351 Sep 22 23:15:12.523 DEBG 458 Repair extent 120 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
45352 Sep 22 23:15:12.523 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/078.copy"
45353 Sep 22 23:15:12.589 INFO accepted connection, remote_addr: 127.0.0.1:36560, local_addr: 127.0.0.1:46213, task: repair
45354 Sep 22 23:15:12.589 TRCE incoming request, uri: /extent/120/files, method: GET, req_id: 07f7762c-3d9a-4087-b476-155d883c58fe, remote_addr: 127.0.0.1:36560, local_addr: 127.0.0.1:46213, task: repair
45355 Sep 22 23:15:12.589 INFO request completed, latency_us: 190, response_code: 200, uri: /extent/120/files, method: GET, req_id: 07f7762c-3d9a-4087-b476-155d883c58fe, remote_addr: 127.0.0.1:36560, local_addr: 127.0.0.1:46213, task: repair
45356 Sep 22 23:15:12.590 INFO eid:120 Found repair files: ["078", "078.db"]
45357 Sep 22 23:15:12.590 TRCE incoming request, uri: /newextent/120/data, method: GET, req_id: 8fcf22ec-a2e2-43e9-9b1a-784b135be829, remote_addr: 127.0.0.1:36560, local_addr: 127.0.0.1:46213, task: repair
45358 Sep 22 23:15:12.590 INFO request completed, latency_us: 361, response_code: 200, uri: /newextent/120/data, method: GET, req_id: 8fcf22ec-a2e2-43e9-9b1a-784b135be829, remote_addr: 127.0.0.1:36560, local_addr: 127.0.0.1:46213, task: repair
45359 Sep 22 23:15:12.595 TRCE incoming request, uri: /newextent/120/db, method: GET, req_id: 3554c51b-4b10-4323-b0d8-d66bd66eebf3, remote_addr: 127.0.0.1:36560, local_addr: 127.0.0.1:46213, task: repair
45360 Sep 22 23:15:12.596 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/120/db, method: GET, req_id: 3554c51b-4b10-4323-b0d8-d66bd66eebf3, remote_addr: 127.0.0.1:36560, local_addr: 127.0.0.1:46213, task: repair
45361 Sep 22 23:15:12.597 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/078.copy" to "/tmp/downstairs-vrx8aK6L/00/000/078.replace"
45362 Sep 22 23:15:12.597 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45363 Sep 22 23:15:12.598 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/078.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
45364 Sep 22 23:15:12.598 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/078"
45365 Sep 22 23:15:12.598 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/078.db"
45366 Sep 22 23:15:12.598 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45367 Sep 22 23:15:12.598 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/078.replace" to "/tmp/downstairs-vrx8aK6L/00/000/078.completed"
45368 Sep 22 23:15:12.598 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45369 Sep 22 23:15:12.598 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45370 Sep 22 23:15:12.598 DEBG [0] It's time to notify for 458
45371 Sep 22 23:15:12.599 INFO Completion from [0] id:458 status:true
45372 Sep 22 23:15:12.599 INFO [459/752] Repair commands completed
45373 Sep 22 23:15:12.599 INFO Pop front: ReconcileIO { id: ReconciliationId(459), op: ExtentReopen { repair_id: ReconciliationId(459), extent_id: 120 }, state: ClientData([New, New, New]) }
45374 Sep 22 23:15:12.599 INFO Sent repair work, now wait for resp
45375 Sep 22 23:15:12.599 INFO [0] received reconcile message
45376 Sep 22 23:15:12.599 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(459), op: ExtentReopen { repair_id: ReconciliationId(459), extent_id: 120 }, state: ClientData([InProgress, New, New]) }, : downstairs
45377 Sep 22 23:15:12.599 INFO [0] client ExtentReopen { repair_id: ReconciliationId(459), extent_id: 120 }
45378 Sep 22 23:15:12.599 INFO [1] received reconcile message
45379 Sep 22 23:15:12.599 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(459), op: ExtentReopen { repair_id: ReconciliationId(459), extent_id: 120 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45380 Sep 22 23:15:12.599 INFO [1] client ExtentReopen { repair_id: ReconciliationId(459), extent_id: 120 }
45381 Sep 22 23:15:12.599 INFO [2] received reconcile message
45382 Sep 22 23:15:12.599 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(459), op: ExtentReopen { repair_id: ReconciliationId(459), extent_id: 120 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45383 Sep 22 23:15:12.599 INFO [2] client ExtentReopen { repair_id: ReconciliationId(459), extent_id: 120 }
45384 Sep 22 23:15:12.599 DEBG 459 Reopen extent 120
45385 Sep 22 23:15:12.600 DEBG 459 Reopen extent 120
45386 Sep 22 23:15:12.600 DEBG 459 Reopen extent 120
45387 Sep 22 23:15:12.601 DEBG [2] It's time to notify for 459
45388 Sep 22 23:15:12.601 INFO Completion from [2] id:459 status:true
45389 Sep 22 23:15:12.601 INFO [460/752] Repair commands completed
45390 Sep 22 23:15:12.601 INFO Pop front: ReconcileIO { id: ReconciliationId(460), op: ExtentFlush { repair_id: ReconciliationId(460), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45391 Sep 22 23:15:12.601 INFO Sent repair work, now wait for resp
45392 Sep 22 23:15:12.601 INFO [0] received reconcile message
45393 Sep 22 23:15:12.601 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(460), op: ExtentFlush { repair_id: ReconciliationId(460), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45394 Sep 22 23:15:12.601 INFO [0] client ExtentFlush { repair_id: ReconciliationId(460), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45395 Sep 22 23:15:12.601 INFO [1] received reconcile message
45396 Sep 22 23:15:12.601 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(460), op: ExtentFlush { repair_id: ReconciliationId(460), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45397 Sep 22 23:15:12.601 INFO [1] client ExtentFlush { repair_id: ReconciliationId(460), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45398 Sep 22 23:15:12.601 INFO [2] received reconcile message
45399 Sep 22 23:15:12.601 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(460), op: ExtentFlush { repair_id: ReconciliationId(460), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45400 Sep 22 23:15:12.601 INFO [2] client ExtentFlush { repair_id: ReconciliationId(460), extent_id: 66, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45401 Sep 22 23:15:12.601 DEBG 460 Flush extent 66 with f:2 g:2
45402 Sep 22 23:15:12.601 DEBG Flush just extent 66 with f:2 and g:2
45403 Sep 22 23:15:12.601 DEBG [1] It's time to notify for 460
45404 Sep 22 23:15:12.601 INFO Completion from [1] id:460 status:true
45405 Sep 22 23:15:12.601 INFO [461/752] Repair commands completed
45406 Sep 22 23:15:12.601 INFO Pop front: ReconcileIO { id: ReconciliationId(461), op: ExtentClose { repair_id: ReconciliationId(461), extent_id: 66 }, state: ClientData([New, New, New]) }
45407 Sep 22 23:15:12.601 INFO Sent repair work, now wait for resp
45408 Sep 22 23:15:12.601 INFO [0] received reconcile message
45409 Sep 22 23:15:12.601 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(461), op: ExtentClose { repair_id: ReconciliationId(461), extent_id: 66 }, state: ClientData([InProgress, New, New]) }, : downstairs
45410 Sep 22 23:15:12.601 INFO [0] client ExtentClose { repair_id: ReconciliationId(461), extent_id: 66 }
45411 Sep 22 23:15:12.601 INFO [1] received reconcile message
45412 Sep 22 23:15:12.601 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(461), op: ExtentClose { repair_id: ReconciliationId(461), extent_id: 66 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45413 Sep 22 23:15:12.601 INFO [1] client ExtentClose { repair_id: ReconciliationId(461), extent_id: 66 }
45414 Sep 22 23:15:12.601 INFO [2] received reconcile message
45415 Sep 22 23:15:12.602 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(461), op: ExtentClose { repair_id: ReconciliationId(461), extent_id: 66 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45416 Sep 22 23:15:12.602 INFO [2] client ExtentClose { repair_id: ReconciliationId(461), extent_id: 66 }
45417 Sep 22 23:15:12.602 DEBG 461 Close extent 66
45418 Sep 22 23:15:12.602 DEBG 461 Close extent 66
45419 Sep 22 23:15:12.602 DEBG 461 Close extent 66
45420 Sep 22 23:15:12.603 DEBG [2] It's time to notify for 461
45421 Sep 22 23:15:12.603 INFO Completion from [2] id:461 status:true
45422 Sep 22 23:15:12.603 INFO [462/752] Repair commands completed
45423 Sep 22 23:15:12.603 INFO Pop front: ReconcileIO { id: ReconciliationId(462), op: ExtentRepair { repair_id: ReconciliationId(462), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45424 Sep 22 23:15:12.603 INFO Sent repair work, now wait for resp
45425 Sep 22 23:15:12.603 INFO [0] received reconcile message
45426 Sep 22 23:15:12.603 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(462), op: ExtentRepair { repair_id: ReconciliationId(462), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45427 Sep 22 23:15:12.603 INFO [0] client ExtentRepair { repair_id: ReconciliationId(462), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45428 Sep 22 23:15:12.603 INFO [0] Sending repair request ReconciliationId(462)
45429 Sep 22 23:15:12.603 INFO [1] received reconcile message
45430 Sep 22 23:15:12.603 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(462), op: ExtentRepair { repair_id: ReconciliationId(462), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45431 Sep 22 23:15:12.603 INFO [1] client ExtentRepair { repair_id: ReconciliationId(462), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45432 Sep 22 23:15:12.603 INFO [1] No action required ReconciliationId(462)
45433 Sep 22 23:15:12.603 INFO [2] received reconcile message
45434 Sep 22 23:15:12.603 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(462), op: ExtentRepair { repair_id: ReconciliationId(462), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45435 Sep 22 23:15:12.603 INFO [2] client ExtentRepair { repair_id: ReconciliationId(462), extent_id: 66, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45436 Sep 22 23:15:12.603 INFO [2] No action required ReconciliationId(462)
45437 Sep 22 23:15:12.603 DEBG 462 Repair extent 66 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
45438 Sep 22 23:15:12.603 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/042.copy"
45439 Sep 22 23:15:12.668 INFO accepted connection, remote_addr: 127.0.0.1:49244, local_addr: 127.0.0.1:46213, task: repair
45440 Sep 22 23:15:12.668 TRCE incoming request, uri: /extent/66/files, method: GET, req_id: bb53e584-c52f-4d56-bea5-b60d1d498be4, remote_addr: 127.0.0.1:49244, local_addr: 127.0.0.1:46213, task: repair
45441 Sep 22 23:15:12.669 INFO request completed, latency_us: 212, response_code: 200, uri: /extent/66/files, method: GET, req_id: bb53e584-c52f-4d56-bea5-b60d1d498be4, remote_addr: 127.0.0.1:49244, local_addr: 127.0.0.1:46213, task: repair
45442 Sep 22 23:15:12.669 INFO eid:66 Found repair files: ["042", "042.db"]
45443 Sep 22 23:15:12.669 TRCE incoming request, uri: /newextent/66/data, method: GET, req_id: 3975a6d3-6f9e-407b-80dc-e05c7208bb73, remote_addr: 127.0.0.1:49244, local_addr: 127.0.0.1:46213, task: repair
45444 Sep 22 23:15:12.670 INFO request completed, latency_us: 312, response_code: 200, uri: /newextent/66/data, method: GET, req_id: 3975a6d3-6f9e-407b-80dc-e05c7208bb73, remote_addr: 127.0.0.1:49244, local_addr: 127.0.0.1:46213, task: repair
45445 Sep 22 23:15:12.675 TRCE incoming request, uri: /newextent/66/db, method: GET, req_id: d6fb1749-8f02-43b8-97a8-e702e3da6454, remote_addr: 127.0.0.1:49244, local_addr: 127.0.0.1:46213, task: repair
45446 Sep 22 23:15:12.675 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/66/db, method: GET, req_id: d6fb1749-8f02-43b8-97a8-e702e3da6454, remote_addr: 127.0.0.1:49244, local_addr: 127.0.0.1:46213, task: repair
45447 Sep 22 23:15:12.676 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/042.copy" to "/tmp/downstairs-vrx8aK6L/00/000/042.replace"
45448 Sep 22 23:15:12.676 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45449 Sep 22 23:15:12.677 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/042.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
45450 Sep 22 23:15:12.678 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/042"
45451 Sep 22 23:15:12.678 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/042.db"
45452 Sep 22 23:15:12.678 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45453 Sep 22 23:15:12.678 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/042.replace" to "/tmp/downstairs-vrx8aK6L/00/000/042.completed"
45454 Sep 22 23:15:12.678 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45455 Sep 22 23:15:12.678 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45456 Sep 22 23:15:12.678 DEBG [0] It's time to notify for 462
45457 Sep 22 23:15:12.678 INFO Completion from [0] id:462 status:true
45458 Sep 22 23:15:12.678 INFO [463/752] Repair commands completed
45459 Sep 22 23:15:12.678 INFO Pop front: ReconcileIO { id: ReconciliationId(463), op: ExtentReopen { repair_id: ReconciliationId(463), extent_id: 66 }, state: ClientData([New, New, New]) }
45460 Sep 22 23:15:12.678 INFO Sent repair work, now wait for resp
45461 Sep 22 23:15:12.678 INFO [0] received reconcile message
45462 Sep 22 23:15:12.678 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(463), op: ExtentReopen { repair_id: ReconciliationId(463), extent_id: 66 }, state: ClientData([InProgress, New, New]) }, : downstairs
45463 Sep 22 23:15:12.678 INFO [0] client ExtentReopen { repair_id: ReconciliationId(463), extent_id: 66 }
45464 Sep 22 23:15:12.678 INFO [1] received reconcile message
45465 Sep 22 23:15:12.678 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(463), op: ExtentReopen { repair_id: ReconciliationId(463), extent_id: 66 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45466 Sep 22 23:15:12.678 INFO [1] client ExtentReopen { repair_id: ReconciliationId(463), extent_id: 66 }
45467 Sep 22 23:15:12.678 INFO [2] received reconcile message
45468 Sep 22 23:15:12.678 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(463), op: ExtentReopen { repair_id: ReconciliationId(463), extent_id: 66 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45469 Sep 22 23:15:12.678 INFO [2] client ExtentReopen { repair_id: ReconciliationId(463), extent_id: 66 }
45470 Sep 22 23:15:12.679 DEBG 463 Reopen extent 66
45471 Sep 22 23:15:12.679 DEBG 463 Reopen extent 66
45472 Sep 22 23:15:12.680 DEBG 463 Reopen extent 66
45473 Sep 22 23:15:12.680 DEBG [2] It's time to notify for 463
45474 Sep 22 23:15:12.680 INFO Completion from [2] id:463 status:true
45475 Sep 22 23:15:12.680 INFO [464/752] Repair commands completed
45476 Sep 22 23:15:12.680 INFO Pop front: ReconcileIO { id: ReconciliationId(464), op: ExtentFlush { repair_id: ReconciliationId(464), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45477 Sep 22 23:15:12.680 INFO Sent repair work, now wait for resp
45478 Sep 22 23:15:12.680 INFO [0] received reconcile message
45479 Sep 22 23:15:12.681 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(464), op: ExtentFlush { repair_id: ReconciliationId(464), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45480 Sep 22 23:15:12.681 INFO [0] client ExtentFlush { repair_id: ReconciliationId(464), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45481 Sep 22 23:15:12.681 INFO [1] received reconcile message
45482 Sep 22 23:15:12.681 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(464), op: ExtentFlush { repair_id: ReconciliationId(464), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45483 Sep 22 23:15:12.681 INFO [1] client ExtentFlush { repair_id: ReconciliationId(464), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45484 Sep 22 23:15:12.681 INFO [2] received reconcile message
45485 Sep 22 23:15:12.681 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(464), op: ExtentFlush { repair_id: ReconciliationId(464), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45486 Sep 22 23:15:12.681 INFO [2] client ExtentFlush { repair_id: ReconciliationId(464), extent_id: 132, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45487 Sep 22 23:15:12.681 DEBG 464 Flush extent 132 with f:2 g:2
45488 Sep 22 23:15:12.681 DEBG Flush just extent 132 with f:2 and g:2
45489 Sep 22 23:15:12.681 DEBG [1] It's time to notify for 464
45490 Sep 22 23:15:12.681 INFO Completion from [1] id:464 status:true
45491 Sep 22 23:15:12.681 INFO [465/752] Repair commands completed
45492 Sep 22 23:15:12.681 INFO Pop front: ReconcileIO { id: ReconciliationId(465), op: ExtentClose { repair_id: ReconciliationId(465), extent_id: 132 }, state: ClientData([New, New, New]) }
45493 Sep 22 23:15:12.681 INFO Sent repair work, now wait for resp
45494 Sep 22 23:15:12.681 INFO [0] received reconcile message
45495 Sep 22 23:15:12.681 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(465), op: ExtentClose { repair_id: ReconciliationId(465), extent_id: 132 }, state: ClientData([InProgress, New, New]) }, : downstairs
45496 Sep 22 23:15:12.681 INFO [0] client ExtentClose { repair_id: ReconciliationId(465), extent_id: 132 }
45497 Sep 22 23:15:12.681 INFO [1] received reconcile message
45498 Sep 22 23:15:12.681 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(465), op: ExtentClose { repair_id: ReconciliationId(465), extent_id: 132 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45499 Sep 22 23:15:12.681 INFO [1] client ExtentClose { repair_id: ReconciliationId(465), extent_id: 132 }
45500 Sep 22 23:15:12.681 INFO [2] received reconcile message
45501 Sep 22 23:15:12.681 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(465), op: ExtentClose { repair_id: ReconciliationId(465), extent_id: 132 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45502 Sep 22 23:15:12.681 INFO [2] client ExtentClose { repair_id: ReconciliationId(465), extent_id: 132 }
45503 Sep 22 23:15:12.681 DEBG 465 Close extent 132
45504 Sep 22 23:15:12.682 DEBG 465 Close extent 132
45505 Sep 22 23:15:12.682 DEBG 465 Close extent 132
45506 Sep 22 23:15:12.682 DEBG [2] It's time to notify for 465
45507 Sep 22 23:15:12.682 INFO Completion from [2] id:465 status:true
45508 Sep 22 23:15:12.682 INFO [466/752] Repair commands completed
45509 Sep 22 23:15:12.682 INFO Pop front: ReconcileIO { id: ReconciliationId(466), op: ExtentRepair { repair_id: ReconciliationId(466), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45510 Sep 22 23:15:12.682 INFO Sent repair work, now wait for resp
45511 Sep 22 23:15:12.682 INFO [0] received reconcile message
45512 Sep 22 23:15:12.683 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(466), op: ExtentRepair { repair_id: ReconciliationId(466), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45513 Sep 22 23:15:12.683 INFO [0] client ExtentRepair { repair_id: ReconciliationId(466), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45514 Sep 22 23:15:12.683 INFO [0] Sending repair request ReconciliationId(466)
45515 Sep 22 23:15:12.683 INFO [1] received reconcile message
45516 Sep 22 23:15:12.683 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(466), op: ExtentRepair { repair_id: ReconciliationId(466), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45517 Sep 22 23:15:12.683 INFO [1] client ExtentRepair { repair_id: ReconciliationId(466), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45518 Sep 22 23:15:12.683 INFO [1] No action required ReconciliationId(466)
45519 Sep 22 23:15:12.683 INFO [2] received reconcile message
45520 Sep 22 23:15:12.683 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(466), op: ExtentRepair { repair_id: ReconciliationId(466), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45521 Sep 22 23:15:12.683 INFO [2] client ExtentRepair { repair_id: ReconciliationId(466), extent_id: 132, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45522 Sep 22 23:15:12.683 INFO [2] No action required ReconciliationId(466)
45523 Sep 22 23:15:12.683 DEBG 466 Repair extent 132 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
45524 Sep 22 23:15:12.683 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/084.copy"
45525 Sep 22 23:15:12.746 DEBG IO Read 1086 has deps [JobId(1085)]
45526 Sep 22 23:15:12.746 DEBG IO Flush 1087 has deps [JobId(1086), JobId(1085)]
45527 Sep 22 23:15:12.748 INFO accepted connection, remote_addr: 127.0.0.1:48116, local_addr: 127.0.0.1:46213, task: repair
45528 Sep 22 23:15:12.748 TRCE incoming request, uri: /extent/132/files, method: GET, req_id: 5d0de0fb-7303-4501-8eed-302bd0a83ec6, remote_addr: 127.0.0.1:48116, local_addr: 127.0.0.1:46213, task: repair
45529 Sep 22 23:15:12.748 INFO request completed, latency_us: 252, response_code: 200, uri: /extent/132/files, method: GET, req_id: 5d0de0fb-7303-4501-8eed-302bd0a83ec6, remote_addr: 127.0.0.1:48116, local_addr: 127.0.0.1:46213, task: repair
45530 Sep 22 23:15:12.748 INFO eid:132 Found repair files: ["084", "084.db"]
45531 Sep 22 23:15:12.749 TRCE incoming request, uri: /newextent/132/data, method: GET, req_id: 1b6dd1c2-37fd-4ce7-860f-09b98c69f38b, remote_addr: 127.0.0.1:48116, local_addr: 127.0.0.1:46213, task: repair
45532 Sep 22 23:15:12.749 INFO request completed, latency_us: 262, response_code: 200, uri: /newextent/132/data, method: GET, req_id: 1b6dd1c2-37fd-4ce7-860f-09b98c69f38b, remote_addr: 127.0.0.1:48116, local_addr: 127.0.0.1:46213, task: repair
45533 Sep 22 23:15:12.754 TRCE incoming request, uri: /newextent/132/db, method: GET, req_id: 080e55f9-196a-49e0-b985-c26fc829cc4e, remote_addr: 127.0.0.1:48116, local_addr: 127.0.0.1:46213, task: repair
45534 Sep 22 23:15:12.755 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/132/db, method: GET, req_id: 080e55f9-196a-49e0-b985-c26fc829cc4e, remote_addr: 127.0.0.1:48116, local_addr: 127.0.0.1:46213, task: repair
45535 Sep 22 23:15:12.756 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/084.copy" to "/tmp/downstairs-vrx8aK6L/00/000/084.replace"
45536 Sep 22 23:15:12.756 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45537 Sep 22 23:15:12.757 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/084.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
45538 Sep 22 23:15:12.757 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/084"
45539 Sep 22 23:15:12.757 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/084.db"
45540 Sep 22 23:15:12.757 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45541 Sep 22 23:15:12.757 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/084.replace" to "/tmp/downstairs-vrx8aK6L/00/000/084.completed"
45542 Sep 22 23:15:12.757 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45543 Sep 22 23:15:12.757 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45544 Sep 22 23:15:12.757 DEBG [0] It's time to notify for 466
45545 Sep 22 23:15:12.758 INFO Completion from [0] id:466 status:true
45546 Sep 22 23:15:12.758 INFO [467/752] Repair commands completed
45547 Sep 22 23:15:12.758 INFO Pop front: ReconcileIO { id: ReconciliationId(467), op: ExtentReopen { repair_id: ReconciliationId(467), extent_id: 132 }, state: ClientData([New, New, New]) }
45548 Sep 22 23:15:12.758 INFO Sent repair work, now wait for resp
45549 Sep 22 23:15:12.758 INFO [0] received reconcile message
45550 Sep 22 23:15:12.758 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(467), op: ExtentReopen { repair_id: ReconciliationId(467), extent_id: 132 }, state: ClientData([InProgress, New, New]) }, : downstairs
45551 Sep 22 23:15:12.758 INFO [0] client ExtentReopen { repair_id: ReconciliationId(467), extent_id: 132 }
45552 Sep 22 23:15:12.758 INFO [1] received reconcile message
45553 Sep 22 23:15:12.758 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(467), op: ExtentReopen { repair_id: ReconciliationId(467), extent_id: 132 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45554 Sep 22 23:15:12.758 INFO [1] client ExtentReopen { repair_id: ReconciliationId(467), extent_id: 132 }
45555 Sep 22 23:15:12.758 INFO [2] received reconcile message
45556 Sep 22 23:15:12.758 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(467), op: ExtentReopen { repair_id: ReconciliationId(467), extent_id: 132 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45557 Sep 22 23:15:12.758 INFO [2] client ExtentReopen { repair_id: ReconciliationId(467), extent_id: 132 }
45558 Sep 22 23:15:12.758 DEBG 467 Reopen extent 132
45559 Sep 22 23:15:12.759 DEBG 467 Reopen extent 132
45560 Sep 22 23:15:12.759 DEBG 467 Reopen extent 132
45561 Sep 22 23:15:12.760 DEBG [2] It's time to notify for 467
45562 Sep 22 23:15:12.760 INFO Completion from [2] id:467 status:true
45563 Sep 22 23:15:12.760 INFO [468/752] Repair commands completed
45564 Sep 22 23:15:12.760 INFO Pop front: ReconcileIO { id: ReconciliationId(468), op: ExtentFlush { repair_id: ReconciliationId(468), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45565 Sep 22 23:15:12.760 INFO Sent repair work, now wait for resp
45566 Sep 22 23:15:12.760 INFO [0] received reconcile message
45567 Sep 22 23:15:12.760 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(468), op: ExtentFlush { repair_id: ReconciliationId(468), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45568 Sep 22 23:15:12.760 INFO [0] client ExtentFlush { repair_id: ReconciliationId(468), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45569 Sep 22 23:15:12.760 INFO [1] received reconcile message
45570 Sep 22 23:15:12.760 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(468), op: ExtentFlush { repair_id: ReconciliationId(468), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45571 Sep 22 23:15:12.760 INFO [1] client ExtentFlush { repair_id: ReconciliationId(468), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45572 Sep 22 23:15:12.760 INFO [2] received reconcile message
45573 Sep 22 23:15:12.760 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(468), op: ExtentFlush { repair_id: ReconciliationId(468), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45574 Sep 22 23:15:12.760 INFO [2] client ExtentFlush { repair_id: ReconciliationId(468), extent_id: 173, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45575 Sep 22 23:15:12.760 DEBG 468 Flush extent 173 with f:2 g:2
45576 Sep 22 23:15:12.760 DEBG Flush just extent 173 with f:2 and g:2
45577 Sep 22 23:15:12.761 DEBG [1] It's time to notify for 468
45578 Sep 22 23:15:12.761 INFO Completion from [1] id:468 status:true
45579 Sep 22 23:15:12.761 INFO [469/752] Repair commands completed
45580 Sep 22 23:15:12.761 INFO Pop front: ReconcileIO { id: ReconciliationId(469), op: ExtentClose { repair_id: ReconciliationId(469), extent_id: 173 }, state: ClientData([New, New, New]) }
45581 Sep 22 23:15:12.761 INFO Sent repair work, now wait for resp
45582 Sep 22 23:15:12.761 INFO [0] received reconcile message
45583 Sep 22 23:15:12.761 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(469), op: ExtentClose { repair_id: ReconciliationId(469), extent_id: 173 }, state: ClientData([InProgress, New, New]) }, : downstairs
45584 Sep 22 23:15:12.761 INFO [0] client ExtentClose { repair_id: ReconciliationId(469), extent_id: 173 }
45585 Sep 22 23:15:12.761 INFO [1] received reconcile message
45586 Sep 22 23:15:12.761 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(469), op: ExtentClose { repair_id: ReconciliationId(469), extent_id: 173 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45587 Sep 22 23:15:12.761 INFO [1] client ExtentClose { repair_id: ReconciliationId(469), extent_id: 173 }
45588 Sep 22 23:15:12.761 INFO [2] received reconcile message
45589 Sep 22 23:15:12.761 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(469), op: ExtentClose { repair_id: ReconciliationId(469), extent_id: 173 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45590 Sep 22 23:15:12.761 INFO [2] client ExtentClose { repair_id: ReconciliationId(469), extent_id: 173 }
45591 Sep 22 23:15:12.761 DEBG 469 Close extent 173
45592 Sep 22 23:15:12.761 DEBG 469 Close extent 173
45593 Sep 22 23:15:12.762 DEBG 469 Close extent 173
45594 Sep 22 23:15:12.762 DEBG [2] It's time to notify for 469
45595 Sep 22 23:15:12.762 INFO Completion from [2] id:469 status:true
45596 Sep 22 23:15:12.762 INFO [470/752] Repair commands completed
45597 Sep 22 23:15:12.762 INFO Pop front: ReconcileIO { id: ReconciliationId(470), op: ExtentRepair { repair_id: ReconciliationId(470), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45598 Sep 22 23:15:12.762 INFO Sent repair work, now wait for resp
45599 Sep 22 23:15:12.762 INFO [0] received reconcile message
45600 Sep 22 23:15:12.762 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(470), op: ExtentRepair { repair_id: ReconciliationId(470), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45601 Sep 22 23:15:12.762 INFO [0] client ExtentRepair { repair_id: ReconciliationId(470), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45602 Sep 22 23:15:12.762 INFO [0] Sending repair request ReconciliationId(470)
45603 Sep 22 23:15:12.762 INFO [1] received reconcile message
45604 Sep 22 23:15:12.762 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(470), op: ExtentRepair { repair_id: ReconciliationId(470), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45605 Sep 22 23:15:12.762 INFO [1] client ExtentRepair { repair_id: ReconciliationId(470), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45606 Sep 22 23:15:12.762 INFO [1] No action required ReconciliationId(470)
45607 Sep 22 23:15:12.762 INFO [2] received reconcile message
45608 Sep 22 23:15:12.762 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(470), op: ExtentRepair { repair_id: ReconciliationId(470), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45609 Sep 22 23:15:12.762 INFO [2] client ExtentRepair { repair_id: ReconciliationId(470), extent_id: 173, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45610 Sep 22 23:15:12.762 INFO [2] No action required ReconciliationId(470)
45611 Sep 22 23:15:12.762 DEBG 470 Repair extent 173 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
45612 Sep 22 23:15:12.763 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0AD.copy"
45613 Sep 22 23:15:12.829 INFO accepted connection, remote_addr: 127.0.0.1:41892, local_addr: 127.0.0.1:46213, task: repair
45614 Sep 22 23:15:12.829 TRCE incoming request, uri: /extent/173/files, method: GET, req_id: 08072b74-10d3-4df6-89cd-2a04c8d689fb, remote_addr: 127.0.0.1:41892, local_addr: 127.0.0.1:46213, task: repair
45615 Sep 22 23:15:12.829 INFO request completed, latency_us: 242, response_code: 200, uri: /extent/173/files, method: GET, req_id: 08072b74-10d3-4df6-89cd-2a04c8d689fb, remote_addr: 127.0.0.1:41892, local_addr: 127.0.0.1:46213, task: repair
45616 Sep 22 23:15:12.830 INFO eid:173 Found repair files: ["0AD", "0AD.db"]
45617 Sep 22 23:15:12.830 TRCE incoming request, uri: /newextent/173/data, method: GET, req_id: 39ce5814-ae05-42fc-bc95-96e344c61040, remote_addr: 127.0.0.1:41892, local_addr: 127.0.0.1:46213, task: repair
45618 Sep 22 23:15:12.830 INFO request completed, latency_us: 361, response_code: 200, uri: /newextent/173/data, method: GET, req_id: 39ce5814-ae05-42fc-bc95-96e344c61040, remote_addr: 127.0.0.1:41892, local_addr: 127.0.0.1:46213, task: repair
45619 Sep 22 23:15:12.836 TRCE incoming request, uri: /newextent/173/db, method: GET, req_id: bd09b9ee-0326-4787-a908-9a6777d92438, remote_addr: 127.0.0.1:41892, local_addr: 127.0.0.1:46213, task: repair
45620 Sep 22 23:15:12.836 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/173/db, method: GET, req_id: bd09b9ee-0326-4787-a908-9a6777d92438, remote_addr: 127.0.0.1:41892, local_addr: 127.0.0.1:46213, task: repair
45621 Sep 22 23:15:12.837 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0AD.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0AD.replace"
45622 Sep 22 23:15:12.837 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45623 Sep 22 23:15:12.838 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0AD.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
45624 Sep 22 23:15:12.838 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0AD"
45625 Sep 22 23:15:12.838 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0AD.db"
45626 Sep 22 23:15:12.838 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45627 Sep 22 23:15:12.838 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0AD.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0AD.completed"
45628 Sep 22 23:15:12.838 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45629 Sep 22 23:15:12.838 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45630 Sep 22 23:15:12.839 DEBG [0] It's time to notify for 470
45631 Sep 22 23:15:12.839 INFO Completion from [0] id:470 status:true
45632 Sep 22 23:15:12.839 INFO [471/752] Repair commands completed
45633 Sep 22 23:15:12.839 INFO Pop front: ReconcileIO { id: ReconciliationId(471), op: ExtentReopen { repair_id: ReconciliationId(471), extent_id: 173 }, state: ClientData([New, New, New]) }
45634 Sep 22 23:15:12.839 INFO Sent repair work, now wait for resp
45635 Sep 22 23:15:12.839 INFO [0] received reconcile message
45636 Sep 22 23:15:12.839 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(471), op: ExtentReopen { repair_id: ReconciliationId(471), extent_id: 173 }, state: ClientData([InProgress, New, New]) }, : downstairs
45637 Sep 22 23:15:12.839 INFO [0] client ExtentReopen { repair_id: ReconciliationId(471), extent_id: 173 }
45638 Sep 22 23:15:12.839 INFO [1] received reconcile message
45639 Sep 22 23:15:12.839 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(471), op: ExtentReopen { repair_id: ReconciliationId(471), extent_id: 173 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45640 Sep 22 23:15:12.839 INFO [1] client ExtentReopen { repair_id: ReconciliationId(471), extent_id: 173 }
45641 Sep 22 23:15:12.839 INFO [2] received reconcile message
45642 Sep 22 23:15:12.839 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(471), op: ExtentReopen { repair_id: ReconciliationId(471), extent_id: 173 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45643 Sep 22 23:15:12.839 INFO [2] client ExtentReopen { repair_id: ReconciliationId(471), extent_id: 173 }
45644 Sep 22 23:15:12.839 DEBG 471 Reopen extent 173
45645 Sep 22 23:15:12.840 DEBG 471 Reopen extent 173
45646 Sep 22 23:15:12.841 DEBG 471 Reopen extent 173
45647 Sep 22 23:15:12.841 DEBG [2] It's time to notify for 471
45648 Sep 22 23:15:12.841 INFO Completion from [2] id:471 status:true
45649 Sep 22 23:15:12.841 INFO [472/752] Repair commands completed
45650 Sep 22 23:15:12.841 INFO Pop front: ReconcileIO { id: ReconciliationId(472), op: ExtentFlush { repair_id: ReconciliationId(472), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45651 Sep 22 23:15:12.841 INFO Sent repair work, now wait for resp
45652 Sep 22 23:15:12.841 INFO [0] received reconcile message
45653 Sep 22 23:15:12.841 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(472), op: ExtentFlush { repair_id: ReconciliationId(472), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45654 Sep 22 23:15:12.841 INFO [0] client ExtentFlush { repair_id: ReconciliationId(472), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45655 Sep 22 23:15:12.841 INFO [1] received reconcile message
45656 Sep 22 23:15:12.841 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(472), op: ExtentFlush { repair_id: ReconciliationId(472), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45657 Sep 22 23:15:12.841 INFO [1] client ExtentFlush { repair_id: ReconciliationId(472), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45658 Sep 22 23:15:12.841 INFO [2] received reconcile message
45659 Sep 22 23:15:12.841 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(472), op: ExtentFlush { repair_id: ReconciliationId(472), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45660 Sep 22 23:15:12.841 INFO [2] client ExtentFlush { repair_id: ReconciliationId(472), extent_id: 180, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45661 Sep 22 23:15:12.842 DEBG 472 Flush extent 180 with f:2 g:2
45662 Sep 22 23:15:12.842 DEBG Flush just extent 180 with f:2 and g:2
45663 Sep 22 23:15:12.842 DEBG [1] It's time to notify for 472
45664 Sep 22 23:15:12.842 INFO Completion from [1] id:472 status:true
45665 Sep 22 23:15:12.842 INFO [473/752] Repair commands completed
45666 Sep 22 23:15:12.842 INFO Pop front: ReconcileIO { id: ReconciliationId(473), op: ExtentClose { repair_id: ReconciliationId(473), extent_id: 180 }, state: ClientData([New, New, New]) }
45667 Sep 22 23:15:12.842 INFO Sent repair work, now wait for resp
45668 Sep 22 23:15:12.842 INFO [0] received reconcile message
45669 Sep 22 23:15:12.842 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(473), op: ExtentClose { repair_id: ReconciliationId(473), extent_id: 180 }, state: ClientData([InProgress, New, New]) }, : downstairs
45670 Sep 22 23:15:12.842 INFO [0] client ExtentClose { repair_id: ReconciliationId(473), extent_id: 180 }
45671 Sep 22 23:15:12.842 INFO [1] received reconcile message
45672 Sep 22 23:15:12.842 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(473), op: ExtentClose { repair_id: ReconciliationId(473), extent_id: 180 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45673 Sep 22 23:15:12.842 INFO [1] client ExtentClose { repair_id: ReconciliationId(473), extent_id: 180 }
45674 Sep 22 23:15:12.842 INFO [2] received reconcile message
45675 Sep 22 23:15:12.842 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(473), op: ExtentClose { repair_id: ReconciliationId(473), extent_id: 180 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45676 Sep 22 23:15:12.842 INFO [2] client ExtentClose { repair_id: ReconciliationId(473), extent_id: 180 }
45677 Sep 22 23:15:12.842 DEBG 473 Close extent 180
45678 Sep 22 23:15:12.842 DEBG 473 Close extent 180
45679 Sep 22 23:15:12.843 DEBG 473 Close extent 180
45680 Sep 22 23:15:12.843 DEBG [2] It's time to notify for 473
45681 Sep 22 23:15:12.843 INFO Completion from [2] id:473 status:true
45682 Sep 22 23:15:12.843 INFO [474/752] Repair commands completed
45683 Sep 22 23:15:12.843 INFO Pop front: ReconcileIO { id: ReconciliationId(474), op: ExtentRepair { repair_id: ReconciliationId(474), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45684 Sep 22 23:15:12.843 INFO Sent repair work, now wait for resp
45685 Sep 22 23:15:12.843 INFO [0] received reconcile message
45686 Sep 22 23:15:12.843 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(474), op: ExtentRepair { repair_id: ReconciliationId(474), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45687 Sep 22 23:15:12.843 INFO [0] client ExtentRepair { repair_id: ReconciliationId(474), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45688 Sep 22 23:15:12.843 INFO [0] Sending repair request ReconciliationId(474)
45689 Sep 22 23:15:12.843 INFO [1] received reconcile message
45690 Sep 22 23:15:12.843 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(474), op: ExtentRepair { repair_id: ReconciliationId(474), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45691 Sep 22 23:15:12.843 INFO [1] client ExtentRepair { repair_id: ReconciliationId(474), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45692 Sep 22 23:15:12.843 INFO [1] No action required ReconciliationId(474)
45693 Sep 22 23:15:12.843 INFO [2] received reconcile message
45694 Sep 22 23:15:12.843 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(474), op: ExtentRepair { repair_id: ReconciliationId(474), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45695 Sep 22 23:15:12.844 INFO [2] client ExtentRepair { repair_id: ReconciliationId(474), extent_id: 180, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45696 Sep 22 23:15:12.844 INFO [2] No action required ReconciliationId(474)
45697 Sep 22 23:15:12.844 DEBG 474 Repair extent 180 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
45698 Sep 22 23:15:12.844 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0B4.copy"
45699 Sep 22 23:15:12.906 INFO accepted connection, remote_addr: 127.0.0.1:45527, local_addr: 127.0.0.1:46213, task: repair
45700 Sep 22 23:15:12.907 TRCE incoming request, uri: /extent/180/files, method: GET, req_id: 68f118d7-9574-44dc-be8f-cbdfb6437e23, remote_addr: 127.0.0.1:45527, local_addr: 127.0.0.1:46213, task: repair
45701 Sep 22 23:15:12.907 INFO request completed, latency_us: 207, response_code: 200, uri: /extent/180/files, method: GET, req_id: 68f118d7-9574-44dc-be8f-cbdfb6437e23, remote_addr: 127.0.0.1:45527, local_addr: 127.0.0.1:46213, task: repair
45702 Sep 22 23:15:12.907 INFO eid:180 Found repair files: ["0B4", "0B4.db"]
45703 Sep 22 23:15:12.907 TRCE incoming request, uri: /newextent/180/data, method: GET, req_id: f83e0cf7-3e23-4864-a6d5-3c097fe6554e, remote_addr: 127.0.0.1:45527, local_addr: 127.0.0.1:46213, task: repair
45704 Sep 22 23:15:12.908 INFO request completed, latency_us: 309, response_code: 200, uri: /newextent/180/data, method: GET, req_id: f83e0cf7-3e23-4864-a6d5-3c097fe6554e, remote_addr: 127.0.0.1:45527, local_addr: 127.0.0.1:46213, task: repair
45705 Sep 22 23:15:12.913 TRCE incoming request, uri: /newextent/180/db, method: GET, req_id: c1fc9cea-1a9d-43c6-9713-dcba7603efd1, remote_addr: 127.0.0.1:45527, local_addr: 127.0.0.1:46213, task: repair
45706 Sep 22 23:15:12.913 INFO request completed, latency_us: 305, response_code: 200, uri: /newextent/180/db, method: GET, req_id: c1fc9cea-1a9d-43c6-9713-dcba7603efd1, remote_addr: 127.0.0.1:45527, local_addr: 127.0.0.1:46213, task: repair
45707 Sep 22 23:15:12.914 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0B4.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0B4.replace"
45708 Sep 22 23:15:12.914 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45709 Sep 22 23:15:12.915 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0B4.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
45710 Sep 22 23:15:12.915 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B4"
45711 Sep 22 23:15:12.915 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B4.db"
45712 Sep 22 23:15:12.916 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45713 Sep 22 23:15:12.916 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0B4.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0B4.completed"
45714 Sep 22 23:15:12.916 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45715 Sep 22 23:15:12.916 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45716 Sep 22 23:15:12.916 DEBG [0] It's time to notify for 474
45717 Sep 22 23:15:12.916 INFO Completion from [0] id:474 status:true
45718 Sep 22 23:15:12.916 INFO [475/752] Repair commands completed
45719 Sep 22 23:15:12.916 INFO Pop front: ReconcileIO { id: ReconciliationId(475), op: ExtentReopen { repair_id: ReconciliationId(475), extent_id: 180 }, state: ClientData([New, New, New]) }
45720 Sep 22 23:15:12.916 INFO Sent repair work, now wait for resp
45721 Sep 22 23:15:12.916 INFO [0] received reconcile message
45722 Sep 22 23:15:12.916 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(475), op: ExtentReopen { repair_id: ReconciliationId(475), extent_id: 180 }, state: ClientData([InProgress, New, New]) }, : downstairs
45723 Sep 22 23:15:12.916 INFO [0] client ExtentReopen { repair_id: ReconciliationId(475), extent_id: 180 }
45724 Sep 22 23:15:12.916 INFO [1] received reconcile message
45725 Sep 22 23:15:12.916 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(475), op: ExtentReopen { repair_id: ReconciliationId(475), extent_id: 180 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45726 Sep 22 23:15:12.916 INFO [1] client ExtentReopen { repair_id: ReconciliationId(475), extent_id: 180 }
45727 Sep 22 23:15:12.916 INFO [2] received reconcile message
45728 Sep 22 23:15:12.916 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(475), op: ExtentReopen { repair_id: ReconciliationId(475), extent_id: 180 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45729 Sep 22 23:15:12.916 INFO [2] client ExtentReopen { repair_id: ReconciliationId(475), extent_id: 180 }
45730 Sep 22 23:15:12.916 DEBG 475 Reopen extent 180
45731 Sep 22 23:15:12.917 DEBG 475 Reopen extent 180
45732 Sep 22 23:15:12.917 DEBG 475 Reopen extent 180
45733 Sep 22 23:15:12.918 DEBG [2] It's time to notify for 475
45734 Sep 22 23:15:12.918 INFO Completion from [2] id:475 status:true
45735 Sep 22 23:15:12.918 INFO [476/752] Repair commands completed
45736 Sep 22 23:15:12.918 INFO Pop front: ReconcileIO { id: ReconciliationId(476), op: ExtentFlush { repair_id: ReconciliationId(476), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45737 Sep 22 23:15:12.918 INFO Sent repair work, now wait for resp
45738 Sep 22 23:15:12.918 INFO [0] received reconcile message
45739 Sep 22 23:15:12.918 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(476), op: ExtentFlush { repair_id: ReconciliationId(476), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45740 Sep 22 23:15:12.918 INFO [0] client ExtentFlush { repair_id: ReconciliationId(476), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45741 Sep 22 23:15:12.918 INFO [1] received reconcile message
45742 Sep 22 23:15:12.918 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(476), op: ExtentFlush { repair_id: ReconciliationId(476), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45743 Sep 22 23:15:12.918 INFO [1] client ExtentFlush { repair_id: ReconciliationId(476), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45744 Sep 22 23:15:12.918 INFO [2] received reconcile message
45745 Sep 22 23:15:12.918 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(476), op: ExtentFlush { repair_id: ReconciliationId(476), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45746 Sep 22 23:15:12.918 INFO [2] client ExtentFlush { repair_id: ReconciliationId(476), extent_id: 184, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45747 Sep 22 23:15:12.919 DEBG 476 Flush extent 184 with f:2 g:2
45748 Sep 22 23:15:12.919 DEBG Flush just extent 184 with f:2 and g:2
45749 Sep 22 23:15:12.919 DEBG [1] It's time to notify for 476
45750 Sep 22 23:15:12.919 INFO Completion from [1] id:476 status:true
45751 Sep 22 23:15:12.919 INFO [477/752] Repair commands completed
45752 Sep 22 23:15:12.919 INFO Pop front: ReconcileIO { id: ReconciliationId(477), op: ExtentClose { repair_id: ReconciliationId(477), extent_id: 184 }, state: ClientData([New, New, New]) }
45753 Sep 22 23:15:12.919 INFO Sent repair work, now wait for resp
45754 Sep 22 23:15:12.919 INFO [0] received reconcile message
45755 Sep 22 23:15:12.919 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(477), op: ExtentClose { repair_id: ReconciliationId(477), extent_id: 184 }, state: ClientData([InProgress, New, New]) }, : downstairs
45756 Sep 22 23:15:12.919 INFO [0] client ExtentClose { repair_id: ReconciliationId(477), extent_id: 184 }
45757 Sep 22 23:15:12.919 INFO [1] received reconcile message
45758 Sep 22 23:15:12.919 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(477), op: ExtentClose { repair_id: ReconciliationId(477), extent_id: 184 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45759 Sep 22 23:15:12.919 INFO [1] client ExtentClose { repair_id: ReconciliationId(477), extent_id: 184 }
45760 Sep 22 23:15:12.919 INFO [2] received reconcile message
45761 Sep 22 23:15:12.919 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(477), op: ExtentClose { repair_id: ReconciliationId(477), extent_id: 184 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45762 Sep 22 23:15:12.919 INFO [2] client ExtentClose { repair_id: ReconciliationId(477), extent_id: 184 }
45763 Sep 22 23:15:12.919 DEBG 477 Close extent 184
45764 Sep 22 23:15:12.919 DEBG 477 Close extent 184
45765 Sep 22 23:15:12.920 DEBG 477 Close extent 184
45766 Sep 22 23:15:12.920 DEBG [2] It's time to notify for 477
45767 Sep 22 23:15:12.920 INFO Completion from [2] id:477 status:true
45768 Sep 22 23:15:12.920 INFO [478/752] Repair commands completed
45769 Sep 22 23:15:12.920 INFO Pop front: ReconcileIO { id: ReconciliationId(478), op: ExtentRepair { repair_id: ReconciliationId(478), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45770 Sep 22 23:15:12.920 INFO Sent repair work, now wait for resp
45771 Sep 22 23:15:12.920 INFO [0] received reconcile message
45772 Sep 22 23:15:12.920 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(478), op: ExtentRepair { repair_id: ReconciliationId(478), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45773 Sep 22 23:15:12.920 INFO [0] client ExtentRepair { repair_id: ReconciliationId(478), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45774 Sep 22 23:15:12.920 INFO [0] Sending repair request ReconciliationId(478)
45775 Sep 22 23:15:12.920 INFO [1] received reconcile message
45776 Sep 22 23:15:12.920 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(478), op: ExtentRepair { repair_id: ReconciliationId(478), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45777 Sep 22 23:15:12.920 INFO [1] client ExtentRepair { repair_id: ReconciliationId(478), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45778 Sep 22 23:15:12.920 INFO [1] No action required ReconciliationId(478)
45779 Sep 22 23:15:12.920 INFO [2] received reconcile message
45780 Sep 22 23:15:12.920 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(478), op: ExtentRepair { repair_id: ReconciliationId(478), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45781 Sep 22 23:15:12.920 INFO [2] client ExtentRepair { repair_id: ReconciliationId(478), extent_id: 184, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45782 Sep 22 23:15:12.921 INFO [2] No action required ReconciliationId(478)
45783 Sep 22 23:15:12.921 DEBG 478 Repair extent 184 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
45784 Sep 22 23:15:12.921 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0B8.copy"
45785 Sep 22 23:15:12.983 INFO accepted connection, remote_addr: 127.0.0.1:49065, local_addr: 127.0.0.1:46213, task: repair
45786 Sep 22 23:15:12.983 TRCE incoming request, uri: /extent/184/files, method: GET, req_id: 9e95320a-36f2-4564-bc77-385b136ed18b, remote_addr: 127.0.0.1:49065, local_addr: 127.0.0.1:46213, task: repair
45787 Sep 22 23:15:12.983 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/184/files, method: GET, req_id: 9e95320a-36f2-4564-bc77-385b136ed18b, remote_addr: 127.0.0.1:49065, local_addr: 127.0.0.1:46213, task: repair
45788 Sep 22 23:15:12.983 INFO eid:184 Found repair files: ["0B8", "0B8.db"]
45789 Sep 22 23:15:12.984 TRCE incoming request, uri: /newextent/184/data, method: GET, req_id: 304ec717-f476-4fc9-ac08-9f32fea1f32f, remote_addr: 127.0.0.1:49065, local_addr: 127.0.0.1:46213, task: repair
45790 Sep 22 23:15:12.984 INFO request completed, latency_us: 322, response_code: 200, uri: /newextent/184/data, method: GET, req_id: 304ec717-f476-4fc9-ac08-9f32fea1f32f, remote_addr: 127.0.0.1:49065, local_addr: 127.0.0.1:46213, task: repair
45791 Sep 22 23:15:12.989 TRCE incoming request, uri: /newextent/184/db, method: GET, req_id: bb731711-2ba2-4595-9ef5-69cd5bc7e36c, remote_addr: 127.0.0.1:49065, local_addr: 127.0.0.1:46213, task: repair
45792 Sep 22 23:15:12.989 INFO request completed, latency_us: 309, response_code: 200, uri: /newextent/184/db, method: GET, req_id: bb731711-2ba2-4595-9ef5-69cd5bc7e36c, remote_addr: 127.0.0.1:49065, local_addr: 127.0.0.1:46213, task: repair
45793 Sep 22 23:15:12.990 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0B8.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0B8.replace"
45794 Sep 22 23:15:12.991 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45795 Sep 22 23:15:12.991 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0B8.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
45796 Sep 22 23:15:12.992 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B8"
45797 Sep 22 23:15:12.992 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B8.db"
45798 Sep 22 23:15:12.992 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45799 Sep 22 23:15:12.992 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0B8.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0B8.completed"
45800 Sep 22 23:15:12.992 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45801 Sep 22 23:15:12.992 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45802 Sep 22 23:15:12.992 DEBG [0] It's time to notify for 478
45803 Sep 22 23:15:12.992 INFO Completion from [0] id:478 status:true
45804 Sep 22 23:15:12.992 INFO [479/752] Repair commands completed
45805 Sep 22 23:15:12.992 INFO Pop front: ReconcileIO { id: ReconciliationId(479), op: ExtentReopen { repair_id: ReconciliationId(479), extent_id: 184 }, state: ClientData([New, New, New]) }
45806 Sep 22 23:15:12.992 INFO Sent repair work, now wait for resp
45807 Sep 22 23:15:12.992 INFO [0] received reconcile message
45808 Sep 22 23:15:12.992 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(479), op: ExtentReopen { repair_id: ReconciliationId(479), extent_id: 184 }, state: ClientData([InProgress, New, New]) }, : downstairs
45809 Sep 22 23:15:12.992 INFO [0] client ExtentReopen { repair_id: ReconciliationId(479), extent_id: 184 }
45810 Sep 22 23:15:12.992 INFO [1] received reconcile message
45811 Sep 22 23:15:12.992 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(479), op: ExtentReopen { repair_id: ReconciliationId(479), extent_id: 184 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45812 Sep 22 23:15:12.992 INFO [1] client ExtentReopen { repair_id: ReconciliationId(479), extent_id: 184 }
45813 Sep 22 23:15:12.992 INFO [2] received reconcile message
45814 Sep 22 23:15:12.992 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(479), op: ExtentReopen { repair_id: ReconciliationId(479), extent_id: 184 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45815 Sep 22 23:15:12.992 INFO [2] client ExtentReopen { repair_id: ReconciliationId(479), extent_id: 184 }
45816 Sep 22 23:15:12.993 DEBG 479 Reopen extent 184
45817 Sep 22 23:15:12.993 DEBG 479 Reopen extent 184
45818 Sep 22 23:15:12.994 DEBG 479 Reopen extent 184
45819 Sep 22 23:15:12.994 DEBG [2] It's time to notify for 479
45820 Sep 22 23:15:12.994 INFO Completion from [2] id:479 status:true
45821 Sep 22 23:15:12.994 INFO [480/752] Repair commands completed
45822 Sep 22 23:15:12.994 INFO Pop front: ReconcileIO { id: ReconciliationId(480), op: ExtentFlush { repair_id: ReconciliationId(480), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45823 Sep 22 23:15:12.994 INFO Sent repair work, now wait for resp
45824 Sep 22 23:15:12.994 INFO [0] received reconcile message
45825 Sep 22 23:15:12.995 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(480), op: ExtentFlush { repair_id: ReconciliationId(480), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45826 Sep 22 23:15:12.995 INFO [0] client ExtentFlush { repair_id: ReconciliationId(480), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45827 Sep 22 23:15:12.995 INFO [1] received reconcile message
45828 Sep 22 23:15:12.995 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(480), op: ExtentFlush { repair_id: ReconciliationId(480), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45829 Sep 22 23:15:12.995 INFO [1] client ExtentFlush { repair_id: ReconciliationId(480), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45830 Sep 22 23:15:12.995 INFO [2] received reconcile message
45831 Sep 22 23:15:12.995 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(480), op: ExtentFlush { repair_id: ReconciliationId(480), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45832 Sep 22 23:15:12.995 INFO [2] client ExtentFlush { repair_id: ReconciliationId(480), extent_id: 24, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45833 Sep 22 23:15:12.995 DEBG 480 Flush extent 24 with f:2 g:2
45834 Sep 22 23:15:12.995 DEBG Flush just extent 24 with f:2 and g:2
45835 Sep 22 23:15:12.995 DEBG [1] It's time to notify for 480
45836 Sep 22 23:15:12.995 INFO Completion from [1] id:480 status:true
45837 Sep 22 23:15:12.995 INFO [481/752] Repair commands completed
45838 Sep 22 23:15:12.995 INFO Pop front: ReconcileIO { id: ReconciliationId(481), op: ExtentClose { repair_id: ReconciliationId(481), extent_id: 24 }, state: ClientData([New, New, New]) }
45839 Sep 22 23:15:12.995 INFO Sent repair work, now wait for resp
45840 Sep 22 23:15:12.995 INFO [0] received reconcile message
45841 Sep 22 23:15:12.995 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(481), op: ExtentClose { repair_id: ReconciliationId(481), extent_id: 24 }, state: ClientData([InProgress, New, New]) }, : downstairs
45842 Sep 22 23:15:12.995 INFO [0] client ExtentClose { repair_id: ReconciliationId(481), extent_id: 24 }
45843 Sep 22 23:15:12.995 INFO [1] received reconcile message
45844 Sep 22 23:15:12.995 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(481), op: ExtentClose { repair_id: ReconciliationId(481), extent_id: 24 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45845 Sep 22 23:15:12.995 INFO [1] client ExtentClose { repair_id: ReconciliationId(481), extent_id: 24 }
45846 Sep 22 23:15:12.995 INFO [2] received reconcile message
45847 Sep 22 23:15:12.995 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(481), op: ExtentClose { repair_id: ReconciliationId(481), extent_id: 24 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45848 Sep 22 23:15:12.995 INFO [2] client ExtentClose { repair_id: ReconciliationId(481), extent_id: 24 }
45849 Sep 22 23:15:12.995 DEBG 481 Close extent 24
45850 Sep 22 23:15:12.996 DEBG 481 Close extent 24
45851 Sep 22 23:15:12.996 DEBG 481 Close extent 24
45852 Sep 22 23:15:12.996 DEBG [2] It's time to notify for 481
45853 Sep 22 23:15:12.996 INFO Completion from [2] id:481 status:true
45854 Sep 22 23:15:12.996 INFO [482/752] Repair commands completed
45855 Sep 22 23:15:12.996 INFO Pop front: ReconcileIO { id: ReconciliationId(482), op: ExtentRepair { repair_id: ReconciliationId(482), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45856 Sep 22 23:15:12.996 INFO Sent repair work, now wait for resp
45857 Sep 22 23:15:12.996 INFO [0] received reconcile message
45858 Sep 22 23:15:12.997 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(482), op: ExtentRepair { repair_id: ReconciliationId(482), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45859 Sep 22 23:15:12.997 INFO [0] client ExtentRepair { repair_id: ReconciliationId(482), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45860 Sep 22 23:15:12.997 INFO [0] Sending repair request ReconciliationId(482)
45861 Sep 22 23:15:12.997 INFO [1] received reconcile message
45862 Sep 22 23:15:12.997 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(482), op: ExtentRepair { repair_id: ReconciliationId(482), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45863 Sep 22 23:15:12.997 INFO [1] client ExtentRepair { repair_id: ReconciliationId(482), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45864 Sep 22 23:15:12.997 INFO [1] No action required ReconciliationId(482)
45865 Sep 22 23:15:12.997 INFO [2] received reconcile message
45866 Sep 22 23:15:12.997 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(482), op: ExtentRepair { repair_id: ReconciliationId(482), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45867 Sep 22 23:15:12.997 INFO [2] client ExtentRepair { repair_id: ReconciliationId(482), extent_id: 24, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45868 Sep 22 23:15:12.997 INFO [2] No action required ReconciliationId(482)
45869 Sep 22 23:15:12.997 DEBG 482 Repair extent 24 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
45870 Sep 22 23:15:12.997 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/018.copy"
45871 Sep 22 23:15:13.061 INFO accepted connection, remote_addr: 127.0.0.1:59076, local_addr: 127.0.0.1:46213, task: repair
45872 Sep 22 23:15:13.061 TRCE incoming request, uri: /extent/24/files, method: GET, req_id: 03f43fcc-c5a7-48f3-a713-af38666b2052, remote_addr: 127.0.0.1:59076, local_addr: 127.0.0.1:46213, task: repair
45873 Sep 22 23:15:13.061 INFO request completed, latency_us: 276, response_code: 200, uri: /extent/24/files, method: GET, req_id: 03f43fcc-c5a7-48f3-a713-af38666b2052, remote_addr: 127.0.0.1:59076, local_addr: 127.0.0.1:46213, task: repair
45874 Sep 22 23:15:13.061 INFO eid:24 Found repair files: ["018", "018.db"]
45875 Sep 22 23:15:13.062 TRCE incoming request, uri: /newextent/24/data, method: GET, req_id: 3380397b-cc84-4cb8-b99c-1e3a7a53dac2, remote_addr: 127.0.0.1:59076, local_addr: 127.0.0.1:46213, task: repair
45876 Sep 22 23:15:13.062 INFO request completed, latency_us: 308, response_code: 200, uri: /newextent/24/data, method: GET, req_id: 3380397b-cc84-4cb8-b99c-1e3a7a53dac2, remote_addr: 127.0.0.1:59076, local_addr: 127.0.0.1:46213, task: repair
45877 Sep 22 23:15:13.067 TRCE incoming request, uri: /newextent/24/db, method: GET, req_id: 767ff906-7ab6-461e-bfd8-60e97219d14d, remote_addr: 127.0.0.1:59076, local_addr: 127.0.0.1:46213, task: repair
45878 Sep 22 23:15:13.068 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/24/db, method: GET, req_id: 767ff906-7ab6-461e-bfd8-60e97219d14d, remote_addr: 127.0.0.1:59076, local_addr: 127.0.0.1:46213, task: repair
45879 Sep 22 23:15:13.069 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/018.copy" to "/tmp/downstairs-vrx8aK6L/00/000/018.replace"
45880 Sep 22 23:15:13.069 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45881 Sep 22 23:15:13.070 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/018.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
45882 Sep 22 23:15:13.070 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/018"
45883 Sep 22 23:15:13.070 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/018.db"
45884 Sep 22 23:15:13.070 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45885 Sep 22 23:15:13.070 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/018.replace" to "/tmp/downstairs-vrx8aK6L/00/000/018.completed"
45886 Sep 22 23:15:13.070 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45887 Sep 22 23:15:13.070 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45888 Sep 22 23:15:13.070 DEBG [0] It's time to notify for 482
45889 Sep 22 23:15:13.070 INFO Completion from [0] id:482 status:true
45890 Sep 22 23:15:13.070 INFO [483/752] Repair commands completed
45891 Sep 22 23:15:13.070 INFO Pop front: ReconcileIO { id: ReconciliationId(483), op: ExtentReopen { repair_id: ReconciliationId(483), extent_id: 24 }, state: ClientData([New, New, New]) }
45892 Sep 22 23:15:13.071 INFO Sent repair work, now wait for resp
45893 Sep 22 23:15:13.071 INFO [0] received reconcile message
45894 Sep 22 23:15:13.071 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(483), op: ExtentReopen { repair_id: ReconciliationId(483), extent_id: 24 }, state: ClientData([InProgress, New, New]) }, : downstairs
45895 Sep 22 23:15:13.071 INFO [0] client ExtentReopen { repair_id: ReconciliationId(483), extent_id: 24 }
45896 Sep 22 23:15:13.071 INFO [1] received reconcile message
45897 Sep 22 23:15:13.071 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(483), op: ExtentReopen { repair_id: ReconciliationId(483), extent_id: 24 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45898 Sep 22 23:15:13.071 INFO [1] client ExtentReopen { repair_id: ReconciliationId(483), extent_id: 24 }
45899 Sep 22 23:15:13.071 INFO [2] received reconcile message
45900 Sep 22 23:15:13.071 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(483), op: ExtentReopen { repair_id: ReconciliationId(483), extent_id: 24 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45901 Sep 22 23:15:13.071 INFO [2] client ExtentReopen { repair_id: ReconciliationId(483), extent_id: 24 }
45902 Sep 22 23:15:13.071 DEBG 483 Reopen extent 24
45903 Sep 22 23:15:13.072 DEBG 483 Reopen extent 24
45904 Sep 22 23:15:13.072 DEBG 483 Reopen extent 24
45905 Sep 22 23:15:13.073 DEBG [2] It's time to notify for 483
45906 Sep 22 23:15:13.073 INFO Completion from [2] id:483 status:true
45907 Sep 22 23:15:13.073 INFO [484/752] Repair commands completed
45908 Sep 22 23:15:13.073 INFO Pop front: ReconcileIO { id: ReconciliationId(484), op: ExtentFlush { repair_id: ReconciliationId(484), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
45909 Sep 22 23:15:13.073 INFO Sent repair work, now wait for resp
45910 Sep 22 23:15:13.073 INFO [0] received reconcile message
45911 Sep 22 23:15:13.073 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(484), op: ExtentFlush { repair_id: ReconciliationId(484), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
45912 Sep 22 23:15:13.073 INFO [0] client ExtentFlush { repair_id: ReconciliationId(484), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45913 Sep 22 23:15:13.073 INFO [1] received reconcile message
45914 Sep 22 23:15:13.073 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(484), op: ExtentFlush { repair_id: ReconciliationId(484), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
45915 Sep 22 23:15:13.073 INFO [1] client ExtentFlush { repair_id: ReconciliationId(484), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45916 Sep 22 23:15:13.073 INFO [2] received reconcile message
45917 Sep 22 23:15:13.073 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(484), op: ExtentFlush { repair_id: ReconciliationId(484), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
45918 Sep 22 23:15:13.073 INFO [2] client ExtentFlush { repair_id: ReconciliationId(484), extent_id: 50, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
45919 Sep 22 23:15:13.073 DEBG 484 Flush extent 50 with f:2 g:2
45920 Sep 22 23:15:13.073 DEBG Flush just extent 50 with f:2 and g:2
45921 Sep 22 23:15:13.073 DEBG [1] It's time to notify for 484
45922 Sep 22 23:15:13.074 INFO Completion from [1] id:484 status:true
45923 Sep 22 23:15:13.074 INFO [485/752] Repair commands completed
45924 Sep 22 23:15:13.074 INFO Pop front: ReconcileIO { id: ReconciliationId(485), op: ExtentClose { repair_id: ReconciliationId(485), extent_id: 50 }, state: ClientData([New, New, New]) }
45925 Sep 22 23:15:13.074 INFO Sent repair work, now wait for resp
45926 Sep 22 23:15:13.074 INFO [0] received reconcile message
45927 Sep 22 23:15:13.074 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(485), op: ExtentClose { repair_id: ReconciliationId(485), extent_id: 50 }, state: ClientData([InProgress, New, New]) }, : downstairs
45928 Sep 22 23:15:13.074 INFO [0] client ExtentClose { repair_id: ReconciliationId(485), extent_id: 50 }
45929 Sep 22 23:15:13.074 INFO [1] received reconcile message
45930 Sep 22 23:15:13.074 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(485), op: ExtentClose { repair_id: ReconciliationId(485), extent_id: 50 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45931 Sep 22 23:15:13.074 INFO [1] client ExtentClose { repair_id: ReconciliationId(485), extent_id: 50 }
45932 Sep 22 23:15:13.074 INFO [2] received reconcile message
45933 Sep 22 23:15:13.074 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(485), op: ExtentClose { repair_id: ReconciliationId(485), extent_id: 50 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45934 Sep 22 23:15:13.074 INFO [2] client ExtentClose { repair_id: ReconciliationId(485), extent_id: 50 }
45935 Sep 22 23:15:13.074 DEBG 485 Close extent 50
45936 Sep 22 23:15:13.074 DEBG 485 Close extent 50
45937 Sep 22 23:15:13.075 DEBG 485 Close extent 50
45938 Sep 22 23:15:13.075 DEBG [2] It's time to notify for 485
45939 Sep 22 23:15:13.075 INFO Completion from [2] id:485 status:true
45940 Sep 22 23:15:13.075 INFO [486/752] Repair commands completed
45941 Sep 22 23:15:13.075 INFO Pop front: ReconcileIO { id: ReconciliationId(486), op: ExtentRepair { repair_id: ReconciliationId(486), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
45942 Sep 22 23:15:13.075 INFO Sent repair work, now wait for resp
45943 Sep 22 23:15:13.075 INFO [0] received reconcile message
45944 Sep 22 23:15:13.075 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(486), op: ExtentRepair { repair_id: ReconciliationId(486), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
45945 Sep 22 23:15:13.075 INFO [0] client ExtentRepair { repair_id: ReconciliationId(486), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45946 Sep 22 23:15:13.075 INFO [0] Sending repair request ReconciliationId(486)
45947 Sep 22 23:15:13.075 INFO [1] received reconcile message
45948 Sep 22 23:15:13.075 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(486), op: ExtentRepair { repair_id: ReconciliationId(486), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45949 Sep 22 23:15:13.075 INFO [1] client ExtentRepair { repair_id: ReconciliationId(486), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45950 Sep 22 23:15:13.075 INFO [1] No action required ReconciliationId(486)
45951 Sep 22 23:15:13.075 INFO [2] received reconcile message
45952 Sep 22 23:15:13.075 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(486), op: ExtentRepair { repair_id: ReconciliationId(486), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
45953 Sep 22 23:15:13.075 INFO [2] client ExtentRepair { repair_id: ReconciliationId(486), extent_id: 50, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
45954 Sep 22 23:15:13.075 INFO [2] No action required ReconciliationId(486)
45955 Sep 22 23:15:13.075 DEBG 486 Repair extent 50 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
45956 Sep 22 23:15:13.075 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/032.copy"
45957 Sep 22 23:15:13.091 INFO [lossy] sleeping 1 second
45958 Sep 22 23:15:13.091 WARN returning error on flush!
45959 Sep 22 23:15:13.091 DEBG Flush :1085 extent_limit None deps:[JobId(1084)] res:false f:32 g:1
45960 Sep 22 23:15:13.091 DEBG Flush :1085 extent_limit None deps:[JobId(1084)] res:true f:32 g:1
45961 Sep 22 23:15:13.091 INFO [lossy] sleeping 1 second
45962 Sep 22 23:15:13.091 ERRO [0] job id 1085 saw error GenericError("test error")
45963 Sep 22 23:15:13.091 DEBG up_ds_listen was notified
45964 Sep 22 23:15:13.091 DEBG up_ds_listen process 1085
45965 Sep 22 23:15:13.091 DEBG [A] ack job 1085:86, : downstairs
45966 Sep 22 23:15:13.092 DEBG up_ds_listen checked 1 jobs, back to waiting
45967 Sep 22 23:15:13.140 INFO accepted connection, remote_addr: 127.0.0.1:43150, local_addr: 127.0.0.1:46213, task: repair
45968 Sep 22 23:15:13.141 TRCE incoming request, uri: /extent/50/files, method: GET, req_id: 1ce7639f-da64-4f3c-b9bb-24c6bd72d2bc, remote_addr: 127.0.0.1:43150, local_addr: 127.0.0.1:46213, task: repair
45969 Sep 22 23:15:13.141 INFO request completed, latency_us: 209, response_code: 200, uri: /extent/50/files, method: GET, req_id: 1ce7639f-da64-4f3c-b9bb-24c6bd72d2bc, remote_addr: 127.0.0.1:43150, local_addr: 127.0.0.1:46213, task: repair
45970 Sep 22 23:15:13.141 INFO eid:50 Found repair files: ["032", "032.db"]
45971 Sep 22 23:15:13.141 TRCE incoming request, uri: /newextent/50/data, method: GET, req_id: 3a65417e-9019-4ca3-90b3-93681d4ea9af, remote_addr: 127.0.0.1:43150, local_addr: 127.0.0.1:46213, task: repair
45972 Sep 22 23:15:13.142 INFO request completed, latency_us: 317, response_code: 200, uri: /newextent/50/data, method: GET, req_id: 3a65417e-9019-4ca3-90b3-93681d4ea9af, remote_addr: 127.0.0.1:43150, local_addr: 127.0.0.1:46213, task: repair
45973 Sep 22 23:15:13.147 TRCE incoming request, uri: /newextent/50/db, method: GET, req_id: de912fa8-ad69-4f7c-95e4-62e9a35567e7, remote_addr: 127.0.0.1:43150, local_addr: 127.0.0.1:46213, task: repair
45974 Sep 22 23:15:13.147 INFO request completed, latency_us: 310, response_code: 200, uri: /newextent/50/db, method: GET, req_id: de912fa8-ad69-4f7c-95e4-62e9a35567e7, remote_addr: 127.0.0.1:43150, local_addr: 127.0.0.1:46213, task: repair
45975 Sep 22 23:15:13.148 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/032.copy" to "/tmp/downstairs-vrx8aK6L/00/000/032.replace"
45976 Sep 22 23:15:13.148 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45977 Sep 22 23:15:13.149 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/032.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
45978 Sep 22 23:15:13.150 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/032"
45979 Sep 22 23:15:13.150 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/032.db"
45980 Sep 22 23:15:13.150 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45981 Sep 22 23:15:13.150 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/032.replace" to "/tmp/downstairs-vrx8aK6L/00/000/032.completed"
45982 Sep 22 23:15:13.150 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45983 Sep 22 23:15:13.150 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
45984 Sep 22 23:15:13.150 DEBG [0] It's time to notify for 486
45985 Sep 22 23:15:13.150 INFO Completion from [0] id:486 status:true
45986 Sep 22 23:15:13.150 INFO [487/752] Repair commands completed
45987 Sep 22 23:15:13.150 INFO Pop front: ReconcileIO { id: ReconciliationId(487), op: ExtentReopen { repair_id: ReconciliationId(487), extent_id: 50 }, state: ClientData([New, New, New]) }
45988 Sep 22 23:15:13.150 INFO Sent repair work, now wait for resp
45989 Sep 22 23:15:13.150 INFO [0] received reconcile message
45990 Sep 22 23:15:13.150 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(487), op: ExtentReopen { repair_id: ReconciliationId(487), extent_id: 50 }, state: ClientData([InProgress, New, New]) }, : downstairs
45991 Sep 22 23:15:13.150 INFO [0] client ExtentReopen { repair_id: ReconciliationId(487), extent_id: 50 }
45992 Sep 22 23:15:13.150 INFO [1] received reconcile message
45993 Sep 22 23:15:13.150 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(487), op: ExtentReopen { repair_id: ReconciliationId(487), extent_id: 50 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
45994 Sep 22 23:15:13.150 INFO [1] client ExtentReopen { repair_id: ReconciliationId(487), extent_id: 50 }
45995 Sep 22 23:15:13.150 INFO [2] received reconcile message
45996 Sep 22 23:15:13.150 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(487), op: ExtentReopen { repair_id: ReconciliationId(487), extent_id: 50 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
45997 Sep 22 23:15:13.150 INFO [2] client ExtentReopen { repair_id: ReconciliationId(487), extent_id: 50 }
45998 Sep 22 23:15:13.151 DEBG 487 Reopen extent 50
45999 Sep 22 23:15:13.151 DEBG 487 Reopen extent 50
46000 Sep 22 23:15:13.152 DEBG 487 Reopen extent 50
46001 Sep 22 23:15:13.152 DEBG [2] It's time to notify for 487
46002 Sep 22 23:15:13.152 INFO Completion from [2] id:487 status:true
46003 Sep 22 23:15:13.152 INFO [488/752] Repair commands completed
46004 Sep 22 23:15:13.152 INFO Pop front: ReconcileIO { id: ReconciliationId(488), op: ExtentFlush { repair_id: ReconciliationId(488), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46005 Sep 22 23:15:13.152 INFO Sent repair work, now wait for resp
46006 Sep 22 23:15:13.152 INFO [0] received reconcile message
46007 Sep 22 23:15:13.152 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(488), op: ExtentFlush { repair_id: ReconciliationId(488), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46008 Sep 22 23:15:13.153 INFO [0] client ExtentFlush { repair_id: ReconciliationId(488), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46009 Sep 22 23:15:13.153 INFO [1] received reconcile message
46010 Sep 22 23:15:13.153 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(488), op: ExtentFlush { repair_id: ReconciliationId(488), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46011 Sep 22 23:15:13.153 INFO [1] client ExtentFlush { repair_id: ReconciliationId(488), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46012 Sep 22 23:15:13.153 INFO [2] received reconcile message
46013 Sep 22 23:15:13.153 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(488), op: ExtentFlush { repair_id: ReconciliationId(488), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46014 Sep 22 23:15:13.153 INFO [2] client ExtentFlush { repair_id: ReconciliationId(488), extent_id: 117, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46015 Sep 22 23:15:13.153 DEBG 488 Flush extent 117 with f:2 g:2
46016 Sep 22 23:15:13.153 DEBG Flush just extent 117 with f:2 and g:2
46017 Sep 22 23:15:13.153 DEBG [1] It's time to notify for 488
46018 Sep 22 23:15:13.153 INFO Completion from [1] id:488 status:true
46019 Sep 22 23:15:13.153 INFO [489/752] Repair commands completed
46020 Sep 22 23:15:13.153 INFO Pop front: ReconcileIO { id: ReconciliationId(489), op: ExtentClose { repair_id: ReconciliationId(489), extent_id: 117 }, state: ClientData([New, New, New]) }
46021 Sep 22 23:15:13.153 INFO Sent repair work, now wait for resp
46022 Sep 22 23:15:13.153 INFO [0] received reconcile message
46023 Sep 22 23:15:13.153 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(489), op: ExtentClose { repair_id: ReconciliationId(489), extent_id: 117 }, state: ClientData([InProgress, New, New]) }, : downstairs
46024 Sep 22 23:15:13.153 INFO [0] client ExtentClose { repair_id: ReconciliationId(489), extent_id: 117 }
46025 Sep 22 23:15:13.153 INFO [1] received reconcile message
46026 Sep 22 23:15:13.153 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(489), op: ExtentClose { repair_id: ReconciliationId(489), extent_id: 117 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46027 Sep 22 23:15:13.153 INFO [1] client ExtentClose { repair_id: ReconciliationId(489), extent_id: 117 }
46028 Sep 22 23:15:13.153 INFO [2] received reconcile message
46029 Sep 22 23:15:13.153 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(489), op: ExtentClose { repair_id: ReconciliationId(489), extent_id: 117 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46030 Sep 22 23:15:13.153 INFO [2] client ExtentClose { repair_id: ReconciliationId(489), extent_id: 117 }
46031 Sep 22 23:15:13.153 DEBG 489 Close extent 117
46032 Sep 22 23:15:13.154 DEBG 489 Close extent 117
46033 Sep 22 23:15:13.154 DEBG 489 Close extent 117
46034 Sep 22 23:15:13.154 DEBG [2] It's time to notify for 489
46035 Sep 22 23:15:13.154 INFO Completion from [2] id:489 status:true
46036 Sep 22 23:15:13.154 INFO [490/752] Repair commands completed
46037 Sep 22 23:15:13.154 INFO Pop front: ReconcileIO { id: ReconciliationId(490), op: ExtentRepair { repair_id: ReconciliationId(490), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46038 Sep 22 23:15:13.154 INFO Sent repair work, now wait for resp
46039 Sep 22 23:15:13.154 INFO [0] received reconcile message
46040 Sep 22 23:15:13.154 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(490), op: ExtentRepair { repair_id: ReconciliationId(490), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46041 Sep 22 23:15:13.154 INFO [0] client ExtentRepair { repair_id: ReconciliationId(490), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46042 Sep 22 23:15:13.154 INFO [0] Sending repair request ReconciliationId(490)
46043 Sep 22 23:15:13.155 INFO [1] received reconcile message
46044 Sep 22 23:15:13.155 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(490), op: ExtentRepair { repair_id: ReconciliationId(490), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46045 Sep 22 23:15:13.155 INFO [1] client ExtentRepair { repair_id: ReconciliationId(490), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46046 Sep 22 23:15:13.155 INFO [1] No action required ReconciliationId(490)
46047 Sep 22 23:15:13.155 INFO [2] received reconcile message
46048 Sep 22 23:15:13.155 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(490), op: ExtentRepair { repair_id: ReconciliationId(490), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46049 Sep 22 23:15:13.155 INFO [2] client ExtentRepair { repair_id: ReconciliationId(490), extent_id: 117, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46050 Sep 22 23:15:13.155 INFO [2] No action required ReconciliationId(490)
46051 Sep 22 23:15:13.155 DEBG 490 Repair extent 117 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
46052 Sep 22 23:15:13.155 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/075.copy"
46053 Sep 22 23:15:13.219 INFO accepted connection, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:46213, task: repair
46054 Sep 22 23:15:13.219 TRCE incoming request, uri: /extent/117/files, method: GET, req_id: 6dda8120-96a8-4a28-8a35-6ee12d48f80e, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:46213, task: repair
46055 Sep 22 23:15:13.219 INFO request completed, latency_us: 233, response_code: 200, uri: /extent/117/files, method: GET, req_id: 6dda8120-96a8-4a28-8a35-6ee12d48f80e, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:46213, task: repair
46056 Sep 22 23:15:13.219 INFO eid:117 Found repair files: ["075", "075.db"]
46057 Sep 22 23:15:13.220 TRCE incoming request, uri: /newextent/117/data, method: GET, req_id: 1c111a77-052e-4e0c-8b21-43683b2cbb20, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:46213, task: repair
46058 Sep 22 23:15:13.220 INFO request completed, latency_us: 327, response_code: 200, uri: /newextent/117/data, method: GET, req_id: 1c111a77-052e-4e0c-8b21-43683b2cbb20, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:46213, task: repair
46059 Sep 22 23:15:13.225 TRCE incoming request, uri: /newextent/117/db, method: GET, req_id: 855253cc-e39a-4bb5-bfc3-3558f0dd07f9, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:46213, task: repair
46060 Sep 22 23:15:13.225 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/117/db, method: GET, req_id: 855253cc-e39a-4bb5-bfc3-3558f0dd07f9, remote_addr: 127.0.0.1:60516, local_addr: 127.0.0.1:46213, task: repair
46061 Sep 22 23:15:13.227 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/075.copy" to "/tmp/downstairs-vrx8aK6L/00/000/075.replace"
46062 Sep 22 23:15:13.227 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46063 Sep 22 23:15:13.228 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/075.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
46064 Sep 22 23:15:13.228 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/075"
46065 Sep 22 23:15:13.228 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/075.db"
46066 Sep 22 23:15:13.228 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46067 Sep 22 23:15:13.228 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/075.replace" to "/tmp/downstairs-vrx8aK6L/00/000/075.completed"
46068 Sep 22 23:15:13.228 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46069 Sep 22 23:15:13.228 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46070 Sep 22 23:15:13.228 DEBG [0] It's time to notify for 490
46071 Sep 22 23:15:13.228 INFO Completion from [0] id:490 status:true
46072 Sep 22 23:15:13.228 INFO [491/752] Repair commands completed
46073 Sep 22 23:15:13.229 INFO Pop front: ReconcileIO { id: ReconciliationId(491), op: ExtentReopen { repair_id: ReconciliationId(491), extent_id: 117 }, state: ClientData([New, New, New]) }
46074 Sep 22 23:15:13.229 INFO Sent repair work, now wait for resp
46075 Sep 22 23:15:13.229 INFO [0] received reconcile message
46076 Sep 22 23:15:13.229 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(491), op: ExtentReopen { repair_id: ReconciliationId(491), extent_id: 117 }, state: ClientData([InProgress, New, New]) }, : downstairs
46077 Sep 22 23:15:13.229 INFO [0] client ExtentReopen { repair_id: ReconciliationId(491), extent_id: 117 }
46078 Sep 22 23:15:13.229 INFO [1] received reconcile message
46079 Sep 22 23:15:13.229 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(491), op: ExtentReopen { repair_id: ReconciliationId(491), extent_id: 117 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46080 Sep 22 23:15:13.229 INFO [1] client ExtentReopen { repair_id: ReconciliationId(491), extent_id: 117 }
46081 Sep 22 23:15:13.229 INFO [2] received reconcile message
46082 Sep 22 23:15:13.229 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(491), op: ExtentReopen { repair_id: ReconciliationId(491), extent_id: 117 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46083 Sep 22 23:15:13.229 INFO [2] client ExtentReopen { repair_id: ReconciliationId(491), extent_id: 117 }
46084 Sep 22 23:15:13.229 DEBG 491 Reopen extent 117
46085 Sep 22 23:15:13.230 DEBG 491 Reopen extent 117
46086 Sep 22 23:15:13.230 DEBG 491 Reopen extent 117
46087 Sep 22 23:15:13.231 DEBG [2] It's time to notify for 491
46088 Sep 22 23:15:13.231 INFO Completion from [2] id:491 status:true
46089 Sep 22 23:15:13.231 INFO [492/752] Repair commands completed
46090 Sep 22 23:15:13.231 INFO Pop front: ReconcileIO { id: ReconciliationId(492), op: ExtentFlush { repair_id: ReconciliationId(492), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46091 Sep 22 23:15:13.231 INFO Sent repair work, now wait for resp
46092 Sep 22 23:15:13.231 INFO [0] received reconcile message
46093 Sep 22 23:15:13.231 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(492), op: ExtentFlush { repair_id: ReconciliationId(492), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46094 Sep 22 23:15:13.231 INFO [0] client ExtentFlush { repair_id: ReconciliationId(492), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46095 Sep 22 23:15:13.231 INFO [1] received reconcile message
46096 Sep 22 23:15:13.231 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(492), op: ExtentFlush { repair_id: ReconciliationId(492), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46097 Sep 22 23:15:13.231 INFO [1] client ExtentFlush { repair_id: ReconciliationId(492), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46098 Sep 22 23:15:13.231 INFO [2] received reconcile message
46099 Sep 22 23:15:13.231 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(492), op: ExtentFlush { repair_id: ReconciliationId(492), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46100 Sep 22 23:15:13.231 INFO [2] client ExtentFlush { repair_id: ReconciliationId(492), extent_id: 38, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46101 Sep 22 23:15:13.231 DEBG 492 Flush extent 38 with f:2 g:2
46102 Sep 22 23:15:13.231 DEBG Flush just extent 38 with f:2 and g:2
46103 Sep 22 23:15:13.231 DEBG [1] It's time to notify for 492
46104 Sep 22 23:15:13.231 INFO Completion from [1] id:492 status:true
46105 Sep 22 23:15:13.231 INFO [493/752] Repair commands completed
46106 Sep 22 23:15:13.231 INFO Pop front: ReconcileIO { id: ReconciliationId(493), op: ExtentClose { repair_id: ReconciliationId(493), extent_id: 38 }, state: ClientData([New, New, New]) }
46107 Sep 22 23:15:13.231 INFO Sent repair work, now wait for resp
46108 Sep 22 23:15:13.231 INFO [0] received reconcile message
46109 Sep 22 23:15:13.231 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(493), op: ExtentClose { repair_id: ReconciliationId(493), extent_id: 38 }, state: ClientData([InProgress, New, New]) }, : downstairs
46110 Sep 22 23:15:13.231 INFO [0] client ExtentClose { repair_id: ReconciliationId(493), extent_id: 38 }
46111 Sep 22 23:15:13.232 INFO [1] received reconcile message
46112 Sep 22 23:15:13.232 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(493), op: ExtentClose { repair_id: ReconciliationId(493), extent_id: 38 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46113 Sep 22 23:15:13.232 INFO [1] client ExtentClose { repair_id: ReconciliationId(493), extent_id: 38 }
46114 Sep 22 23:15:13.232 INFO [2] received reconcile message
46115 Sep 22 23:15:13.232 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(493), op: ExtentClose { repair_id: ReconciliationId(493), extent_id: 38 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46116 Sep 22 23:15:13.232 INFO [2] client ExtentClose { repair_id: ReconciliationId(493), extent_id: 38 }
46117 Sep 22 23:15:13.232 DEBG 493 Close extent 38
46118 Sep 22 23:15:13.232 DEBG 493 Close extent 38
46119 Sep 22 23:15:13.232 DEBG 493 Close extent 38
46120 Sep 22 23:15:13.233 DEBG [2] It's time to notify for 493
46121 Sep 22 23:15:13.233 INFO Completion from [2] id:493 status:true
46122 Sep 22 23:15:13.233 INFO [494/752] Repair commands completed
46123 Sep 22 23:15:13.233 INFO Pop front: ReconcileIO { id: ReconciliationId(494), op: ExtentRepair { repair_id: ReconciliationId(494), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46124 Sep 22 23:15:13.233 INFO Sent repair work, now wait for resp
46125 Sep 22 23:15:13.233 INFO [0] received reconcile message
46126 Sep 22 23:15:13.233 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(494), op: ExtentRepair { repair_id: ReconciliationId(494), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46127 Sep 22 23:15:13.233 INFO [0] client ExtentRepair { repair_id: ReconciliationId(494), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46128 Sep 22 23:15:13.233 INFO [0] Sending repair request ReconciliationId(494)
46129 Sep 22 23:15:13.233 INFO [1] received reconcile message
46130 Sep 22 23:15:13.233 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(494), op: ExtentRepair { repair_id: ReconciliationId(494), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46131 Sep 22 23:15:13.233 INFO [1] client ExtentRepair { repair_id: ReconciliationId(494), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46132 Sep 22 23:15:13.233 INFO [1] No action required ReconciliationId(494)
46133 Sep 22 23:15:13.233 INFO [2] received reconcile message
46134 Sep 22 23:15:13.233 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(494), op: ExtentRepair { repair_id: ReconciliationId(494), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46135 Sep 22 23:15:13.233 INFO [2] client ExtentRepair { repair_id: ReconciliationId(494), extent_id: 38, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46136 Sep 22 23:15:13.233 INFO [2] No action required ReconciliationId(494)
46137 Sep 22 23:15:13.233 DEBG 494 Repair extent 38 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
46138 Sep 22 23:15:13.233 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/026.copy"
46139 Sep 22 23:15:13.297 INFO accepted connection, remote_addr: 127.0.0.1:64364, local_addr: 127.0.0.1:46213, task: repair
46140 Sep 22 23:15:13.297 TRCE incoming request, uri: /extent/38/files, method: GET, req_id: 0af04966-b43f-4b58-b87f-758c3d46058c, remote_addr: 127.0.0.1:64364, local_addr: 127.0.0.1:46213, task: repair
46141 Sep 22 23:15:13.298 INFO request completed, latency_us: 216, response_code: 200, uri: /extent/38/files, method: GET, req_id: 0af04966-b43f-4b58-b87f-758c3d46058c, remote_addr: 127.0.0.1:64364, local_addr: 127.0.0.1:46213, task: repair
46142 Sep 22 23:15:13.298 INFO eid:38 Found repair files: ["026", "026.db"]
46143 Sep 22 23:15:13.298 TRCE incoming request, uri: /newextent/38/data, method: GET, req_id: 3533f4a0-4e02-4bce-a324-d48a9d2112a1, remote_addr: 127.0.0.1:64364, local_addr: 127.0.0.1:46213, task: repair
46144 Sep 22 23:15:13.298 INFO request completed, latency_us: 310, response_code: 200, uri: /newextent/38/data, method: GET, req_id: 3533f4a0-4e02-4bce-a324-d48a9d2112a1, remote_addr: 127.0.0.1:64364, local_addr: 127.0.0.1:46213, task: repair
46145 Sep 22 23:15:13.304 TRCE incoming request, uri: /newextent/38/db, method: GET, req_id: fa4e5ab6-6114-44ce-bee3-d9518c97a760, remote_addr: 127.0.0.1:64364, local_addr: 127.0.0.1:46213, task: repair
46146 Sep 22 23:15:13.304 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/38/db, method: GET, req_id: fa4e5ab6-6114-44ce-bee3-d9518c97a760, remote_addr: 127.0.0.1:64364, local_addr: 127.0.0.1:46213, task: repair
46147 Sep 22 23:15:13.305 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/026.copy" to "/tmp/downstairs-vrx8aK6L/00/000/026.replace"
46148 Sep 22 23:15:13.305 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46149 Sep 22 23:15:13.306 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/026.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
46150 Sep 22 23:15:13.306 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/026"
46151 Sep 22 23:15:13.306 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/026.db"
46152 Sep 22 23:15:13.306 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46153 Sep 22 23:15:13.306 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/026.replace" to "/tmp/downstairs-vrx8aK6L/00/000/026.completed"
46154 Sep 22 23:15:13.306 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46155 Sep 22 23:15:13.306 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46156 Sep 22 23:15:13.307 DEBG [0] It's time to notify for 494
46157 Sep 22 23:15:13.307 INFO Completion from [0] id:494 status:true
46158 Sep 22 23:15:13.307 INFO [495/752] Repair commands completed
46159 Sep 22 23:15:13.307 INFO Pop front: ReconcileIO { id: ReconciliationId(495), op: ExtentReopen { repair_id: ReconciliationId(495), extent_id: 38 }, state: ClientData([New, New, New]) }
46160 Sep 22 23:15:13.307 INFO Sent repair work, now wait for resp
46161 Sep 22 23:15:13.307 INFO [0] received reconcile message
46162 Sep 22 23:15:13.307 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(495), op: ExtentReopen { repair_id: ReconciliationId(495), extent_id: 38 }, state: ClientData([InProgress, New, New]) }, : downstairs
46163 Sep 22 23:15:13.307 INFO [0] client ExtentReopen { repair_id: ReconciliationId(495), extent_id: 38 }
46164 Sep 22 23:15:13.307 INFO [1] received reconcile message
46165 Sep 22 23:15:13.307 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(495), op: ExtentReopen { repair_id: ReconciliationId(495), extent_id: 38 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46166 Sep 22 23:15:13.307 INFO [1] client ExtentReopen { repair_id: ReconciliationId(495), extent_id: 38 }
46167 Sep 22 23:15:13.307 INFO [2] received reconcile message
46168 Sep 22 23:15:13.307 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(495), op: ExtentReopen { repair_id: ReconciliationId(495), extent_id: 38 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46169 Sep 22 23:15:13.307 INFO [2] client ExtentReopen { repair_id: ReconciliationId(495), extent_id: 38 }
46170 Sep 22 23:15:13.307 DEBG 495 Reopen extent 38
46171 Sep 22 23:15:13.308 DEBG 495 Reopen extent 38
46172 Sep 22 23:15:13.308 DEBG 495 Reopen extent 38
46173 Sep 22 23:15:13.309 DEBG [2] It's time to notify for 495
46174 Sep 22 23:15:13.309 INFO Completion from [2] id:495 status:true
46175 Sep 22 23:15:13.309 INFO [496/752] Repair commands completed
46176 Sep 22 23:15:13.309 INFO Pop front: ReconcileIO { id: ReconciliationId(496), op: ExtentFlush { repair_id: ReconciliationId(496), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46177 Sep 22 23:15:13.309 INFO Sent repair work, now wait for resp
46178 Sep 22 23:15:13.309 INFO [0] received reconcile message
46179 Sep 22 23:15:13.309 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(496), op: ExtentFlush { repair_id: ReconciliationId(496), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46180 Sep 22 23:15:13.309 INFO [0] client ExtentFlush { repair_id: ReconciliationId(496), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46181 Sep 22 23:15:13.309 INFO [1] received reconcile message
46182 Sep 22 23:15:13.309 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(496), op: ExtentFlush { repair_id: ReconciliationId(496), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46183 Sep 22 23:15:13.309 INFO [1] client ExtentFlush { repair_id: ReconciliationId(496), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46184 Sep 22 23:15:13.309 INFO [2] received reconcile message
46185 Sep 22 23:15:13.309 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(496), op: ExtentFlush { repair_id: ReconciliationId(496), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46186 Sep 22 23:15:13.309 INFO [2] client ExtentFlush { repair_id: ReconciliationId(496), extent_id: 129, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46187 Sep 22 23:15:13.309 DEBG 496 Flush extent 129 with f:2 g:2
46188 Sep 22 23:15:13.309 DEBG Flush just extent 129 with f:2 and g:2
46189 Sep 22 23:15:13.310 DEBG [1] It's time to notify for 496
46190 Sep 22 23:15:13.310 INFO Completion from [1] id:496 status:true
46191 Sep 22 23:15:13.310 INFO [497/752] Repair commands completed
46192 Sep 22 23:15:13.310 INFO Pop front: ReconcileIO { id: ReconciliationId(497), op: ExtentClose { repair_id: ReconciliationId(497), extent_id: 129 }, state: ClientData([New, New, New]) }
46193 Sep 22 23:15:13.310 INFO Sent repair work, now wait for resp
46194 Sep 22 23:15:13.310 INFO [0] received reconcile message
46195 Sep 22 23:15:13.310 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(497), op: ExtentClose { repair_id: ReconciliationId(497), extent_id: 129 }, state: ClientData([InProgress, New, New]) }, : downstairs
46196 Sep 22 23:15:13.310 INFO [0] client ExtentClose { repair_id: ReconciliationId(497), extent_id: 129 }
46197 Sep 22 23:15:13.310 INFO [1] received reconcile message
46198 Sep 22 23:15:13.310 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(497), op: ExtentClose { repair_id: ReconciliationId(497), extent_id: 129 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46199 Sep 22 23:15:13.310 INFO [1] client ExtentClose { repair_id: ReconciliationId(497), extent_id: 129 }
46200 Sep 22 23:15:13.310 INFO [2] received reconcile message
46201 Sep 22 23:15:13.310 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(497), op: ExtentClose { repair_id: ReconciliationId(497), extent_id: 129 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46202 Sep 22 23:15:13.310 INFO [2] client ExtentClose { repair_id: ReconciliationId(497), extent_id: 129 }
46203 Sep 22 23:15:13.310 DEBG 497 Close extent 129
46204 Sep 22 23:15:13.310 DEBG 497 Close extent 129
46205 Sep 22 23:15:13.310 DEBG 497 Close extent 129
46206 Sep 22 23:15:13.311 DEBG [2] It's time to notify for 497
46207 Sep 22 23:15:13.311 INFO Completion from [2] id:497 status:true
46208 Sep 22 23:15:13.311 INFO [498/752] Repair commands completed
46209 Sep 22 23:15:13.311 INFO Pop front: ReconcileIO { id: ReconciliationId(498), op: ExtentRepair { repair_id: ReconciliationId(498), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46210 Sep 22 23:15:13.311 INFO Sent repair work, now wait for resp
46211 Sep 22 23:15:13.311 INFO [0] received reconcile message
46212 Sep 22 23:15:13.311 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(498), op: ExtentRepair { repair_id: ReconciliationId(498), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46213 Sep 22 23:15:13.311 INFO [0] client ExtentRepair { repair_id: ReconciliationId(498), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46214 Sep 22 23:15:13.311 INFO [0] Sending repair request ReconciliationId(498)
46215 Sep 22 23:15:13.311 INFO [1] received reconcile message
46216 Sep 22 23:15:13.311 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(498), op: ExtentRepair { repair_id: ReconciliationId(498), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46217 Sep 22 23:15:13.311 INFO [1] client ExtentRepair { repair_id: ReconciliationId(498), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46218 Sep 22 23:15:13.311 INFO [1] No action required ReconciliationId(498)
46219 Sep 22 23:15:13.311 INFO [2] received reconcile message
46220 Sep 22 23:15:13.311 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(498), op: ExtentRepair { repair_id: ReconciliationId(498), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46221 Sep 22 23:15:13.311 INFO [2] client ExtentRepair { repair_id: ReconciliationId(498), extent_id: 129, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46222 Sep 22 23:15:13.311 INFO [2] No action required ReconciliationId(498)
46223 Sep 22 23:15:13.311 DEBG 498 Repair extent 129 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
46224 Sep 22 23:15:13.311 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/081.copy"
46225 Sep 22 23:15:13.367 INFO [lossy] skipping 1085
46226 Sep 22 23:15:13.367 INFO [lossy] skipping 1086
46227 Sep 22 23:15:13.367 WARN returning error on flush!
46228 Sep 22 23:15:13.367 DEBG Flush :1085 extent_limit None deps:[JobId(1084)] res:false f:32 g:1
46229 Sep 22 23:15:13.367 INFO [lossy] skipping 1086
46230 Sep 22 23:15:13.367 DEBG Flush :1085 extent_limit None deps:[JobId(1084)] res:true f:32 g:1
46231 Sep 22 23:15:13.367 INFO [lossy] skipping 1086
46232 Sep 22 23:15:13.374 DEBG Read :1086 deps:[JobId(1085)] res:true
46233 Sep 22 23:15:13.376 INFO accepted connection, remote_addr: 127.0.0.1:49015, local_addr: 127.0.0.1:46213, task: repair
46234 Sep 22 23:15:13.376 TRCE incoming request, uri: /extent/129/files, method: GET, req_id: aa7072d1-85e4-4181-9432-67e49799d3ff, remote_addr: 127.0.0.1:49015, local_addr: 127.0.0.1:46213, task: repair
46235 Sep 22 23:15:13.376 INFO request completed, latency_us: 228, response_code: 200, uri: /extent/129/files, method: GET, req_id: aa7072d1-85e4-4181-9432-67e49799d3ff, remote_addr: 127.0.0.1:49015, local_addr: 127.0.0.1:46213, task: repair
46236 Sep 22 23:15:13.376 INFO eid:129 Found repair files: ["081", "081.db"]
46237 Sep 22 23:15:13.377 TRCE incoming request, uri: /newextent/129/data, method: GET, req_id: d965f5d0-75fb-41b6-98d3-37ca17fe6325, remote_addr: 127.0.0.1:49015, local_addr: 127.0.0.1:46213, task: repair
46238 Sep 22 23:15:13.377 INFO request completed, latency_us: 280, response_code: 200, uri: /newextent/129/data, method: GET, req_id: d965f5d0-75fb-41b6-98d3-37ca17fe6325, remote_addr: 127.0.0.1:49015, local_addr: 127.0.0.1:46213, task: repair
46239 Sep 22 23:15:13.382 TRCE incoming request, uri: /newextent/129/db, method: GET, req_id: 1d0abd9b-8daf-40e6-b6e5-96c439273276, remote_addr: 127.0.0.1:49015, local_addr: 127.0.0.1:46213, task: repair
46240 Sep 22 23:15:13.383 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/129/db, method: GET, req_id: 1d0abd9b-8daf-40e6-b6e5-96c439273276, remote_addr: 127.0.0.1:49015, local_addr: 127.0.0.1:46213, task: repair
46241 Sep 22 23:15:13.384 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/081.copy" to "/tmp/downstairs-vrx8aK6L/00/000/081.replace"
46242 Sep 22 23:15:13.384 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46243 Sep 22 23:15:13.385 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/081.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
46244 Sep 22 23:15:13.385 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/081"
46245 Sep 22 23:15:13.385 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/081.db"
46246 Sep 22 23:15:13.385 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46247 Sep 22 23:15:13.385 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/081.replace" to "/tmp/downstairs-vrx8aK6L/00/000/081.completed"
46248 Sep 22 23:15:13.385 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46249 Sep 22 23:15:13.385 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46250 Sep 22 23:15:13.386 DEBG [0] It's time to notify for 498
46251 Sep 22 23:15:13.386 INFO Completion from [0] id:498 status:true
46252 Sep 22 23:15:13.386 INFO [499/752] Repair commands completed
46253 Sep 22 23:15:13.386 INFO Pop front: ReconcileIO { id: ReconciliationId(499), op: ExtentReopen { repair_id: ReconciliationId(499), extent_id: 129 }, state: ClientData([New, New, New]) }
46254 Sep 22 23:15:13.386 INFO Sent repair work, now wait for resp
46255 Sep 22 23:15:13.386 INFO [0] received reconcile message
46256 Sep 22 23:15:13.386 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(499), op: ExtentReopen { repair_id: ReconciliationId(499), extent_id: 129 }, state: ClientData([InProgress, New, New]) }, : downstairs
46257 Sep 22 23:15:13.386 INFO [0] client ExtentReopen { repair_id: ReconciliationId(499), extent_id: 129 }
46258 Sep 22 23:15:13.386 INFO [1] received reconcile message
46259 Sep 22 23:15:13.386 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(499), op: ExtentReopen { repair_id: ReconciliationId(499), extent_id: 129 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46260 Sep 22 23:15:13.386 INFO [1] client ExtentReopen { repair_id: ReconciliationId(499), extent_id: 129 }
46261 Sep 22 23:15:13.386 INFO [2] received reconcile message
46262 Sep 22 23:15:13.386 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(499), op: ExtentReopen { repair_id: ReconciliationId(499), extent_id: 129 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46263 Sep 22 23:15:13.386 INFO [2] client ExtentReopen { repair_id: ReconciliationId(499), extent_id: 129 }
46264 Sep 22 23:15:13.386 DEBG 499 Reopen extent 129
46265 Sep 22 23:15:13.387 DEBG 499 Reopen extent 129
46266 Sep 22 23:15:13.387 DEBG 499 Reopen extent 129
46267 Sep 22 23:15:13.388 DEBG [2] It's time to notify for 499
46268 Sep 22 23:15:13.388 INFO Completion from [2] id:499 status:true
46269 Sep 22 23:15:13.388 INFO [500/752] Repair commands completed
46270 Sep 22 23:15:13.388 INFO Pop front: ReconcileIO { id: ReconciliationId(500), op: ExtentFlush { repair_id: ReconciliationId(500), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46271 Sep 22 23:15:13.388 INFO Sent repair work, now wait for resp
46272 Sep 22 23:15:13.388 INFO [0] received reconcile message
46273 Sep 22 23:15:13.388 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(500), op: ExtentFlush { repair_id: ReconciliationId(500), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46274 Sep 22 23:15:13.388 INFO [0] client ExtentFlush { repair_id: ReconciliationId(500), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46275 Sep 22 23:15:13.388 INFO [1] received reconcile message
46276 Sep 22 23:15:13.388 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(500), op: ExtentFlush { repair_id: ReconciliationId(500), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46277 Sep 22 23:15:13.388 INFO [1] client ExtentFlush { repair_id: ReconciliationId(500), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46278 Sep 22 23:15:13.388 INFO [2] received reconcile message
46279 Sep 22 23:15:13.388 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(500), op: ExtentFlush { repair_id: ReconciliationId(500), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46280 Sep 22 23:15:13.388 INFO [2] client ExtentFlush { repair_id: ReconciliationId(500), extent_id: 153, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46281 Sep 22 23:15:13.388 DEBG 500 Flush extent 153 with f:2 g:2
46282 Sep 22 23:15:13.388 DEBG Flush just extent 153 with f:2 and g:2
46283 Sep 22 23:15:13.388 DEBG [1] It's time to notify for 500
46284 Sep 22 23:15:13.389 INFO Completion from [1] id:500 status:true
46285 Sep 22 23:15:13.389 INFO [501/752] Repair commands completed
46286 Sep 22 23:15:13.389 INFO Pop front: ReconcileIO { id: ReconciliationId(501), op: ExtentClose { repair_id: ReconciliationId(501), extent_id: 153 }, state: ClientData([New, New, New]) }
46287 Sep 22 23:15:13.389 INFO Sent repair work, now wait for resp
46288 Sep 22 23:15:13.389 INFO [0] received reconcile message
46289 Sep 22 23:15:13.389 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(501), op: ExtentClose { repair_id: ReconciliationId(501), extent_id: 153 }, state: ClientData([InProgress, New, New]) }, : downstairs
46290 Sep 22 23:15:13.389 INFO [0] client ExtentClose { repair_id: ReconciliationId(501), extent_id: 153 }
46291 Sep 22 23:15:13.389 INFO [1] received reconcile message
46292 Sep 22 23:15:13.389 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(501), op: ExtentClose { repair_id: ReconciliationId(501), extent_id: 153 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46293 Sep 22 23:15:13.389 INFO [1] client ExtentClose { repair_id: ReconciliationId(501), extent_id: 153 }
46294 Sep 22 23:15:13.389 INFO [2] received reconcile message
46295 Sep 22 23:15:13.389 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(501), op: ExtentClose { repair_id: ReconciliationId(501), extent_id: 153 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46296 Sep 22 23:15:13.389 INFO [2] client ExtentClose { repair_id: ReconciliationId(501), extent_id: 153 }
46297 Sep 22 23:15:13.389 DEBG 501 Close extent 153
46298 Sep 22 23:15:13.389 DEBG 501 Close extent 153
46299 Sep 22 23:15:13.389 DEBG 501 Close extent 153
46300 Sep 22 23:15:13.390 DEBG [2] It's time to notify for 501
46301 Sep 22 23:15:13.390 INFO Completion from [2] id:501 status:true
46302 Sep 22 23:15:13.390 INFO [502/752] Repair commands completed
46303 Sep 22 23:15:13.390 INFO Pop front: ReconcileIO { id: ReconciliationId(502), op: ExtentRepair { repair_id: ReconciliationId(502), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46304 Sep 22 23:15:13.390 INFO Sent repair work, now wait for resp
46305 Sep 22 23:15:13.390 INFO [0] received reconcile message
46306 Sep 22 23:15:13.390 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(502), op: ExtentRepair { repair_id: ReconciliationId(502), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46307 Sep 22 23:15:13.390 INFO [0] client ExtentRepair { repair_id: ReconciliationId(502), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46308 Sep 22 23:15:13.390 INFO [0] Sending repair request ReconciliationId(502)
46309 Sep 22 23:15:13.390 INFO [1] received reconcile message
46310 Sep 22 23:15:13.390 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(502), op: ExtentRepair { repair_id: ReconciliationId(502), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46311 Sep 22 23:15:13.390 INFO [1] client ExtentRepair { repair_id: ReconciliationId(502), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46312 Sep 22 23:15:13.390 INFO [1] No action required ReconciliationId(502)
46313 Sep 22 23:15:13.390 INFO [2] received reconcile message
46314 Sep 22 23:15:13.390 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(502), op: ExtentRepair { repair_id: ReconciliationId(502), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46315 Sep 22 23:15:13.390 INFO [2] client ExtentRepair { repair_id: ReconciliationId(502), extent_id: 153, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46316 Sep 22 23:15:13.390 INFO [2] No action required ReconciliationId(502)
46317 Sep 22 23:15:13.390 DEBG 502 Repair extent 153 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
46318 Sep 22 23:15:13.390 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/099.copy"
46319 Sep 22 23:15:13.395 ERRO [1] job id 1085 saw error GenericError("test error")
46320 Sep 22 23:15:13.395 DEBG [rc] retire 1085 clears [JobId(1084), JobId(1085)], : downstairs
46321 Sep 22 23:15:13.397 DEBG Flush :1087 extent_limit None deps:[JobId(1086), JobId(1085)] res:true f:33 g:1
46322 Sep 22 23:15:13.397 INFO [lossy] sleeping 1 second
46323 Sep 22 23:15:13.453 INFO accepted connection, remote_addr: 127.0.0.1:43815, local_addr: 127.0.0.1:46213, task: repair
46324 Sep 22 23:15:13.453 TRCE incoming request, uri: /extent/153/files, method: GET, req_id: dade48af-3ee9-4024-ab10-da947ced5fe4, remote_addr: 127.0.0.1:43815, local_addr: 127.0.0.1:46213, task: repair
46325 Sep 22 23:15:13.454 INFO request completed, latency_us: 268, response_code: 200, uri: /extent/153/files, method: GET, req_id: dade48af-3ee9-4024-ab10-da947ced5fe4, remote_addr: 127.0.0.1:43815, local_addr: 127.0.0.1:46213, task: repair
46326 Sep 22 23:15:13.454 INFO eid:153 Found repair files: ["099", "099.db"]
46327 Sep 22 23:15:13.454 TRCE incoming request, uri: /newextent/153/data, method: GET, req_id: 7f238ae3-c180-42ab-a261-0919986a1706, remote_addr: 127.0.0.1:43815, local_addr: 127.0.0.1:46213, task: repair
46328 Sep 22 23:15:13.455 INFO request completed, latency_us: 342, response_code: 200, uri: /newextent/153/data, method: GET, req_id: 7f238ae3-c180-42ab-a261-0919986a1706, remote_addr: 127.0.0.1:43815, local_addr: 127.0.0.1:46213, task: repair
46329 Sep 22 23:15:13.460 TRCE incoming request, uri: /newextent/153/db, method: GET, req_id: 4ac6a6f5-8d96-42fc-a104-f413b6ae8753, remote_addr: 127.0.0.1:43815, local_addr: 127.0.0.1:46213, task: repair
46330 Sep 22 23:15:13.460 INFO request completed, latency_us: 330, response_code: 200, uri: /newextent/153/db, method: GET, req_id: 4ac6a6f5-8d96-42fc-a104-f413b6ae8753, remote_addr: 127.0.0.1:43815, local_addr: 127.0.0.1:46213, task: repair
46331 Sep 22 23:15:13.461 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/099.copy" to "/tmp/downstairs-vrx8aK6L/00/000/099.replace"
46332 Sep 22 23:15:13.461 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46333 Sep 22 23:15:13.463 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/099.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
46334 Sep 22 23:15:13.463 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/099"
46335 Sep 22 23:15:13.463 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/099.db"
46336 Sep 22 23:15:13.463 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46337 Sep 22 23:15:13.463 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/099.replace" to "/tmp/downstairs-vrx8aK6L/00/000/099.completed"
46338 Sep 22 23:15:13.463 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46339 Sep 22 23:15:13.463 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46340 Sep 22 23:15:13.464 DEBG [0] It's time to notify for 502
46341 Sep 22 23:15:13.464 INFO Completion from [0] id:502 status:true
46342 Sep 22 23:15:13.464 INFO [503/752] Repair commands completed
46343 Sep 22 23:15:13.464 INFO Pop front: ReconcileIO { id: ReconciliationId(503), op: ExtentReopen { repair_id: ReconciliationId(503), extent_id: 153 }, state: ClientData([New, New, New]) }
46344 Sep 22 23:15:13.464 INFO Sent repair work, now wait for resp
46345 Sep 22 23:15:13.464 INFO [0] received reconcile message
46346 Sep 22 23:15:13.464 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(503), op: ExtentReopen { repair_id: ReconciliationId(503), extent_id: 153 }, state: ClientData([InProgress, New, New]) }, : downstairs
46347 Sep 22 23:15:13.464 INFO [0] client ExtentReopen { repair_id: ReconciliationId(503), extent_id: 153 }
46348 Sep 22 23:15:13.464 INFO [1] received reconcile message
46349 Sep 22 23:15:13.464 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(503), op: ExtentReopen { repair_id: ReconciliationId(503), extent_id: 153 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46350 Sep 22 23:15:13.464 INFO [1] client ExtentReopen { repair_id: ReconciliationId(503), extent_id: 153 }
46351 Sep 22 23:15:13.464 INFO [2] received reconcile message
46352 Sep 22 23:15:13.464 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(503), op: ExtentReopen { repair_id: ReconciliationId(503), extent_id: 153 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46353 Sep 22 23:15:13.464 INFO [2] client ExtentReopen { repair_id: ReconciliationId(503), extent_id: 153 }
46354 Sep 22 23:15:13.464 DEBG 503 Reopen extent 153
46355 Sep 22 23:15:13.465 DEBG 503 Reopen extent 153
46356 Sep 22 23:15:13.466 DEBG 503 Reopen extent 153
46357 Sep 22 23:15:13.466 DEBG [2] It's time to notify for 503
46358 Sep 22 23:15:13.466 INFO Completion from [2] id:503 status:true
46359 Sep 22 23:15:13.466 INFO [504/752] Repair commands completed
46360 Sep 22 23:15:13.466 INFO Pop front: ReconcileIO { id: ReconciliationId(504), op: ExtentFlush { repair_id: ReconciliationId(504), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46361 Sep 22 23:15:13.466 INFO Sent repair work, now wait for resp
46362 Sep 22 23:15:13.466 INFO [0] received reconcile message
46363 Sep 22 23:15:13.466 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(504), op: ExtentFlush { repair_id: ReconciliationId(504), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46364 Sep 22 23:15:13.466 INFO [0] client ExtentFlush { repair_id: ReconciliationId(504), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46365 Sep 22 23:15:13.466 INFO [1] received reconcile message
46366 Sep 22 23:15:13.466 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(504), op: ExtentFlush { repair_id: ReconciliationId(504), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46367 Sep 22 23:15:13.466 INFO [1] client ExtentFlush { repair_id: ReconciliationId(504), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46368 Sep 22 23:15:13.466 INFO [2] received reconcile message
46369 Sep 22 23:15:13.467 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(504), op: ExtentFlush { repair_id: ReconciliationId(504), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46370 Sep 22 23:15:13.467 INFO [2] client ExtentFlush { repair_id: ReconciliationId(504), extent_id: 181, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46371 Sep 22 23:15:13.467 DEBG 504 Flush extent 181 with f:2 g:2
46372 Sep 22 23:15:13.467 DEBG Flush just extent 181 with f:2 and g:2
46373 Sep 22 23:15:13.467 DEBG [1] It's time to notify for 504
46374 Sep 22 23:15:13.467 INFO Completion from [1] id:504 status:true
46375 Sep 22 23:15:13.467 INFO [505/752] Repair commands completed
46376 Sep 22 23:15:13.467 INFO Pop front: ReconcileIO { id: ReconciliationId(505), op: ExtentClose { repair_id: ReconciliationId(505), extent_id: 181 }, state: ClientData([New, New, New]) }
46377 Sep 22 23:15:13.467 INFO Sent repair work, now wait for resp
46378 Sep 22 23:15:13.467 INFO [0] received reconcile message
46379 Sep 22 23:15:13.467 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(505), op: ExtentClose { repair_id: ReconciliationId(505), extent_id: 181 }, state: ClientData([InProgress, New, New]) }, : downstairs
46380 Sep 22 23:15:13.467 INFO [0] client ExtentClose { repair_id: ReconciliationId(505), extent_id: 181 }
46381 Sep 22 23:15:13.467 INFO [1] received reconcile message
46382 Sep 22 23:15:13.467 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(505), op: ExtentClose { repair_id: ReconciliationId(505), extent_id: 181 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46383 Sep 22 23:15:13.467 INFO [1] client ExtentClose { repair_id: ReconciliationId(505), extent_id: 181 }
46384 Sep 22 23:15:13.467 INFO [2] received reconcile message
46385 Sep 22 23:15:13.467 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(505), op: ExtentClose { repair_id: ReconciliationId(505), extent_id: 181 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46386 Sep 22 23:15:13.467 INFO [2] client ExtentClose { repair_id: ReconciliationId(505), extent_id: 181 }
46387 Sep 22 23:15:13.467 DEBG 505 Close extent 181
46388 Sep 22 23:15:13.467 DEBG 505 Close extent 181
46389 Sep 22 23:15:13.468 DEBG 505 Close extent 181
46390 Sep 22 23:15:13.468 DEBG [2] It's time to notify for 505
46391 Sep 22 23:15:13.468 INFO Completion from [2] id:505 status:true
46392 Sep 22 23:15:13.468 INFO [506/752] Repair commands completed
46393 Sep 22 23:15:13.468 INFO Pop front: ReconcileIO { id: ReconciliationId(506), op: ExtentRepair { repair_id: ReconciliationId(506), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46394 Sep 22 23:15:13.468 INFO Sent repair work, now wait for resp
46395 Sep 22 23:15:13.468 INFO [0] received reconcile message
46396 Sep 22 23:15:13.468 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(506), op: ExtentRepair { repair_id: ReconciliationId(506), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46397 Sep 22 23:15:13.468 INFO [0] client ExtentRepair { repair_id: ReconciliationId(506), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46398 Sep 22 23:15:13.468 INFO [0] Sending repair request ReconciliationId(506)
46399 Sep 22 23:15:13.468 INFO [1] received reconcile message
46400 Sep 22 23:15:13.469 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(506), op: ExtentRepair { repair_id: ReconciliationId(506), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46401 Sep 22 23:15:13.469 INFO [1] client ExtentRepair { repair_id: ReconciliationId(506), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46402 Sep 22 23:15:13.469 INFO [1] No action required ReconciliationId(506)
46403 Sep 22 23:15:13.469 INFO [2] received reconcile message
46404 Sep 22 23:15:13.469 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(506), op: ExtentRepair { repair_id: ReconciliationId(506), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46405 Sep 22 23:15:13.469 INFO [2] client ExtentRepair { repair_id: ReconciliationId(506), extent_id: 181, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46406 Sep 22 23:15:13.469 INFO [2] No action required ReconciliationId(506)
46407 Sep 22 23:15:13.469 DEBG 506 Repair extent 181 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
46408 Sep 22 23:15:13.469 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0B5.copy"
46409 Sep 22 23:15:13.536 INFO accepted connection, remote_addr: 127.0.0.1:49266, local_addr: 127.0.0.1:46213, task: repair
46410 Sep 22 23:15:13.537 TRCE incoming request, uri: /extent/181/files, method: GET, req_id: 2d7f784b-20a6-4f26-bbdf-41edd85812f2, remote_addr: 127.0.0.1:49266, local_addr: 127.0.0.1:46213, task: repair
46411 Sep 22 23:15:13.537 INFO request completed, latency_us: 285, response_code: 200, uri: /extent/181/files, method: GET, req_id: 2d7f784b-20a6-4f26-bbdf-41edd85812f2, remote_addr: 127.0.0.1:49266, local_addr: 127.0.0.1:46213, task: repair
46412 Sep 22 23:15:13.537 INFO eid:181 Found repair files: ["0B5", "0B5.db"]
46413 Sep 22 23:15:13.538 TRCE incoming request, uri: /newextent/181/data, method: GET, req_id: c525f427-4a1d-407b-aca4-998e2f574484, remote_addr: 127.0.0.1:49266, local_addr: 127.0.0.1:46213, task: repair
46414 Sep 22 23:15:13.538 INFO request completed, latency_us: 312, response_code: 200, uri: /newextent/181/data, method: GET, req_id: c525f427-4a1d-407b-aca4-998e2f574484, remote_addr: 127.0.0.1:49266, local_addr: 127.0.0.1:46213, task: repair
46415 Sep 22 23:15:13.543 TRCE incoming request, uri: /newextent/181/db, method: GET, req_id: 5863f496-2e94-4220-b8cb-63c2cff379c5, remote_addr: 127.0.0.1:49266, local_addr: 127.0.0.1:46213, task: repair
46416 Sep 22 23:15:13.543 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/181/db, method: GET, req_id: 5863f496-2e94-4220-b8cb-63c2cff379c5, remote_addr: 127.0.0.1:49266, local_addr: 127.0.0.1:46213, task: repair
46417 Sep 22 23:15:13.545 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0B5.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0B5.replace"
46418 Sep 22 23:15:13.545 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46419 Sep 22 23:15:13.546 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0B5.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
46420 Sep 22 23:15:13.546 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B5"
46421 Sep 22 23:15:13.546 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B5.db"
46422 Sep 22 23:15:13.546 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46423 Sep 22 23:15:13.546 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0B5.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0B5.completed"
46424 Sep 22 23:15:13.546 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46425 Sep 22 23:15:13.546 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46426 Sep 22 23:15:13.547 DEBG [0] It's time to notify for 506
46427 Sep 22 23:15:13.547 INFO Completion from [0] id:506 status:true
46428 Sep 22 23:15:13.547 INFO [507/752] Repair commands completed
46429 Sep 22 23:15:13.547 INFO Pop front: ReconcileIO { id: ReconciliationId(507), op: ExtentReopen { repair_id: ReconciliationId(507), extent_id: 181 }, state: ClientData([New, New, New]) }
46430 Sep 22 23:15:13.547 INFO Sent repair work, now wait for resp
46431 Sep 22 23:15:13.547 INFO [0] received reconcile message
46432 Sep 22 23:15:13.547 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(507), op: ExtentReopen { repair_id: ReconciliationId(507), extent_id: 181 }, state: ClientData([InProgress, New, New]) }, : downstairs
46433 Sep 22 23:15:13.547 INFO [0] client ExtentReopen { repair_id: ReconciliationId(507), extent_id: 181 }
46434 Sep 22 23:15:13.547 INFO [1] received reconcile message
46435 Sep 22 23:15:13.547 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(507), op: ExtentReopen { repair_id: ReconciliationId(507), extent_id: 181 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46436 Sep 22 23:15:13.547 INFO [1] client ExtentReopen { repair_id: ReconciliationId(507), extent_id: 181 }
46437 Sep 22 23:15:13.547 INFO [2] received reconcile message
46438 Sep 22 23:15:13.547 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(507), op: ExtentReopen { repair_id: ReconciliationId(507), extent_id: 181 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46439 Sep 22 23:15:13.547 INFO [2] client ExtentReopen { repair_id: ReconciliationId(507), extent_id: 181 }
46440 Sep 22 23:15:13.547 DEBG 507 Reopen extent 181
46441 Sep 22 23:15:13.548 DEBG 507 Reopen extent 181
46442 Sep 22 23:15:13.549 DEBG 507 Reopen extent 181
46443 Sep 22 23:15:13.549 DEBG [2] It's time to notify for 507
46444 Sep 22 23:15:13.549 INFO Completion from [2] id:507 status:true
46445 Sep 22 23:15:13.549 INFO [508/752] Repair commands completed
46446 Sep 22 23:15:13.549 INFO Pop front: ReconcileIO { id: ReconciliationId(508), op: ExtentFlush { repair_id: ReconciliationId(508), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46447 Sep 22 23:15:13.549 INFO Sent repair work, now wait for resp
46448 Sep 22 23:15:13.549 INFO [0] received reconcile message
46449 Sep 22 23:15:13.549 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(508), op: ExtentFlush { repair_id: ReconciliationId(508), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46450 Sep 22 23:15:13.549 INFO [0] client ExtentFlush { repair_id: ReconciliationId(508), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46451 Sep 22 23:15:13.549 INFO [1] received reconcile message
46452 Sep 22 23:15:13.549 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(508), op: ExtentFlush { repair_id: ReconciliationId(508), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46453 Sep 22 23:15:13.549 INFO [1] client ExtentFlush { repair_id: ReconciliationId(508), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46454 Sep 22 23:15:13.549 INFO [2] received reconcile message
46455 Sep 22 23:15:13.549 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(508), op: ExtentFlush { repair_id: ReconciliationId(508), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46456 Sep 22 23:15:13.549 INFO [2] client ExtentFlush { repair_id: ReconciliationId(508), extent_id: 128, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46457 Sep 22 23:15:13.550 DEBG 508 Flush extent 128 with f:2 g:2
46458 Sep 22 23:15:13.550 DEBG Flush just extent 128 with f:2 and g:2
46459 Sep 22 23:15:13.550 DEBG [1] It's time to notify for 508
46460 Sep 22 23:15:13.550 INFO Completion from [1] id:508 status:true
46461 Sep 22 23:15:13.550 INFO [509/752] Repair commands completed
46462 Sep 22 23:15:13.550 INFO Pop front: ReconcileIO { id: ReconciliationId(509), op: ExtentClose { repair_id: ReconciliationId(509), extent_id: 128 }, state: ClientData([New, New, New]) }
46463 Sep 22 23:15:13.550 INFO Sent repair work, now wait for resp
46464 Sep 22 23:15:13.550 INFO [0] received reconcile message
46465 Sep 22 23:15:13.550 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(509), op: ExtentClose { repair_id: ReconciliationId(509), extent_id: 128 }, state: ClientData([InProgress, New, New]) }, : downstairs
46466 Sep 22 23:15:13.550 INFO [0] client ExtentClose { repair_id: ReconciliationId(509), extent_id: 128 }
46467 Sep 22 23:15:13.550 INFO [1] received reconcile message
46468 Sep 22 23:15:13.550 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(509), op: ExtentClose { repair_id: ReconciliationId(509), extent_id: 128 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46469 Sep 22 23:15:13.550 INFO [1] client ExtentClose { repair_id: ReconciliationId(509), extent_id: 128 }
46470 Sep 22 23:15:13.550 INFO [2] received reconcile message
46471 Sep 22 23:15:13.550 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(509), op: ExtentClose { repair_id: ReconciliationId(509), extent_id: 128 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46472 Sep 22 23:15:13.550 INFO [2] client ExtentClose { repair_id: ReconciliationId(509), extent_id: 128 }
46473 Sep 22 23:15:13.550 DEBG 509 Close extent 128
46474 Sep 22 23:15:13.550 DEBG 509 Close extent 128
46475 Sep 22 23:15:13.551 DEBG 509 Close extent 128
46476 Sep 22 23:15:13.551 DEBG [2] It's time to notify for 509
46477 Sep 22 23:15:13.551 INFO Completion from [2] id:509 status:true
46478 Sep 22 23:15:13.551 INFO [510/752] Repair commands completed
46479 Sep 22 23:15:13.551 INFO Pop front: ReconcileIO { id: ReconciliationId(510), op: ExtentRepair { repair_id: ReconciliationId(510), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46480 Sep 22 23:15:13.551 INFO Sent repair work, now wait for resp
46481 Sep 22 23:15:13.551 INFO [0] received reconcile message
46482 Sep 22 23:15:13.551 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(510), op: ExtentRepair { repair_id: ReconciliationId(510), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46483 Sep 22 23:15:13.551 INFO [0] client ExtentRepair { repair_id: ReconciliationId(510), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46484 Sep 22 23:15:13.551 INFO [0] Sending repair request ReconciliationId(510)
46485 Sep 22 23:15:13.551 INFO [1] received reconcile message
46486 Sep 22 23:15:13.551 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(510), op: ExtentRepair { repair_id: ReconciliationId(510), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46487 Sep 22 23:15:13.551 INFO [1] client ExtentRepair { repair_id: ReconciliationId(510), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46488 Sep 22 23:15:13.551 INFO [1] No action required ReconciliationId(510)
46489 Sep 22 23:15:13.552 INFO [2] received reconcile message
46490 Sep 22 23:15:13.552 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(510), op: ExtentRepair { repair_id: ReconciliationId(510), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46491 Sep 22 23:15:13.552 INFO [2] client ExtentRepair { repair_id: ReconciliationId(510), extent_id: 128, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46492 Sep 22 23:15:13.552 INFO [2] No action required ReconciliationId(510)
46493 Sep 22 23:15:13.552 DEBG 510 Repair extent 128 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
46494 Sep 22 23:15:13.552 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/080.copy"
46495 Sep 22 23:15:13.617 INFO accepted connection, remote_addr: 127.0.0.1:37556, local_addr: 127.0.0.1:46213, task: repair
46496 Sep 22 23:15:13.617 TRCE incoming request, uri: /extent/128/files, method: GET, req_id: 6f5e5739-adc2-48e9-9416-3f1219d8a501, remote_addr: 127.0.0.1:37556, local_addr: 127.0.0.1:46213, task: repair
46497 Sep 22 23:15:13.617 INFO request completed, latency_us: 283, response_code: 200, uri: /extent/128/files, method: GET, req_id: 6f5e5739-adc2-48e9-9416-3f1219d8a501, remote_addr: 127.0.0.1:37556, local_addr: 127.0.0.1:46213, task: repair
46498 Sep 22 23:15:13.618 INFO eid:128 Found repair files: ["080", "080.db"]
46499 Sep 22 23:15:13.618 TRCE incoming request, uri: /newextent/128/data, method: GET, req_id: 3b5af781-8b15-4bc7-8adf-bae66398ca9b, remote_addr: 127.0.0.1:37556, local_addr: 127.0.0.1:46213, task: repair
46500 Sep 22 23:15:13.618 INFO request completed, latency_us: 370, response_code: 200, uri: /newextent/128/data, method: GET, req_id: 3b5af781-8b15-4bc7-8adf-bae66398ca9b, remote_addr: 127.0.0.1:37556, local_addr: 127.0.0.1:46213, task: repair
46501 Sep 22 23:15:13.623 TRCE incoming request, uri: /newextent/128/db, method: GET, req_id: f271051d-0b62-4762-a9ec-76ef0cff07fb, remote_addr: 127.0.0.1:37556, local_addr: 127.0.0.1:46213, task: repair
46502 Sep 22 23:15:13.624 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/128/db, method: GET, req_id: f271051d-0b62-4762-a9ec-76ef0cff07fb, remote_addr: 127.0.0.1:37556, local_addr: 127.0.0.1:46213, task: repair
46503 Sep 22 23:15:13.625 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/080.copy" to "/tmp/downstairs-vrx8aK6L/00/000/080.replace"
46504 Sep 22 23:15:13.625 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46505 Sep 22 23:15:13.626 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/080.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
46506 Sep 22 23:15:13.626 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/080"
46507 Sep 22 23:15:13.626 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/080.db"
46508 Sep 22 23:15:13.626 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46509 Sep 22 23:15:13.626 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/080.replace" to "/tmp/downstairs-vrx8aK6L/00/000/080.completed"
46510 Sep 22 23:15:13.626 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46511 Sep 22 23:15:13.627 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46512 Sep 22 23:15:13.627 DEBG [0] It's time to notify for 510
46513 Sep 22 23:15:13.627 INFO Completion from [0] id:510 status:true
46514 Sep 22 23:15:13.627 INFO [511/752] Repair commands completed
46515 Sep 22 23:15:13.627 INFO Pop front: ReconcileIO { id: ReconciliationId(511), op: ExtentReopen { repair_id: ReconciliationId(511), extent_id: 128 }, state: ClientData([New, New, New]) }
46516 Sep 22 23:15:13.627 INFO Sent repair work, now wait for resp
46517 Sep 22 23:15:13.627 INFO [0] received reconcile message
46518 Sep 22 23:15:13.627 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(511), op: ExtentReopen { repair_id: ReconciliationId(511), extent_id: 128 }, state: ClientData([InProgress, New, New]) }, : downstairs
46519 Sep 22 23:15:13.627 INFO [0] client ExtentReopen { repair_id: ReconciliationId(511), extent_id: 128 }
46520 Sep 22 23:15:13.627 INFO [1] received reconcile message
46521 Sep 22 23:15:13.627 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(511), op: ExtentReopen { repair_id: ReconciliationId(511), extent_id: 128 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46522 Sep 22 23:15:13.627 INFO [1] client ExtentReopen { repair_id: ReconciliationId(511), extent_id: 128 }
46523 Sep 22 23:15:13.627 INFO [2] received reconcile message
46524 Sep 22 23:15:13.627 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(511), op: ExtentReopen { repair_id: ReconciliationId(511), extent_id: 128 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46525 Sep 22 23:15:13.627 INFO [2] client ExtentReopen { repair_id: ReconciliationId(511), extent_id: 128 }
46526 Sep 22 23:15:13.627 DEBG 511 Reopen extent 128
46527 Sep 22 23:15:13.628 DEBG 511 Reopen extent 128
46528 Sep 22 23:15:13.629 DEBG 511 Reopen extent 128
46529 Sep 22 23:15:13.629 DEBG [2] It's time to notify for 511
46530 Sep 22 23:15:13.629 INFO Completion from [2] id:511 status:true
46531 Sep 22 23:15:13.629 INFO [512/752] Repair commands completed
46532 Sep 22 23:15:13.629 INFO Pop front: ReconcileIO { id: ReconciliationId(512), op: ExtentFlush { repair_id: ReconciliationId(512), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46533 Sep 22 23:15:13.629 INFO Sent repair work, now wait for resp
46534 Sep 22 23:15:13.629 INFO [0] received reconcile message
46535 Sep 22 23:15:13.629 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(512), op: ExtentFlush { repair_id: ReconciliationId(512), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46536 Sep 22 23:15:13.629 INFO [0] client ExtentFlush { repair_id: ReconciliationId(512), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46537 Sep 22 23:15:13.630 INFO [1] received reconcile message
46538 Sep 22 23:15:13.630 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(512), op: ExtentFlush { repair_id: ReconciliationId(512), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46539 Sep 22 23:15:13.630 INFO [1] client ExtentFlush { repair_id: ReconciliationId(512), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46540 Sep 22 23:15:13.630 INFO [2] received reconcile message
46541 Sep 22 23:15:13.630 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(512), op: ExtentFlush { repair_id: ReconciliationId(512), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46542 Sep 22 23:15:13.630 INFO [2] client ExtentFlush { repair_id: ReconciliationId(512), extent_id: 72, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46543 Sep 22 23:15:13.630 DEBG 512 Flush extent 72 with f:2 g:2
46544 Sep 22 23:15:13.630 DEBG Flush just extent 72 with f:2 and g:2
46545 Sep 22 23:15:13.630 DEBG [1] It's time to notify for 512
46546 Sep 22 23:15:13.630 INFO Completion from [1] id:512 status:true
46547 Sep 22 23:15:13.630 INFO [513/752] Repair commands completed
46548 Sep 22 23:15:13.630 INFO Pop front: ReconcileIO { id: ReconciliationId(513), op: ExtentClose { repair_id: ReconciliationId(513), extent_id: 72 }, state: ClientData([New, New, New]) }
46549 Sep 22 23:15:13.630 INFO Sent repair work, now wait for resp
46550 Sep 22 23:15:13.630 INFO [0] received reconcile message
46551 Sep 22 23:15:13.630 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(513), op: ExtentClose { repair_id: ReconciliationId(513), extent_id: 72 }, state: ClientData([InProgress, New, New]) }, : downstairs
46552 Sep 22 23:15:13.630 INFO [0] client ExtentClose { repair_id: ReconciliationId(513), extent_id: 72 }
46553 Sep 22 23:15:13.630 INFO [1] received reconcile message
46554 Sep 22 23:15:13.630 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(513), op: ExtentClose { repair_id: ReconciliationId(513), extent_id: 72 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46555 Sep 22 23:15:13.630 INFO [1] client ExtentClose { repair_id: ReconciliationId(513), extent_id: 72 }
46556 Sep 22 23:15:13.630 INFO [2] received reconcile message
46557 Sep 22 23:15:13.630 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(513), op: ExtentClose { repair_id: ReconciliationId(513), extent_id: 72 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46558 Sep 22 23:15:13.630 INFO [2] client ExtentClose { repair_id: ReconciliationId(513), extent_id: 72 }
46559 Sep 22 23:15:13.630 DEBG 513 Close extent 72
46560 Sep 22 23:15:13.631 DEBG 513 Close extent 72
46561 Sep 22 23:15:13.631 DEBG 513 Close extent 72
46562 Sep 22 23:15:13.631 DEBG [2] It's time to notify for 513
46563 Sep 22 23:15:13.631 INFO Completion from [2] id:513 status:true
46564 Sep 22 23:15:13.631 INFO [514/752] Repair commands completed
46565 Sep 22 23:15:13.631 INFO Pop front: ReconcileIO { id: ReconciliationId(514), op: ExtentRepair { repair_id: ReconciliationId(514), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46566 Sep 22 23:15:13.631 INFO Sent repair work, now wait for resp
46567 Sep 22 23:15:13.631 INFO [0] received reconcile message
46568 Sep 22 23:15:13.631 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(514), op: ExtentRepair { repair_id: ReconciliationId(514), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46569 Sep 22 23:15:13.632 INFO [0] client ExtentRepair { repair_id: ReconciliationId(514), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46570 Sep 22 23:15:13.632 INFO [0] Sending repair request ReconciliationId(514)
46571 Sep 22 23:15:13.632 INFO [1] received reconcile message
46572 Sep 22 23:15:13.632 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(514), op: ExtentRepair { repair_id: ReconciliationId(514), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46573 Sep 22 23:15:13.632 INFO [1] client ExtentRepair { repair_id: ReconciliationId(514), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46574 Sep 22 23:15:13.632 INFO [1] No action required ReconciliationId(514)
46575 Sep 22 23:15:13.632 INFO [2] received reconcile message
46576 Sep 22 23:15:13.632 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(514), op: ExtentRepair { repair_id: ReconciliationId(514), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46577 Sep 22 23:15:13.632 INFO [2] client ExtentRepair { repair_id: ReconciliationId(514), extent_id: 72, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46578 Sep 22 23:15:13.632 INFO [2] No action required ReconciliationId(514)
46579 Sep 22 23:15:13.632 DEBG 514 Repair extent 72 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
46580 Sep 22 23:15:13.632 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/048.copy"
46581 Sep 22 23:15:13.699 INFO accepted connection, remote_addr: 127.0.0.1:38705, local_addr: 127.0.0.1:46213, task: repair
46582 Sep 22 23:15:13.699 TRCE incoming request, uri: /extent/72/files, method: GET, req_id: d0524d83-dd0a-48bf-808d-c8666fc1b432, remote_addr: 127.0.0.1:38705, local_addr: 127.0.0.1:46213, task: repair
46583 Sep 22 23:15:13.699 INFO request completed, latency_us: 295, response_code: 200, uri: /extent/72/files, method: GET, req_id: d0524d83-dd0a-48bf-808d-c8666fc1b432, remote_addr: 127.0.0.1:38705, local_addr: 127.0.0.1:46213, task: repair
46584 Sep 22 23:15:13.699 INFO eid:72 Found repair files: ["048", "048.db"]
46585 Sep 22 23:15:13.700 TRCE incoming request, uri: /newextent/72/data, method: GET, req_id: d09590e0-97f4-4f86-9acb-9897edc55229, remote_addr: 127.0.0.1:38705, local_addr: 127.0.0.1:46213, task: repair
46586 Sep 22 23:15:13.700 INFO request completed, latency_us: 374, response_code: 200, uri: /newextent/72/data, method: GET, req_id: d09590e0-97f4-4f86-9acb-9897edc55229, remote_addr: 127.0.0.1:38705, local_addr: 127.0.0.1:46213, task: repair
46587 Sep 22 23:15:13.705 TRCE incoming request, uri: /newextent/72/db, method: GET, req_id: 8e9658e1-0bb6-4d33-82e3-d4204cd1da41, remote_addr: 127.0.0.1:38705, local_addr: 127.0.0.1:46213, task: repair
46588 Sep 22 23:15:13.706 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/72/db, method: GET, req_id: 8e9658e1-0bb6-4d33-82e3-d4204cd1da41, remote_addr: 127.0.0.1:38705, local_addr: 127.0.0.1:46213, task: repair
46589 Sep 22 23:15:13.707 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/048.copy" to "/tmp/downstairs-vrx8aK6L/00/000/048.replace"
46590 Sep 22 23:15:13.707 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46591 Sep 22 23:15:13.708 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/048.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
46592 Sep 22 23:15:13.708 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/048"
46593 Sep 22 23:15:13.709 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/048.db"
46594 Sep 22 23:15:13.709 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46595 Sep 22 23:15:13.709 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/048.replace" to "/tmp/downstairs-vrx8aK6L/00/000/048.completed"
46596 Sep 22 23:15:13.709 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46597 Sep 22 23:15:13.709 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46598 Sep 22 23:15:13.709 DEBG [0] It's time to notify for 514
46599 Sep 22 23:15:13.709 INFO Completion from [0] id:514 status:true
46600 Sep 22 23:15:13.709 INFO [515/752] Repair commands completed
46601 Sep 22 23:15:13.709 INFO Pop front: ReconcileIO { id: ReconciliationId(515), op: ExtentReopen { repair_id: ReconciliationId(515), extent_id: 72 }, state: ClientData([New, New, New]) }
46602 Sep 22 23:15:13.709 INFO Sent repair work, now wait for resp
46603 Sep 22 23:15:13.709 INFO [0] received reconcile message
46604 Sep 22 23:15:13.709 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(515), op: ExtentReopen { repair_id: ReconciliationId(515), extent_id: 72 }, state: ClientData([InProgress, New, New]) }, : downstairs
46605 Sep 22 23:15:13.709 INFO [0] client ExtentReopen { repair_id: ReconciliationId(515), extent_id: 72 }
46606 Sep 22 23:15:13.709 INFO [1] received reconcile message
46607 Sep 22 23:15:13.709 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(515), op: ExtentReopen { repair_id: ReconciliationId(515), extent_id: 72 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46608 Sep 22 23:15:13.709 INFO [1] client ExtentReopen { repair_id: ReconciliationId(515), extent_id: 72 }
46609 Sep 22 23:15:13.709 INFO [2] received reconcile message
46610 Sep 22 23:15:13.709 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(515), op: ExtentReopen { repair_id: ReconciliationId(515), extent_id: 72 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46611 Sep 22 23:15:13.709 INFO [2] client ExtentReopen { repair_id: ReconciliationId(515), extent_id: 72 }
46612 Sep 22 23:15:13.710 DEBG 515 Reopen extent 72
46613 Sep 22 23:15:13.710 DEBG 515 Reopen extent 72
46614 Sep 22 23:15:13.711 DEBG 515 Reopen extent 72
46615 Sep 22 23:15:13.712 DEBG [2] It's time to notify for 515
46616 Sep 22 23:15:13.712 INFO Completion from [2] id:515 status:true
46617 Sep 22 23:15:13.712 INFO [516/752] Repair commands completed
46618 Sep 22 23:15:13.712 INFO Pop front: ReconcileIO { id: ReconciliationId(516), op: ExtentFlush { repair_id: ReconciliationId(516), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46619 Sep 22 23:15:13.712 INFO Sent repair work, now wait for resp
46620 Sep 22 23:15:13.712 INFO [0] received reconcile message
46621 Sep 22 23:15:13.712 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(516), op: ExtentFlush { repair_id: ReconciliationId(516), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46622 Sep 22 23:15:13.712 INFO [0] client ExtentFlush { repair_id: ReconciliationId(516), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46623 Sep 22 23:15:13.712 INFO [1] received reconcile message
46624 Sep 22 23:15:13.712 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(516), op: ExtentFlush { repair_id: ReconciliationId(516), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46625 Sep 22 23:15:13.712 INFO [1] client ExtentFlush { repair_id: ReconciliationId(516), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46626 Sep 22 23:15:13.712 INFO [2] received reconcile message
46627 Sep 22 23:15:13.712 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(516), op: ExtentFlush { repair_id: ReconciliationId(516), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46628 Sep 22 23:15:13.712 INFO [2] client ExtentFlush { repair_id: ReconciliationId(516), extent_id: 21, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46629 Sep 22 23:15:13.712 DEBG 516 Flush extent 21 with f:2 g:2
46630 Sep 22 23:15:13.712 DEBG Flush just extent 21 with f:2 and g:2
46631 Sep 22 23:15:13.712 DEBG [1] It's time to notify for 516
46632 Sep 22 23:15:13.712 INFO Completion from [1] id:516 status:true
46633 Sep 22 23:15:13.712 INFO [517/752] Repair commands completed
46634 Sep 22 23:15:13.712 INFO Pop front: ReconcileIO { id: ReconciliationId(517), op: ExtentClose { repair_id: ReconciliationId(517), extent_id: 21 }, state: ClientData([New, New, New]) }
46635 Sep 22 23:15:13.712 INFO Sent repair work, now wait for resp
46636 Sep 22 23:15:13.712 INFO [0] received reconcile message
46637 Sep 22 23:15:13.712 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(517), op: ExtentClose { repair_id: ReconciliationId(517), extent_id: 21 }, state: ClientData([InProgress, New, New]) }, : downstairs
46638 Sep 22 23:15:13.712 INFO [0] client ExtentClose { repair_id: ReconciliationId(517), extent_id: 21 }
46639 Sep 22 23:15:13.712 INFO [1] received reconcile message
46640 Sep 22 23:15:13.712 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(517), op: ExtentClose { repair_id: ReconciliationId(517), extent_id: 21 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46641 Sep 22 23:15:13.712 INFO [1] client ExtentClose { repair_id: ReconciliationId(517), extent_id: 21 }
46642 Sep 22 23:15:13.712 INFO [2] received reconcile message
46643 Sep 22 23:15:13.712 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(517), op: ExtentClose { repair_id: ReconciliationId(517), extent_id: 21 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46644 Sep 22 23:15:13.712 INFO [2] client ExtentClose { repair_id: ReconciliationId(517), extent_id: 21 }
46645 Sep 22 23:15:13.713 DEBG 517 Close extent 21
46646 Sep 22 23:15:13.713 DEBG 517 Close extent 21
46647 Sep 22 23:15:13.713 DEBG 517 Close extent 21
46648 Sep 22 23:15:13.714 DEBG [2] It's time to notify for 517
46649 Sep 22 23:15:13.714 INFO Completion from [2] id:517 status:true
46650 Sep 22 23:15:13.714 INFO [518/752] Repair commands completed
46651 Sep 22 23:15:13.714 INFO Pop front: ReconcileIO { id: ReconciliationId(518), op: ExtentRepair { repair_id: ReconciliationId(518), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46652 Sep 22 23:15:13.714 INFO Sent repair work, now wait for resp
46653 Sep 22 23:15:13.714 INFO [0] received reconcile message
46654 Sep 22 23:15:13.714 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(518), op: ExtentRepair { repair_id: ReconciliationId(518), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46655 Sep 22 23:15:13.714 INFO [0] client ExtentRepair { repair_id: ReconciliationId(518), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46656 Sep 22 23:15:13.714 INFO [0] Sending repair request ReconciliationId(518)
46657 Sep 22 23:15:13.714 INFO [1] received reconcile message
46658 Sep 22 23:15:13.714 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(518), op: ExtentRepair { repair_id: ReconciliationId(518), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46659 Sep 22 23:15:13.714 INFO [1] client ExtentRepair { repair_id: ReconciliationId(518), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46660 Sep 22 23:15:13.714 INFO [1] No action required ReconciliationId(518)
46661 Sep 22 23:15:13.714 INFO [2] received reconcile message
46662 Sep 22 23:15:13.714 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(518), op: ExtentRepair { repair_id: ReconciliationId(518), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46663 Sep 22 23:15:13.714 INFO [2] client ExtentRepair { repair_id: ReconciliationId(518), extent_id: 21, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46664 Sep 22 23:15:13.714 INFO [2] No action required ReconciliationId(518)
46665 Sep 22 23:15:13.714 DEBG 518 Repair extent 21 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
46666 Sep 22 23:15:13.714 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/015.copy"
46667 Sep 22 23:15:13.776 DEBG [1] Read AckReady 1086, : downstairs
46668 Sep 22 23:15:13.777 DEBG up_ds_listen was notified
46669 Sep 22 23:15:13.777 DEBG up_ds_listen process 1086
46670 Sep 22 23:15:13.777 DEBG [A] ack job 1086:87, : downstairs
46671 Sep 22 23:15:13.780 INFO accepted connection, remote_addr: 127.0.0.1:63666, local_addr: 127.0.0.1:46213, task: repair
46672 Sep 22 23:15:13.780 TRCE incoming request, uri: /extent/21/files, method: GET, req_id: f8570990-1b21-46d1-b129-15f4b85d0a14, remote_addr: 127.0.0.1:63666, local_addr: 127.0.0.1:46213, task: repair
46673 Sep 22 23:15:13.781 INFO request completed, latency_us: 268, response_code: 200, uri: /extent/21/files, method: GET, req_id: f8570990-1b21-46d1-b129-15f4b85d0a14, remote_addr: 127.0.0.1:63666, local_addr: 127.0.0.1:46213, task: repair
46674 Sep 22 23:15:13.781 INFO eid:21 Found repair files: ["015", "015.db"]
46675 Sep 22 23:15:13.781 TRCE incoming request, uri: /newextent/21/data, method: GET, req_id: 19f942f7-3d81-4d01-aa3c-f0b041a70d00, remote_addr: 127.0.0.1:63666, local_addr: 127.0.0.1:46213, task: repair
46676 Sep 22 23:15:13.782 INFO request completed, latency_us: 342, response_code: 200, uri: /newextent/21/data, method: GET, req_id: 19f942f7-3d81-4d01-aa3c-f0b041a70d00, remote_addr: 127.0.0.1:63666, local_addr: 127.0.0.1:46213, task: repair
46677 Sep 22 23:15:13.787 TRCE incoming request, uri: /newextent/21/db, method: GET, req_id: 7ce48312-1415-4b75-86d7-05f425cc7385, remote_addr: 127.0.0.1:63666, local_addr: 127.0.0.1:46213, task: repair
46678 Sep 22 23:15:13.787 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/21/db, method: GET, req_id: 7ce48312-1415-4b75-86d7-05f425cc7385, remote_addr: 127.0.0.1:63666, local_addr: 127.0.0.1:46213, task: repair
46679 Sep 22 23:15:13.788 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/015.copy" to "/tmp/downstairs-vrx8aK6L/00/000/015.replace"
46680 Sep 22 23:15:13.788 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46681 Sep 22 23:15:13.790 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/015.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
46682 Sep 22 23:15:13.790 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/015"
46683 Sep 22 23:15:13.790 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/015.db"
46684 Sep 22 23:15:13.790 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46685 Sep 22 23:15:13.790 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/015.replace" to "/tmp/downstairs-vrx8aK6L/00/000/015.completed"
46686 Sep 22 23:15:13.790 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46687 Sep 22 23:15:13.790 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46688 Sep 22 23:15:13.790 DEBG [0] It's time to notify for 518
46689 Sep 22 23:15:13.790 INFO Completion from [0] id:518 status:true
46690 Sep 22 23:15:13.790 INFO [519/752] Repair commands completed
46691 Sep 22 23:15:13.790 INFO Pop front: ReconcileIO { id: ReconciliationId(519), op: ExtentReopen { repair_id: ReconciliationId(519), extent_id: 21 }, state: ClientData([New, New, New]) }
46692 Sep 22 23:15:13.790 INFO Sent repair work, now wait for resp
46693 Sep 22 23:15:13.791 INFO [0] received reconcile message
46694 Sep 22 23:15:13.791 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(519), op: ExtentReopen { repair_id: ReconciliationId(519), extent_id: 21 }, state: ClientData([InProgress, New, New]) }, : downstairs
46695 Sep 22 23:15:13.791 INFO [0] client ExtentReopen { repair_id: ReconciliationId(519), extent_id: 21 }
46696 Sep 22 23:15:13.791 INFO [1] received reconcile message
46697 Sep 22 23:15:13.791 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(519), op: ExtentReopen { repair_id: ReconciliationId(519), extent_id: 21 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46698 Sep 22 23:15:13.791 INFO [1] client ExtentReopen { repair_id: ReconciliationId(519), extent_id: 21 }
46699 Sep 22 23:15:13.791 INFO [2] received reconcile message
46700 Sep 22 23:15:13.791 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(519), op: ExtentReopen { repair_id: ReconciliationId(519), extent_id: 21 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46701 Sep 22 23:15:13.791 INFO [2] client ExtentReopen { repair_id: ReconciliationId(519), extent_id: 21 }
46702 Sep 22 23:15:13.791 DEBG 519 Reopen extent 21
46703 Sep 22 23:15:13.792 DEBG 519 Reopen extent 21
46704 Sep 22 23:15:13.792 DEBG 519 Reopen extent 21
46705 Sep 22 23:15:13.793 DEBG [2] It's time to notify for 519
46706 Sep 22 23:15:13.793 INFO Completion from [2] id:519 status:true
46707 Sep 22 23:15:13.793 INFO [520/752] Repair commands completed
46708 Sep 22 23:15:13.793 INFO Pop front: ReconcileIO { id: ReconciliationId(520), op: ExtentFlush { repair_id: ReconciliationId(520), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46709 Sep 22 23:15:13.793 INFO Sent repair work, now wait for resp
46710 Sep 22 23:15:13.793 INFO [0] received reconcile message
46711 Sep 22 23:15:13.793 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(520), op: ExtentFlush { repair_id: ReconciliationId(520), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46712 Sep 22 23:15:13.793 INFO [0] client ExtentFlush { repair_id: ReconciliationId(520), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46713 Sep 22 23:15:13.793 INFO [1] received reconcile message
46714 Sep 22 23:15:13.793 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(520), op: ExtentFlush { repair_id: ReconciliationId(520), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46715 Sep 22 23:15:13.793 INFO [1] client ExtentFlush { repair_id: ReconciliationId(520), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46716 Sep 22 23:15:13.793 INFO [2] received reconcile message
46717 Sep 22 23:15:13.793 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(520), op: ExtentFlush { repair_id: ReconciliationId(520), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46718 Sep 22 23:15:13.793 INFO [2] client ExtentFlush { repair_id: ReconciliationId(520), extent_id: 61, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46719 Sep 22 23:15:13.793 DEBG 520 Flush extent 61 with f:2 g:2
46720 Sep 22 23:15:13.793 DEBG Flush just extent 61 with f:2 and g:2
46721 Sep 22 23:15:13.794 DEBG [1] It's time to notify for 520
46722 Sep 22 23:15:13.794 INFO Completion from [1] id:520 status:true
46723 Sep 22 23:15:13.794 INFO [521/752] Repair commands completed
46724 Sep 22 23:15:13.794 INFO Pop front: ReconcileIO { id: ReconciliationId(521), op: ExtentClose { repair_id: ReconciliationId(521), extent_id: 61 }, state: ClientData([New, New, New]) }
46725 Sep 22 23:15:13.794 INFO Sent repair work, now wait for resp
46726 Sep 22 23:15:13.794 INFO [0] received reconcile message
46727 Sep 22 23:15:13.794 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(521), op: ExtentClose { repair_id: ReconciliationId(521), extent_id: 61 }, state: ClientData([InProgress, New, New]) }, : downstairs
46728 Sep 22 23:15:13.794 INFO [0] client ExtentClose { repair_id: ReconciliationId(521), extent_id: 61 }
46729 Sep 22 23:15:13.794 INFO [1] received reconcile message
46730 Sep 22 23:15:13.794 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(521), op: ExtentClose { repair_id: ReconciliationId(521), extent_id: 61 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46731 Sep 22 23:15:13.794 INFO [1] client ExtentClose { repair_id: ReconciliationId(521), extent_id: 61 }
46732 Sep 22 23:15:13.794 INFO [2] received reconcile message
46733 Sep 22 23:15:13.794 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(521), op: ExtentClose { repair_id: ReconciliationId(521), extent_id: 61 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46734 Sep 22 23:15:13.794 INFO [2] client ExtentClose { repair_id: ReconciliationId(521), extent_id: 61 }
46735 Sep 22 23:15:13.794 DEBG 521 Close extent 61
46736 Sep 22 23:15:13.794 DEBG 521 Close extent 61
46737 Sep 22 23:15:13.795 DEBG 521 Close extent 61
46738 Sep 22 23:15:13.795 DEBG [2] It's time to notify for 521
46739 Sep 22 23:15:13.795 INFO Completion from [2] id:521 status:true
46740 Sep 22 23:15:13.795 INFO [522/752] Repair commands completed
46741 Sep 22 23:15:13.795 INFO Pop front: ReconcileIO { id: ReconciliationId(522), op: ExtentRepair { repair_id: ReconciliationId(522), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46742 Sep 22 23:15:13.795 INFO Sent repair work, now wait for resp
46743 Sep 22 23:15:13.795 INFO [0] received reconcile message
46744 Sep 22 23:15:13.795 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(522), op: ExtentRepair { repair_id: ReconciliationId(522), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46745 Sep 22 23:15:13.795 INFO [0] client ExtentRepair { repair_id: ReconciliationId(522), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46746 Sep 22 23:15:13.795 INFO [0] Sending repair request ReconciliationId(522)
46747 Sep 22 23:15:13.795 INFO [1] received reconcile message
46748 Sep 22 23:15:13.795 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(522), op: ExtentRepair { repair_id: ReconciliationId(522), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46749 Sep 22 23:15:13.795 INFO [1] client ExtentRepair { repair_id: ReconciliationId(522), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46750 Sep 22 23:15:13.795 INFO [1] No action required ReconciliationId(522)
46751 Sep 22 23:15:13.795 INFO [2] received reconcile message
46752 Sep 22 23:15:13.795 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(522), op: ExtentRepair { repair_id: ReconciliationId(522), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46753 Sep 22 23:15:13.795 INFO [2] client ExtentRepair { repair_id: ReconciliationId(522), extent_id: 61, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46754 Sep 22 23:15:13.795 INFO [2] No action required ReconciliationId(522)
46755 Sep 22 23:15:13.795 DEBG 522 Repair extent 61 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
46756 Sep 22 23:15:13.796 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/03D.copy"
46757 Sep 22 23:15:13.830 DEBG up_ds_listen checked 1 jobs, back to waiting
46758 Sep 22 23:15:13.832 DEBG IO Read 1088 has deps [JobId(1087)]
46759 Sep 22 23:15:13.858 INFO accepted connection, remote_addr: 127.0.0.1:41065, local_addr: 127.0.0.1:46213, task: repair
46760 Sep 22 23:15:13.858 TRCE incoming request, uri: /extent/61/files, method: GET, req_id: 4d0cf51c-4325-4e6a-ac66-376cd904811e, remote_addr: 127.0.0.1:41065, local_addr: 127.0.0.1:46213, task: repair
46761 Sep 22 23:15:13.858 INFO request completed, latency_us: 270, response_code: 200, uri: /extent/61/files, method: GET, req_id: 4d0cf51c-4325-4e6a-ac66-376cd904811e, remote_addr: 127.0.0.1:41065, local_addr: 127.0.0.1:46213, task: repair
46762 Sep 22 23:15:13.858 INFO eid:61 Found repair files: ["03D", "03D.db"]
46763 Sep 22 23:15:13.859 TRCE incoming request, uri: /newextent/61/data, method: GET, req_id: 7a757647-b0b7-4a02-8153-334e7a870d52, remote_addr: 127.0.0.1:41065, local_addr: 127.0.0.1:46213, task: repair
46764 Sep 22 23:15:13.859 INFO request completed, latency_us: 366, response_code: 200, uri: /newextent/61/data, method: GET, req_id: 7a757647-b0b7-4a02-8153-334e7a870d52, remote_addr: 127.0.0.1:41065, local_addr: 127.0.0.1:46213, task: repair
46765 Sep 22 23:15:13.864 TRCE incoming request, uri: /newextent/61/db, method: GET, req_id: df57f013-93c1-4fff-adec-dd7d76521b10, remote_addr: 127.0.0.1:41065, local_addr: 127.0.0.1:46213, task: repair
46766 Sep 22 23:15:13.865 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/61/db, method: GET, req_id: df57f013-93c1-4fff-adec-dd7d76521b10, remote_addr: 127.0.0.1:41065, local_addr: 127.0.0.1:46213, task: repair
46767 Sep 22 23:15:13.866 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/03D.copy" to "/tmp/downstairs-vrx8aK6L/00/000/03D.replace"
46768 Sep 22 23:15:13.866 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46769 Sep 22 23:15:13.867 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/03D.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
46770 Sep 22 23:15:13.867 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/03D"
46771 Sep 22 23:15:13.867 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/03D.db"
46772 Sep 22 23:15:13.867 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46773 Sep 22 23:15:13.867 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/03D.replace" to "/tmp/downstairs-vrx8aK6L/00/000/03D.completed"
46774 Sep 22 23:15:13.867 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46775 Sep 22 23:15:13.867 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46776 Sep 22 23:15:13.868 DEBG [0] It's time to notify for 522
46777 Sep 22 23:15:13.868 INFO Completion from [0] id:522 status:true
46778 Sep 22 23:15:13.868 INFO [523/752] Repair commands completed
46779 Sep 22 23:15:13.868 INFO Pop front: ReconcileIO { id: ReconciliationId(523), op: ExtentReopen { repair_id: ReconciliationId(523), extent_id: 61 }, state: ClientData([New, New, New]) }
46780 Sep 22 23:15:13.868 INFO Sent repair work, now wait for resp
46781 Sep 22 23:15:13.868 INFO [0] received reconcile message
46782 Sep 22 23:15:13.868 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(523), op: ExtentReopen { repair_id: ReconciliationId(523), extent_id: 61 }, state: ClientData([InProgress, New, New]) }, : downstairs
46783 Sep 22 23:15:13.868 INFO [0] client ExtentReopen { repair_id: ReconciliationId(523), extent_id: 61 }
46784 Sep 22 23:15:13.868 INFO [1] received reconcile message
46785 Sep 22 23:15:13.868 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(523), op: ExtentReopen { repair_id: ReconciliationId(523), extent_id: 61 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46786 Sep 22 23:15:13.868 INFO [1] client ExtentReopen { repair_id: ReconciliationId(523), extent_id: 61 }
46787 Sep 22 23:15:13.868 INFO [2] received reconcile message
46788 Sep 22 23:15:13.868 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(523), op: ExtentReopen { repair_id: ReconciliationId(523), extent_id: 61 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46789 Sep 22 23:15:13.868 INFO [2] client ExtentReopen { repair_id: ReconciliationId(523), extent_id: 61 }
46790 Sep 22 23:15:13.868 DEBG 523 Reopen extent 61
46791 Sep 22 23:15:13.869 DEBG 523 Reopen extent 61
46792 Sep 22 23:15:13.869 DEBG 523 Reopen extent 61
46793 Sep 22 23:15:13.870 DEBG [2] It's time to notify for 523
46794 Sep 22 23:15:13.870 INFO Completion from [2] id:523 status:true
46795 Sep 22 23:15:13.870 INFO [524/752] Repair commands completed
46796 Sep 22 23:15:13.870 INFO Pop front: ReconcileIO { id: ReconciliationId(524), op: ExtentFlush { repair_id: ReconciliationId(524), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46797 Sep 22 23:15:13.870 INFO Sent repair work, now wait for resp
46798 Sep 22 23:15:13.870 INFO [0] received reconcile message
46799 Sep 22 23:15:13.870 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(524), op: ExtentFlush { repair_id: ReconciliationId(524), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46800 Sep 22 23:15:13.870 INFO [0] client ExtentFlush { repair_id: ReconciliationId(524), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46801 Sep 22 23:15:13.870 INFO [1] received reconcile message
46802 Sep 22 23:15:13.870 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(524), op: ExtentFlush { repair_id: ReconciliationId(524), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46803 Sep 22 23:15:13.870 INFO [1] client ExtentFlush { repair_id: ReconciliationId(524), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46804 Sep 22 23:15:13.870 INFO [2] received reconcile message
46805 Sep 22 23:15:13.870 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(524), op: ExtentFlush { repair_id: ReconciliationId(524), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46806 Sep 22 23:15:13.870 INFO [2] client ExtentFlush { repair_id: ReconciliationId(524), extent_id: 134, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46807 Sep 22 23:15:13.871 DEBG 524 Flush extent 134 with f:2 g:2
46808 Sep 22 23:15:13.871 DEBG Flush just extent 134 with f:2 and g:2
46809 Sep 22 23:15:13.871 DEBG [1] It's time to notify for 524
46810 Sep 22 23:15:13.871 INFO Completion from [1] id:524 status:true
46811 Sep 22 23:15:13.871 INFO [525/752] Repair commands completed
46812 Sep 22 23:15:13.871 INFO Pop front: ReconcileIO { id: ReconciliationId(525), op: ExtentClose { repair_id: ReconciliationId(525), extent_id: 134 }, state: ClientData([New, New, New]) }
46813 Sep 22 23:15:13.871 INFO Sent repair work, now wait for resp
46814 Sep 22 23:15:13.871 INFO [0] received reconcile message
46815 Sep 22 23:15:13.871 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(525), op: ExtentClose { repair_id: ReconciliationId(525), extent_id: 134 }, state: ClientData([InProgress, New, New]) }, : downstairs
46816 Sep 22 23:15:13.871 INFO [0] client ExtentClose { repair_id: ReconciliationId(525), extent_id: 134 }
46817 Sep 22 23:15:13.871 INFO [1] received reconcile message
46818 Sep 22 23:15:13.871 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(525), op: ExtentClose { repair_id: ReconciliationId(525), extent_id: 134 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46819 Sep 22 23:15:13.871 INFO [1] client ExtentClose { repair_id: ReconciliationId(525), extent_id: 134 }
46820 Sep 22 23:15:13.871 INFO [2] received reconcile message
46821 Sep 22 23:15:13.871 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(525), op: ExtentClose { repair_id: ReconciliationId(525), extent_id: 134 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46822 Sep 22 23:15:13.871 INFO [2] client ExtentClose { repair_id: ReconciliationId(525), extent_id: 134 }
46823 Sep 22 23:15:13.871 DEBG 525 Close extent 134
46824 Sep 22 23:15:13.871 DEBG 525 Close extent 134
46825 Sep 22 23:15:13.872 DEBG 525 Close extent 134
46826 Sep 22 23:15:13.872 DEBG [2] It's time to notify for 525
46827 Sep 22 23:15:13.872 INFO Completion from [2] id:525 status:true
46828 Sep 22 23:15:13.872 INFO [526/752] Repair commands completed
46829 Sep 22 23:15:13.872 INFO Pop front: ReconcileIO { id: ReconciliationId(526), op: ExtentRepair { repair_id: ReconciliationId(526), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46830 Sep 22 23:15:13.872 INFO Sent repair work, now wait for resp
46831 Sep 22 23:15:13.872 INFO [0] received reconcile message
46832 Sep 22 23:15:13.872 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(526), op: ExtentRepair { repair_id: ReconciliationId(526), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46833 Sep 22 23:15:13.872 INFO [0] client ExtentRepair { repair_id: ReconciliationId(526), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46834 Sep 22 23:15:13.872 INFO [0] Sending repair request ReconciliationId(526)
46835 Sep 22 23:15:13.872 INFO [1] received reconcile message
46836 Sep 22 23:15:13.872 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(526), op: ExtentRepair { repair_id: ReconciliationId(526), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46837 Sep 22 23:15:13.872 INFO [1] client ExtentRepair { repair_id: ReconciliationId(526), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46838 Sep 22 23:15:13.872 INFO [1] No action required ReconciliationId(526)
46839 Sep 22 23:15:13.872 INFO [2] received reconcile message
46840 Sep 22 23:15:13.872 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(526), op: ExtentRepair { repair_id: ReconciliationId(526), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46841 Sep 22 23:15:13.872 INFO [2] client ExtentRepair { repair_id: ReconciliationId(526), extent_id: 134, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46842 Sep 22 23:15:13.872 INFO [2] No action required ReconciliationId(526)
46843 Sep 22 23:15:13.873 DEBG 526 Repair extent 134 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
46844 Sep 22 23:15:13.873 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/086.copy"
46845 Sep 22 23:15:13.935 INFO accepted connection, remote_addr: 127.0.0.1:54478, local_addr: 127.0.0.1:46213, task: repair
46846 Sep 22 23:15:13.936 TRCE incoming request, uri: /extent/134/files, method: GET, req_id: bbd822cb-34e9-4365-a104-a4653e2342b2, remote_addr: 127.0.0.1:54478, local_addr: 127.0.0.1:46213, task: repair
46847 Sep 22 23:15:13.936 INFO request completed, latency_us: 272, response_code: 200, uri: /extent/134/files, method: GET, req_id: bbd822cb-34e9-4365-a104-a4653e2342b2, remote_addr: 127.0.0.1:54478, local_addr: 127.0.0.1:46213, task: repair
46848 Sep 22 23:15:13.936 INFO eid:134 Found repair files: ["086", "086.db"]
46849 Sep 22 23:15:13.936 TRCE incoming request, uri: /newextent/134/data, method: GET, req_id: f8e649c0-3983-4f1f-86f8-e92d0f1a8e2d, remote_addr: 127.0.0.1:54478, local_addr: 127.0.0.1:46213, task: repair
46850 Sep 22 23:15:13.937 INFO request completed, latency_us: 368, response_code: 200, uri: /newextent/134/data, method: GET, req_id: f8e649c0-3983-4f1f-86f8-e92d0f1a8e2d, remote_addr: 127.0.0.1:54478, local_addr: 127.0.0.1:46213, task: repair
46851 Sep 22 23:15:13.942 TRCE incoming request, uri: /newextent/134/db, method: GET, req_id: d670367b-fc4c-4ebe-a643-36ead02ff54c, remote_addr: 127.0.0.1:54478, local_addr: 127.0.0.1:46213, task: repair
46852 Sep 22 23:15:13.942 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/134/db, method: GET, req_id: d670367b-fc4c-4ebe-a643-36ead02ff54c, remote_addr: 127.0.0.1:54478, local_addr: 127.0.0.1:46213, task: repair
46853 Sep 22 23:15:13.944 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/086.copy" to "/tmp/downstairs-vrx8aK6L/00/000/086.replace"
46854 Sep 22 23:15:13.944 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46855 Sep 22 23:15:13.945 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/086.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
46856 Sep 22 23:15:13.945 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/086"
46857 Sep 22 23:15:13.945 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/086.db"
46858 Sep 22 23:15:13.945 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46859 Sep 22 23:15:13.945 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/086.replace" to "/tmp/downstairs-vrx8aK6L/00/000/086.completed"
46860 Sep 22 23:15:13.945 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46861 Sep 22 23:15:13.945 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46862 Sep 22 23:15:13.946 DEBG [0] It's time to notify for 526
46863 Sep 22 23:15:13.946 INFO Completion from [0] id:526 status:true
46864 Sep 22 23:15:13.946 INFO [527/752] Repair commands completed
46865 Sep 22 23:15:13.946 INFO Pop front: ReconcileIO { id: ReconciliationId(527), op: ExtentReopen { repair_id: ReconciliationId(527), extent_id: 134 }, state: ClientData([New, New, New]) }
46866 Sep 22 23:15:13.946 INFO Sent repair work, now wait for resp
46867 Sep 22 23:15:13.946 INFO [0] received reconcile message
46868 Sep 22 23:15:13.946 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(527), op: ExtentReopen { repair_id: ReconciliationId(527), extent_id: 134 }, state: ClientData([InProgress, New, New]) }, : downstairs
46869 Sep 22 23:15:13.946 INFO [0] client ExtentReopen { repair_id: ReconciliationId(527), extent_id: 134 }
46870 Sep 22 23:15:13.946 INFO [1] received reconcile message
46871 Sep 22 23:15:13.946 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(527), op: ExtentReopen { repair_id: ReconciliationId(527), extent_id: 134 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46872 Sep 22 23:15:13.946 INFO [1] client ExtentReopen { repair_id: ReconciliationId(527), extent_id: 134 }
46873 Sep 22 23:15:13.946 INFO [2] received reconcile message
46874 Sep 22 23:15:13.946 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(527), op: ExtentReopen { repair_id: ReconciliationId(527), extent_id: 134 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46875 Sep 22 23:15:13.946 INFO [2] client ExtentReopen { repair_id: ReconciliationId(527), extent_id: 134 }
46876 Sep 22 23:15:13.946 DEBG 527 Reopen extent 134
46877 Sep 22 23:15:13.947 DEBG 527 Reopen extent 134
46878 Sep 22 23:15:13.948 DEBG 527 Reopen extent 134
46879 Sep 22 23:15:13.948 DEBG [2] It's time to notify for 527
46880 Sep 22 23:15:13.948 INFO Completion from [2] id:527 status:true
46881 Sep 22 23:15:13.948 INFO [528/752] Repair commands completed
46882 Sep 22 23:15:13.948 INFO Pop front: ReconcileIO { id: ReconciliationId(528), op: ExtentFlush { repair_id: ReconciliationId(528), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46883 Sep 22 23:15:13.948 INFO Sent repair work, now wait for resp
46884 Sep 22 23:15:13.948 INFO [0] received reconcile message
46885 Sep 22 23:15:13.948 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(528), op: ExtentFlush { repair_id: ReconciliationId(528), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46886 Sep 22 23:15:13.948 INFO [0] client ExtentFlush { repair_id: ReconciliationId(528), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46887 Sep 22 23:15:13.948 INFO [1] received reconcile message
46888 Sep 22 23:15:13.948 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(528), op: ExtentFlush { repair_id: ReconciliationId(528), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46889 Sep 22 23:15:13.949 INFO [1] client ExtentFlush { repair_id: ReconciliationId(528), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46890 Sep 22 23:15:13.949 INFO [2] received reconcile message
46891 Sep 22 23:15:13.949 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(528), op: ExtentFlush { repair_id: ReconciliationId(528), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46892 Sep 22 23:15:13.949 INFO [2] client ExtentFlush { repair_id: ReconciliationId(528), extent_id: 46, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46893 Sep 22 23:15:13.949 DEBG 528 Flush extent 46 with f:2 g:2
46894 Sep 22 23:15:13.949 DEBG Flush just extent 46 with f:2 and g:2
46895 Sep 22 23:15:13.949 DEBG [1] It's time to notify for 528
46896 Sep 22 23:15:13.949 INFO Completion from [1] id:528 status:true
46897 Sep 22 23:15:13.949 INFO [529/752] Repair commands completed
46898 Sep 22 23:15:13.949 INFO Pop front: ReconcileIO { id: ReconciliationId(529), op: ExtentClose { repair_id: ReconciliationId(529), extent_id: 46 }, state: ClientData([New, New, New]) }
46899 Sep 22 23:15:13.949 INFO Sent repair work, now wait for resp
46900 Sep 22 23:15:13.949 INFO [0] received reconcile message
46901 Sep 22 23:15:13.949 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(529), op: ExtentClose { repair_id: ReconciliationId(529), extent_id: 46 }, state: ClientData([InProgress, New, New]) }, : downstairs
46902 Sep 22 23:15:13.949 INFO [0] client ExtentClose { repair_id: ReconciliationId(529), extent_id: 46 }
46903 Sep 22 23:15:13.949 INFO [1] received reconcile message
46904 Sep 22 23:15:13.949 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(529), op: ExtentClose { repair_id: ReconciliationId(529), extent_id: 46 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46905 Sep 22 23:15:13.949 INFO [1] client ExtentClose { repair_id: ReconciliationId(529), extent_id: 46 }
46906 Sep 22 23:15:13.949 INFO [2] received reconcile message
46907 Sep 22 23:15:13.949 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(529), op: ExtentClose { repair_id: ReconciliationId(529), extent_id: 46 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46908 Sep 22 23:15:13.949 INFO [2] client ExtentClose { repair_id: ReconciliationId(529), extent_id: 46 }
46909 Sep 22 23:15:13.949 DEBG 529 Close extent 46
46910 Sep 22 23:15:13.950 DEBG 529 Close extent 46
46911 Sep 22 23:15:13.950 DEBG 529 Close extent 46
46912 Sep 22 23:15:13.950 DEBG [2] It's time to notify for 529
46913 Sep 22 23:15:13.950 INFO Completion from [2] id:529 status:true
46914 Sep 22 23:15:13.950 INFO [530/752] Repair commands completed
46915 Sep 22 23:15:13.950 INFO Pop front: ReconcileIO { id: ReconciliationId(530), op: ExtentRepair { repair_id: ReconciliationId(530), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
46916 Sep 22 23:15:13.950 INFO Sent repair work, now wait for resp
46917 Sep 22 23:15:13.950 INFO [0] received reconcile message
46918 Sep 22 23:15:13.950 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(530), op: ExtentRepair { repair_id: ReconciliationId(530), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
46919 Sep 22 23:15:13.950 INFO [0] client ExtentRepair { repair_id: ReconciliationId(530), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46920 Sep 22 23:15:13.950 INFO [0] Sending repair request ReconciliationId(530)
46921 Sep 22 23:15:13.950 INFO [1] received reconcile message
46922 Sep 22 23:15:13.950 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(530), op: ExtentRepair { repair_id: ReconciliationId(530), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46923 Sep 22 23:15:13.950 INFO [1] client ExtentRepair { repair_id: ReconciliationId(530), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46924 Sep 22 23:15:13.951 INFO [1] No action required ReconciliationId(530)
46925 Sep 22 23:15:13.951 INFO [2] received reconcile message
46926 Sep 22 23:15:13.951 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(530), op: ExtentRepair { repair_id: ReconciliationId(530), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
46927 Sep 22 23:15:13.951 INFO [2] client ExtentRepair { repair_id: ReconciliationId(530), extent_id: 46, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
46928 Sep 22 23:15:13.951 INFO [2] No action required ReconciliationId(530)
46929 Sep 22 23:15:13.951 DEBG 530 Repair extent 46 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
46930 Sep 22 23:15:13.951 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/02E.copy"
46931 Sep 22 23:15:14.016 INFO accepted connection, remote_addr: 127.0.0.1:46657, local_addr: 127.0.0.1:46213, task: repair
46932 Sep 22 23:15:14.016 TRCE incoming request, uri: /extent/46/files, method: GET, req_id: 6a7f8166-98c2-4bf6-88e7-1fdb39d9cf88, remote_addr: 127.0.0.1:46657, local_addr: 127.0.0.1:46213, task: repair
46933 Sep 22 23:15:14.016 INFO request completed, latency_us: 266, response_code: 200, uri: /extent/46/files, method: GET, req_id: 6a7f8166-98c2-4bf6-88e7-1fdb39d9cf88, remote_addr: 127.0.0.1:46657, local_addr: 127.0.0.1:46213, task: repair
46934 Sep 22 23:15:14.017 INFO eid:46 Found repair files: ["02E", "02E.db"]
46935 Sep 22 23:15:14.017 TRCE incoming request, uri: /newextent/46/data, method: GET, req_id: 8b1a172d-9453-4122-9664-af183f4cb360, remote_addr: 127.0.0.1:46657, local_addr: 127.0.0.1:46213, task: repair
46936 Sep 22 23:15:14.017 INFO request completed, latency_us: 356, response_code: 200, uri: /newextent/46/data, method: GET, req_id: 8b1a172d-9453-4122-9664-af183f4cb360, remote_addr: 127.0.0.1:46657, local_addr: 127.0.0.1:46213, task: repair
46937 Sep 22 23:15:14.023 TRCE incoming request, uri: /newextent/46/db, method: GET, req_id: dbce502e-9300-4180-88be-aa363f185c98, remote_addr: 127.0.0.1:46657, local_addr: 127.0.0.1:46213, task: repair
46938 Sep 22 23:15:14.023 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/46/db, method: GET, req_id: dbce502e-9300-4180-88be-aa363f185c98, remote_addr: 127.0.0.1:46657, local_addr: 127.0.0.1:46213, task: repair
46939 Sep 22 23:15:14.024 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/02E.copy" to "/tmp/downstairs-vrx8aK6L/00/000/02E.replace"
46940 Sep 22 23:15:14.024 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46941 Sep 22 23:15:14.025 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/02E.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
46942 Sep 22 23:15:14.026 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/02E"
46943 Sep 22 23:15:14.026 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/02E.db"
46944 Sep 22 23:15:14.026 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46945 Sep 22 23:15:14.026 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/02E.replace" to "/tmp/downstairs-vrx8aK6L/00/000/02E.completed"
46946 Sep 22 23:15:14.026 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46947 Sep 22 23:15:14.026 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
46948 Sep 22 23:15:14.026 DEBG [0] It's time to notify for 530
46949 Sep 22 23:15:14.026 INFO Completion from [0] id:530 status:true
46950 Sep 22 23:15:14.026 INFO [531/752] Repair commands completed
46951 Sep 22 23:15:14.026 INFO Pop front: ReconcileIO { id: ReconciliationId(531), op: ExtentReopen { repair_id: ReconciliationId(531), extent_id: 46 }, state: ClientData([New, New, New]) }
46952 Sep 22 23:15:14.026 INFO Sent repair work, now wait for resp
46953 Sep 22 23:15:14.026 INFO [0] received reconcile message
46954 Sep 22 23:15:14.026 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(531), op: ExtentReopen { repair_id: ReconciliationId(531), extent_id: 46 }, state: ClientData([InProgress, New, New]) }, : downstairs
46955 Sep 22 23:15:14.027 INFO [0] client ExtentReopen { repair_id: ReconciliationId(531), extent_id: 46 }
46956 Sep 22 23:15:14.027 INFO [1] received reconcile message
46957 Sep 22 23:15:14.027 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(531), op: ExtentReopen { repair_id: ReconciliationId(531), extent_id: 46 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46958 Sep 22 23:15:14.027 INFO [1] client ExtentReopen { repair_id: ReconciliationId(531), extent_id: 46 }
46959 Sep 22 23:15:14.027 INFO [2] received reconcile message
46960 Sep 22 23:15:14.027 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(531), op: ExtentReopen { repair_id: ReconciliationId(531), extent_id: 46 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46961 Sep 22 23:15:14.027 INFO [2] client ExtentReopen { repair_id: ReconciliationId(531), extent_id: 46 }
46962 Sep 22 23:15:14.027 DEBG 531 Reopen extent 46
46963 Sep 22 23:15:14.028 DEBG 531 Reopen extent 46
46964 Sep 22 23:15:14.028 DEBG 531 Reopen extent 46
46965 Sep 22 23:15:14.029 DEBG [2] It's time to notify for 531
46966 Sep 22 23:15:14.029 INFO Completion from [2] id:531 status:true
46967 Sep 22 23:15:14.029 INFO [532/752] Repair commands completed
46968 Sep 22 23:15:14.029 INFO Pop front: ReconcileIO { id: ReconciliationId(532), op: ExtentFlush { repair_id: ReconciliationId(532), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
46969 Sep 22 23:15:14.029 INFO Sent repair work, now wait for resp
46970 Sep 22 23:15:14.029 INFO [0] received reconcile message
46971 Sep 22 23:15:14.029 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(532), op: ExtentFlush { repair_id: ReconciliationId(532), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
46972 Sep 22 23:15:14.029 INFO [0] client ExtentFlush { repair_id: ReconciliationId(532), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46973 Sep 22 23:15:14.029 INFO [1] received reconcile message
46974 Sep 22 23:15:14.029 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(532), op: ExtentFlush { repair_id: ReconciliationId(532), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
46975 Sep 22 23:15:14.029 INFO [1] client ExtentFlush { repair_id: ReconciliationId(532), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46976 Sep 22 23:15:14.029 INFO [2] received reconcile message
46977 Sep 22 23:15:14.029 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(532), op: ExtentFlush { repair_id: ReconciliationId(532), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
46978 Sep 22 23:15:14.029 INFO [2] client ExtentFlush { repair_id: ReconciliationId(532), extent_id: 41, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
46979 Sep 22 23:15:14.029 DEBG 532 Flush extent 41 with f:2 g:2
46980 Sep 22 23:15:14.029 DEBG Flush just extent 41 with f:2 and g:2
46981 Sep 22 23:15:14.029 DEBG [1] It's time to notify for 532
46982 Sep 22 23:15:14.029 INFO Completion from [1] id:532 status:true
46983 Sep 22 23:15:14.029 INFO [533/752] Repair commands completed
46984 Sep 22 23:15:14.029 INFO Pop front: ReconcileIO { id: ReconciliationId(533), op: ExtentClose { repair_id: ReconciliationId(533), extent_id: 41 }, state: ClientData([New, New, New]) }
46985 Sep 22 23:15:14.030 INFO Sent repair work, now wait for resp
46986 Sep 22 23:15:14.030 INFO [0] received reconcile message
46987 Sep 22 23:15:14.030 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(533), op: ExtentClose { repair_id: ReconciliationId(533), extent_id: 41 }, state: ClientData([InProgress, New, New]) }, : downstairs
46988 Sep 22 23:15:14.030 INFO [0] client ExtentClose { repair_id: ReconciliationId(533), extent_id: 41 }
46989 Sep 22 23:15:14.030 INFO [1] received reconcile message
46990 Sep 22 23:15:14.030 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(533), op: ExtentClose { repair_id: ReconciliationId(533), extent_id: 41 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
46991 Sep 22 23:15:14.030 INFO [1] client ExtentClose { repair_id: ReconciliationId(533), extent_id: 41 }
46992 Sep 22 23:15:14.030 INFO [2] received reconcile message
46993 Sep 22 23:15:14.030 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(533), op: ExtentClose { repair_id: ReconciliationId(533), extent_id: 41 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
46994 Sep 22 23:15:14.030 INFO [2] client ExtentClose { repair_id: ReconciliationId(533), extent_id: 41 }
46995 Sep 22 23:15:14.030 DEBG 533 Close extent 41
46996 Sep 22 23:15:14.030 DEBG 533 Close extent 41
46997 Sep 22 23:15:14.030 DEBG 533 Close extent 41
46998 Sep 22 23:15:14.031 DEBG [2] It's time to notify for 533
46999 Sep 22 23:15:14.031 INFO Completion from [2] id:533 status:true
47000 Sep 22 23:15:14.031 INFO [534/752] Repair commands completed
47001 Sep 22 23:15:14.031 INFO Pop front: ReconcileIO { id: ReconciliationId(534), op: ExtentRepair { repair_id: ReconciliationId(534), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47002 Sep 22 23:15:14.031 INFO Sent repair work, now wait for resp
47003 Sep 22 23:15:14.031 INFO [0] received reconcile message
47004 Sep 22 23:15:14.031 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(534), op: ExtentRepair { repair_id: ReconciliationId(534), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47005 Sep 22 23:15:14.031 INFO [0] client ExtentRepair { repair_id: ReconciliationId(534), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47006 Sep 22 23:15:14.031 INFO [0] Sending repair request ReconciliationId(534)
47007 Sep 22 23:15:14.031 INFO [1] received reconcile message
47008 Sep 22 23:15:14.031 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(534), op: ExtentRepair { repair_id: ReconciliationId(534), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47009 Sep 22 23:15:14.031 INFO [1] client ExtentRepair { repair_id: ReconciliationId(534), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47010 Sep 22 23:15:14.031 INFO [1] No action required ReconciliationId(534)
47011 Sep 22 23:15:14.031 INFO [2] received reconcile message
47012 Sep 22 23:15:14.031 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(534), op: ExtentRepair { repair_id: ReconciliationId(534), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47013 Sep 22 23:15:14.031 INFO [2] client ExtentRepair { repair_id: ReconciliationId(534), extent_id: 41, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47014 Sep 22 23:15:14.031 INFO [2] No action required ReconciliationId(534)
47015 Sep 22 23:15:14.031 DEBG 534 Repair extent 41 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
47016 Sep 22 23:15:14.031 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/029.copy"
47017 Sep 22 23:15:14.096 INFO accepted connection, remote_addr: 127.0.0.1:63961, local_addr: 127.0.0.1:46213, task: repair
47018 Sep 22 23:15:14.097 TRCE incoming request, uri: /extent/41/files, method: GET, req_id: 725ac720-fad4-4c89-98d9-0b1ababd161e, remote_addr: 127.0.0.1:63961, local_addr: 127.0.0.1:46213, task: repair
47019 Sep 22 23:15:14.097 INFO request completed, latency_us: 244, response_code: 200, uri: /extent/41/files, method: GET, req_id: 725ac720-fad4-4c89-98d9-0b1ababd161e, remote_addr: 127.0.0.1:63961, local_addr: 127.0.0.1:46213, task: repair
47020 Sep 22 23:15:14.097 INFO eid:41 Found repair files: ["029", "029.db"]
47021 Sep 22 23:15:14.097 TRCE incoming request, uri: /newextent/41/data, method: GET, req_id: 009e6c50-8441-4e9e-9246-4ca0383bac29, remote_addr: 127.0.0.1:63961, local_addr: 127.0.0.1:46213, task: repair
47022 Sep 22 23:15:14.098 INFO request completed, latency_us: 341, response_code: 200, uri: /newextent/41/data, method: GET, req_id: 009e6c50-8441-4e9e-9246-4ca0383bac29, remote_addr: 127.0.0.1:63961, local_addr: 127.0.0.1:46213, task: repair
47023 Sep 22 23:15:14.098 DEBG Read :1086 deps:[JobId(1085)] res:true
47024 Sep 22 23:15:14.103 TRCE incoming request, uri: /newextent/41/db, method: GET, req_id: 76cde1a7-856c-409c-9926-a5a2011a5da1, remote_addr: 127.0.0.1:63961, local_addr: 127.0.0.1:46213, task: repair
47025 Sep 22 23:15:14.103 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/41/db, method: GET, req_id: 76cde1a7-856c-409c-9926-a5a2011a5da1, remote_addr: 127.0.0.1:63961, local_addr: 127.0.0.1:46213, task: repair
47026 Sep 22 23:15:14.105 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/029.copy" to "/tmp/downstairs-vrx8aK6L/00/000/029.replace"
47027 Sep 22 23:15:14.105 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47028 Sep 22 23:15:14.106 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/029.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
47029 Sep 22 23:15:14.106 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/029"
47030 Sep 22 23:15:14.106 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/029.db"
47031 Sep 22 23:15:14.106 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47032 Sep 22 23:15:14.106 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/029.replace" to "/tmp/downstairs-vrx8aK6L/00/000/029.completed"
47033 Sep 22 23:15:14.106 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47034 Sep 22 23:15:14.106 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47035 Sep 22 23:15:14.106 DEBG [0] It's time to notify for 534
47036 Sep 22 23:15:14.107 INFO Completion from [0] id:534 status:true
47037 Sep 22 23:15:14.107 INFO [535/752] Repair commands completed
47038 Sep 22 23:15:14.107 INFO Pop front: ReconcileIO { id: ReconciliationId(535), op: ExtentReopen { repair_id: ReconciliationId(535), extent_id: 41 }, state: ClientData([New, New, New]) }
47039 Sep 22 23:15:14.107 INFO Sent repair work, now wait for resp
47040 Sep 22 23:15:14.107 INFO [0] received reconcile message
47041 Sep 22 23:15:14.107 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(535), op: ExtentReopen { repair_id: ReconciliationId(535), extent_id: 41 }, state: ClientData([InProgress, New, New]) }, : downstairs
47042 Sep 22 23:15:14.107 INFO [0] client ExtentReopen { repair_id: ReconciliationId(535), extent_id: 41 }
47043 Sep 22 23:15:14.107 INFO [1] received reconcile message
47044 Sep 22 23:15:14.107 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(535), op: ExtentReopen { repair_id: ReconciliationId(535), extent_id: 41 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47045 Sep 22 23:15:14.107 INFO [1] client ExtentReopen { repair_id: ReconciliationId(535), extent_id: 41 }
47046 Sep 22 23:15:14.107 INFO [2] received reconcile message
47047 Sep 22 23:15:14.107 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(535), op: ExtentReopen { repair_id: ReconciliationId(535), extent_id: 41 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47048 Sep 22 23:15:14.107 INFO [2] client ExtentReopen { repair_id: ReconciliationId(535), extent_id: 41 }
47049 Sep 22 23:15:14.107 DEBG 535 Reopen extent 41
47050 Sep 22 23:15:14.108 DEBG 535 Reopen extent 41
47051 Sep 22 23:15:14.108 DEBG 535 Reopen extent 41
47052 Sep 22 23:15:14.109 DEBG [2] It's time to notify for 535
47053 Sep 22 23:15:14.109 INFO Completion from [2] id:535 status:true
47054 Sep 22 23:15:14.109 INFO [536/752] Repair commands completed
47055 Sep 22 23:15:14.109 INFO Pop front: ReconcileIO { id: ReconciliationId(536), op: ExtentFlush { repair_id: ReconciliationId(536), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47056 Sep 22 23:15:14.109 INFO Sent repair work, now wait for resp
47057 Sep 22 23:15:14.109 INFO [0] received reconcile message
47058 Sep 22 23:15:14.109 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(536), op: ExtentFlush { repair_id: ReconciliationId(536), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47059 Sep 22 23:15:14.109 INFO [0] client ExtentFlush { repair_id: ReconciliationId(536), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47060 Sep 22 23:15:14.109 INFO [1] received reconcile message
47061 Sep 22 23:15:14.109 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(536), op: ExtentFlush { repair_id: ReconciliationId(536), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47062 Sep 22 23:15:14.109 INFO [1] client ExtentFlush { repair_id: ReconciliationId(536), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47063 Sep 22 23:15:14.109 INFO [2] received reconcile message
47064 Sep 22 23:15:14.109 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(536), op: ExtentFlush { repair_id: ReconciliationId(536), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47065 Sep 22 23:15:14.109 INFO [2] client ExtentFlush { repair_id: ReconciliationId(536), extent_id: 22, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47066 Sep 22 23:15:14.109 DEBG 536 Flush extent 22 with f:2 g:2
47067 Sep 22 23:15:14.109 DEBG Flush just extent 22 with f:2 and g:2
47068 Sep 22 23:15:14.109 DEBG [1] It's time to notify for 536
47069 Sep 22 23:15:14.110 INFO Completion from [1] id:536 status:true
47070 Sep 22 23:15:14.110 INFO [537/752] Repair commands completed
47071 Sep 22 23:15:14.110 INFO Pop front: ReconcileIO { id: ReconciliationId(537), op: ExtentClose { repair_id: ReconciliationId(537), extent_id: 22 }, state: ClientData([New, New, New]) }
47072 Sep 22 23:15:14.110 INFO Sent repair work, now wait for resp
47073 Sep 22 23:15:14.110 INFO [0] received reconcile message
47074 Sep 22 23:15:14.110 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(537), op: ExtentClose { repair_id: ReconciliationId(537), extent_id: 22 }, state: ClientData([InProgress, New, New]) }, : downstairs
47075 Sep 22 23:15:14.110 INFO [0] client ExtentClose { repair_id: ReconciliationId(537), extent_id: 22 }
47076 Sep 22 23:15:14.110 INFO [1] received reconcile message
47077 Sep 22 23:15:14.110 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(537), op: ExtentClose { repair_id: ReconciliationId(537), extent_id: 22 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47078 Sep 22 23:15:14.110 INFO [1] client ExtentClose { repair_id: ReconciliationId(537), extent_id: 22 }
47079 Sep 22 23:15:14.110 INFO [2] received reconcile message
47080 Sep 22 23:15:14.110 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(537), op: ExtentClose { repair_id: ReconciliationId(537), extent_id: 22 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47081 Sep 22 23:15:14.110 INFO [2] client ExtentClose { repair_id: ReconciliationId(537), extent_id: 22 }
47082 Sep 22 23:15:14.110 DEBG 537 Close extent 22
47083 Sep 22 23:15:14.110 DEBG 537 Close extent 22
47084 Sep 22 23:15:14.111 DEBG 537 Close extent 22
47085 Sep 22 23:15:14.111 DEBG [2] It's time to notify for 537
47086 Sep 22 23:15:14.111 INFO Completion from [2] id:537 status:true
47087 Sep 22 23:15:14.111 INFO [538/752] Repair commands completed
47088 Sep 22 23:15:14.111 INFO Pop front: ReconcileIO { id: ReconciliationId(538), op: ExtentRepair { repair_id: ReconciliationId(538), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47089 Sep 22 23:15:14.111 INFO Sent repair work, now wait for resp
47090 Sep 22 23:15:14.111 INFO [0] received reconcile message
47091 Sep 22 23:15:14.111 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(538), op: ExtentRepair { repair_id: ReconciliationId(538), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47092 Sep 22 23:15:14.111 INFO [0] client ExtentRepair { repair_id: ReconciliationId(538), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47093 Sep 22 23:15:14.111 INFO [0] Sending repair request ReconciliationId(538)
47094 Sep 22 23:15:14.111 INFO [1] received reconcile message
47095 Sep 22 23:15:14.111 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(538), op: ExtentRepair { repair_id: ReconciliationId(538), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47096 Sep 22 23:15:14.111 INFO [1] client ExtentRepair { repair_id: ReconciliationId(538), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47097 Sep 22 23:15:14.111 INFO [1] No action required ReconciliationId(538)
47098 Sep 22 23:15:14.111 INFO [2] received reconcile message
47099 Sep 22 23:15:14.111 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(538), op: ExtentRepair { repair_id: ReconciliationId(538), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47100 Sep 22 23:15:14.111 INFO [2] client ExtentRepair { repair_id: ReconciliationId(538), extent_id: 22, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47101 Sep 22 23:15:14.111 INFO [2] No action required ReconciliationId(538)
47102 Sep 22 23:15:14.111 DEBG 538 Repair extent 22 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
47103 Sep 22 23:15:14.111 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/016.copy"
47104 Sep 22 23:15:14.120 INFO [lossy] skipping 1086
47105 Sep 22 23:15:14.126 DEBG Read :1086 deps:[JobId(1085)] res:true
47106 Sep 22 23:15:14.150 INFO [lossy] skipping 1087
47107 Sep 22 23:15:14.150 INFO [lossy] skipping 1088
47108 Sep 22 23:15:14.150 DEBG Flush :1087 extent_limit None deps:[JobId(1086), JobId(1085)] res:true f:33 g:1
47109 Sep 22 23:15:14.156 DEBG Read :1088 deps:[JobId(1087)] res:true
47110 Sep 22 23:15:14.176 INFO accepted connection, remote_addr: 127.0.0.1:39939, local_addr: 127.0.0.1:46213, task: repair
47111 Sep 22 23:15:14.176 TRCE incoming request, uri: /extent/22/files, method: GET, req_id: 7abee1b1-15f9-410b-bbf9-acef2c8b6778, remote_addr: 127.0.0.1:39939, local_addr: 127.0.0.1:46213, task: repair
47112 Sep 22 23:15:14.176 INFO request completed, latency_us: 277, response_code: 200, uri: /extent/22/files, method: GET, req_id: 7abee1b1-15f9-410b-bbf9-acef2c8b6778, remote_addr: 127.0.0.1:39939, local_addr: 127.0.0.1:46213, task: repair
47113 Sep 22 23:15:14.177 INFO eid:22 Found repair files: ["016", "016.db"]
47114 Sep 22 23:15:14.177 TRCE incoming request, uri: /newextent/22/data, method: GET, req_id: a01f6768-a5ae-4063-af4d-f1fb5a44c48a, remote_addr: 127.0.0.1:39939, local_addr: 127.0.0.1:46213, task: repair
47115 Sep 22 23:15:14.177 INFO request completed, latency_us: 379, response_code: 200, uri: /newextent/22/data, method: GET, req_id: a01f6768-a5ae-4063-af4d-f1fb5a44c48a, remote_addr: 127.0.0.1:39939, local_addr: 127.0.0.1:46213, task: repair
47116 Sep 22 23:15:14.179 INFO [lossy] sleeping 1 second
47117 Sep 22 23:15:14.183 TRCE incoming request, uri: /newextent/22/db, method: GET, req_id: bb3d2cac-681c-4778-b707-7f86389e59bf, remote_addr: 127.0.0.1:39939, local_addr: 127.0.0.1:46213, task: repair
47118 Sep 22 23:15:14.183 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/22/db, method: GET, req_id: bb3d2cac-681c-4778-b707-7f86389e59bf, remote_addr: 127.0.0.1:39939, local_addr: 127.0.0.1:46213, task: repair
47119 Sep 22 23:15:14.184 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/016.copy" to "/tmp/downstairs-vrx8aK6L/00/000/016.replace"
47120 Sep 22 23:15:14.184 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47121 Sep 22 23:15:14.185 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/016.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
47122 Sep 22 23:15:14.185 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/016"
47123 Sep 22 23:15:14.186 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/016.db"
47124 Sep 22 23:15:14.186 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47125 Sep 22 23:15:14.186 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/016.replace" to "/tmp/downstairs-vrx8aK6L/00/000/016.completed"
47126 Sep 22 23:15:14.186 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47127 Sep 22 23:15:14.186 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47128 Sep 22 23:15:14.186 DEBG [0] It's time to notify for 538
47129 Sep 22 23:15:14.186 INFO Completion from [0] id:538 status:true
47130 Sep 22 23:15:14.186 INFO [539/752] Repair commands completed
47131 Sep 22 23:15:14.186 INFO Pop front: ReconcileIO { id: ReconciliationId(539), op: ExtentReopen { repair_id: ReconciliationId(539), extent_id: 22 }, state: ClientData([New, New, New]) }
47132 Sep 22 23:15:14.186 INFO Sent repair work, now wait for resp
47133 Sep 22 23:15:14.186 INFO [0] received reconcile message
47134 Sep 22 23:15:14.186 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(539), op: ExtentReopen { repair_id: ReconciliationId(539), extent_id: 22 }, state: ClientData([InProgress, New, New]) }, : downstairs
47135 Sep 22 23:15:14.186 INFO [0] client ExtentReopen { repair_id: ReconciliationId(539), extent_id: 22 }
47136 Sep 22 23:15:14.186 INFO [1] received reconcile message
47137 Sep 22 23:15:14.186 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(539), op: ExtentReopen { repair_id: ReconciliationId(539), extent_id: 22 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47138 Sep 22 23:15:14.186 INFO [1] client ExtentReopen { repair_id: ReconciliationId(539), extent_id: 22 }
47139 Sep 22 23:15:14.186 INFO [2] received reconcile message
47140 Sep 22 23:15:14.186 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(539), op: ExtentReopen { repair_id: ReconciliationId(539), extent_id: 22 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47141 Sep 22 23:15:14.186 INFO [2] client ExtentReopen { repair_id: ReconciliationId(539), extent_id: 22 }
47142 Sep 22 23:15:14.186 DEBG 539 Reopen extent 22
47143 Sep 22 23:15:14.187 DEBG 539 Reopen extent 22
47144 Sep 22 23:15:14.188 DEBG 539 Reopen extent 22
47145 Sep 22 23:15:14.188 DEBG [2] It's time to notify for 539
47146 Sep 22 23:15:14.188 INFO Completion from [2] id:539 status:true
47147 Sep 22 23:15:14.188 INFO [540/752] Repair commands completed
47148 Sep 22 23:15:14.188 INFO Pop front: ReconcileIO { id: ReconciliationId(540), op: ExtentFlush { repair_id: ReconciliationId(540), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47149 Sep 22 23:15:14.189 INFO Sent repair work, now wait for resp
47150 Sep 22 23:15:14.189 INFO [0] received reconcile message
47151 Sep 22 23:15:14.189 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(540), op: ExtentFlush { repair_id: ReconciliationId(540), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47152 Sep 22 23:15:14.189 INFO [0] client ExtentFlush { repair_id: ReconciliationId(540), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47153 Sep 22 23:15:14.189 INFO [1] received reconcile message
47154 Sep 22 23:15:14.189 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(540), op: ExtentFlush { repair_id: ReconciliationId(540), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47155 Sep 22 23:15:14.189 INFO [1] client ExtentFlush { repair_id: ReconciliationId(540), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47156 Sep 22 23:15:14.189 INFO [2] received reconcile message
47157 Sep 22 23:15:14.189 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(540), op: ExtentFlush { repair_id: ReconciliationId(540), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47158 Sep 22 23:15:14.189 INFO [2] client ExtentFlush { repair_id: ReconciliationId(540), extent_id: 56, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47159 Sep 22 23:15:14.189 DEBG 540 Flush extent 56 with f:2 g:2
47160 Sep 22 23:15:14.189 DEBG Flush just extent 56 with f:2 and g:2
47161 Sep 22 23:15:14.189 DEBG [1] It's time to notify for 540
47162 Sep 22 23:15:14.189 INFO Completion from [1] id:540 status:true
47163 Sep 22 23:15:14.189 INFO [541/752] Repair commands completed
47164 Sep 22 23:15:14.189 INFO Pop front: ReconcileIO { id: ReconciliationId(541), op: ExtentClose { repair_id: ReconciliationId(541), extent_id: 56 }, state: ClientData([New, New, New]) }
47165 Sep 22 23:15:14.189 INFO Sent repair work, now wait for resp
47166 Sep 22 23:15:14.189 INFO [0] received reconcile message
47167 Sep 22 23:15:14.189 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(541), op: ExtentClose { repair_id: ReconciliationId(541), extent_id: 56 }, state: ClientData([InProgress, New, New]) }, : downstairs
47168 Sep 22 23:15:14.189 INFO [0] client ExtentClose { repair_id: ReconciliationId(541), extent_id: 56 }
47169 Sep 22 23:15:14.189 INFO [1] received reconcile message
47170 Sep 22 23:15:14.189 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(541), op: ExtentClose { repair_id: ReconciliationId(541), extent_id: 56 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47171 Sep 22 23:15:14.189 INFO [1] client ExtentClose { repair_id: ReconciliationId(541), extent_id: 56 }
47172 Sep 22 23:15:14.189 INFO [2] received reconcile message
47173 Sep 22 23:15:14.189 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(541), op: ExtentClose { repair_id: ReconciliationId(541), extent_id: 56 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47174 Sep 22 23:15:14.189 INFO [2] client ExtentClose { repair_id: ReconciliationId(541), extent_id: 56 }
47175 Sep 22 23:15:14.189 DEBG 541 Close extent 56
47176 Sep 22 23:15:14.190 DEBG 541 Close extent 56
47177 Sep 22 23:15:14.190 DEBG 541 Close extent 56
47178 Sep 22 23:15:14.190 DEBG [2] It's time to notify for 541
47179 Sep 22 23:15:14.190 INFO Completion from [2] id:541 status:true
47180 Sep 22 23:15:14.190 INFO [542/752] Repair commands completed
47181 Sep 22 23:15:14.190 INFO Pop front: ReconcileIO { id: ReconciliationId(542), op: ExtentRepair { repair_id: ReconciliationId(542), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47182 Sep 22 23:15:14.191 INFO Sent repair work, now wait for resp
47183 Sep 22 23:15:14.191 INFO [0] received reconcile message
47184 Sep 22 23:15:14.191 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(542), op: ExtentRepair { repair_id: ReconciliationId(542), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47185 Sep 22 23:15:14.191 INFO [0] client ExtentRepair { repair_id: ReconciliationId(542), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47186 Sep 22 23:15:14.191 INFO [0] Sending repair request ReconciliationId(542)
47187 Sep 22 23:15:14.191 INFO [1] received reconcile message
47188 Sep 22 23:15:14.191 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(542), op: ExtentRepair { repair_id: ReconciliationId(542), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47189 Sep 22 23:15:14.191 INFO [1] client ExtentRepair { repair_id: ReconciliationId(542), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47190 Sep 22 23:15:14.191 INFO [1] No action required ReconciliationId(542)
47191 Sep 22 23:15:14.191 INFO [2] received reconcile message
47192 Sep 22 23:15:14.191 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(542), op: ExtentRepair { repair_id: ReconciliationId(542), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47193 Sep 22 23:15:14.191 INFO [2] client ExtentRepair { repair_id: ReconciliationId(542), extent_id: 56, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47194 Sep 22 23:15:14.191 INFO [2] No action required ReconciliationId(542)
47195 Sep 22 23:15:14.191 DEBG 542 Repair extent 56 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
47196 Sep 22 23:15:14.191 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/038.copy"
47197 Sep 22 23:15:14.254 INFO accepted connection, remote_addr: 127.0.0.1:58337, local_addr: 127.0.0.1:46213, task: repair
47198 Sep 22 23:15:14.254 TRCE incoming request, uri: /extent/56/files, method: GET, req_id: 8b7ef680-3225-4809-af9a-8f716014fdff, remote_addr: 127.0.0.1:58337, local_addr: 127.0.0.1:46213, task: repair
47199 Sep 22 23:15:14.254 INFO request completed, latency_us: 214, response_code: 200, uri: /extent/56/files, method: GET, req_id: 8b7ef680-3225-4809-af9a-8f716014fdff, remote_addr: 127.0.0.1:58337, local_addr: 127.0.0.1:46213, task: repair
47200 Sep 22 23:15:14.254 INFO eid:56 Found repair files: ["038", "038.db"]
47201 Sep 22 23:15:14.255 TRCE incoming request, uri: /newextent/56/data, method: GET, req_id: 0079d7d8-d962-42b5-854a-6f4b6300c886, remote_addr: 127.0.0.1:58337, local_addr: 127.0.0.1:46213, task: repair
47202 Sep 22 23:15:14.255 INFO request completed, latency_us: 251, response_code: 200, uri: /newextent/56/data, method: GET, req_id: 0079d7d8-d962-42b5-854a-6f4b6300c886, remote_addr: 127.0.0.1:58337, local_addr: 127.0.0.1:46213, task: repair
47203 Sep 22 23:15:14.260 TRCE incoming request, uri: /newextent/56/db, method: GET, req_id: 49725e55-88b2-4383-a07f-0c525114522b, remote_addr: 127.0.0.1:58337, local_addr: 127.0.0.1:46213, task: repair
47204 Sep 22 23:15:14.260 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/56/db, method: GET, req_id: 49725e55-88b2-4383-a07f-0c525114522b, remote_addr: 127.0.0.1:58337, local_addr: 127.0.0.1:46213, task: repair
47205 Sep 22 23:15:14.262 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/038.copy" to "/tmp/downstairs-vrx8aK6L/00/000/038.replace"
47206 Sep 22 23:15:14.262 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47207 Sep 22 23:15:14.262 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/038.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
47208 Sep 22 23:15:14.263 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/038"
47209 Sep 22 23:15:14.263 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/038.db"
47210 Sep 22 23:15:14.263 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47211 Sep 22 23:15:14.263 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/038.replace" to "/tmp/downstairs-vrx8aK6L/00/000/038.completed"
47212 Sep 22 23:15:14.263 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47213 Sep 22 23:15:14.263 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47214 Sep 22 23:15:14.263 DEBG [0] It's time to notify for 542
47215 Sep 22 23:15:14.263 INFO Completion from [0] id:542 status:true
47216 Sep 22 23:15:14.263 INFO [543/752] Repair commands completed
47217 Sep 22 23:15:14.263 INFO Pop front: ReconcileIO { id: ReconciliationId(543), op: ExtentReopen { repair_id: ReconciliationId(543), extent_id: 56 }, state: ClientData([New, New, New]) }
47218 Sep 22 23:15:14.263 INFO Sent repair work, now wait for resp
47219 Sep 22 23:15:14.263 INFO [0] received reconcile message
47220 Sep 22 23:15:14.263 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(543), op: ExtentReopen { repair_id: ReconciliationId(543), extent_id: 56 }, state: ClientData([InProgress, New, New]) }, : downstairs
47221 Sep 22 23:15:14.263 INFO [0] client ExtentReopen { repair_id: ReconciliationId(543), extent_id: 56 }
47222 Sep 22 23:15:14.263 INFO [1] received reconcile message
47223 Sep 22 23:15:14.263 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(543), op: ExtentReopen { repair_id: ReconciliationId(543), extent_id: 56 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47224 Sep 22 23:15:14.263 INFO [1] client ExtentReopen { repair_id: ReconciliationId(543), extent_id: 56 }
47225 Sep 22 23:15:14.264 INFO [2] received reconcile message
47226 Sep 22 23:15:14.264 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(543), op: ExtentReopen { repair_id: ReconciliationId(543), extent_id: 56 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47227 Sep 22 23:15:14.264 INFO [2] client ExtentReopen { repair_id: ReconciliationId(543), extent_id: 56 }
47228 Sep 22 23:15:14.264 DEBG 543 Reopen extent 56
47229 Sep 22 23:15:14.264 DEBG 543 Reopen extent 56
47230 Sep 22 23:15:14.265 DEBG 543 Reopen extent 56
47231 Sep 22 23:15:14.265 DEBG [2] It's time to notify for 543
47232 Sep 22 23:15:14.265 INFO Completion from [2] id:543 status:true
47233 Sep 22 23:15:14.265 INFO [544/752] Repair commands completed
47234 Sep 22 23:15:14.265 INFO Pop front: ReconcileIO { id: ReconciliationId(544), op: ExtentFlush { repair_id: ReconciliationId(544), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47235 Sep 22 23:15:14.265 INFO Sent repair work, now wait for resp
47236 Sep 22 23:15:14.265 INFO [0] received reconcile message
47237 Sep 22 23:15:14.266 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(544), op: ExtentFlush { repair_id: ReconciliationId(544), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47238 Sep 22 23:15:14.266 INFO [0] client ExtentFlush { repair_id: ReconciliationId(544), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47239 Sep 22 23:15:14.266 INFO [1] received reconcile message
47240 Sep 22 23:15:14.266 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(544), op: ExtentFlush { repair_id: ReconciliationId(544), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47241 Sep 22 23:15:14.266 INFO [1] client ExtentFlush { repair_id: ReconciliationId(544), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47242 Sep 22 23:15:14.266 INFO [2] received reconcile message
47243 Sep 22 23:15:14.266 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(544), op: ExtentFlush { repair_id: ReconciliationId(544), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47244 Sep 22 23:15:14.266 INFO [2] client ExtentFlush { repair_id: ReconciliationId(544), extent_id: 23, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47245 Sep 22 23:15:14.266 DEBG 544 Flush extent 23 with f:2 g:2
47246 Sep 22 23:15:14.266 DEBG Flush just extent 23 with f:2 and g:2
47247 Sep 22 23:15:14.266 DEBG [1] It's time to notify for 544
47248 Sep 22 23:15:14.266 INFO Completion from [1] id:544 status:true
47249 Sep 22 23:15:14.266 INFO [545/752] Repair commands completed
47250 Sep 22 23:15:14.266 INFO Pop front: ReconcileIO { id: ReconciliationId(545), op: ExtentClose { repair_id: ReconciliationId(545), extent_id: 23 }, state: ClientData([New, New, New]) }
47251 Sep 22 23:15:14.266 INFO Sent repair work, now wait for resp
47252 Sep 22 23:15:14.266 INFO [0] received reconcile message
47253 Sep 22 23:15:14.266 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(545), op: ExtentClose { repair_id: ReconciliationId(545), extent_id: 23 }, state: ClientData([InProgress, New, New]) }, : downstairs
47254 Sep 22 23:15:14.266 INFO [0] client ExtentClose { repair_id: ReconciliationId(545), extent_id: 23 }
47255 Sep 22 23:15:14.266 INFO [1] received reconcile message
47256 Sep 22 23:15:14.266 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(545), op: ExtentClose { repair_id: ReconciliationId(545), extent_id: 23 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47257 Sep 22 23:15:14.266 INFO [1] client ExtentClose { repair_id: ReconciliationId(545), extent_id: 23 }
47258 Sep 22 23:15:14.266 INFO [2] received reconcile message
47259 Sep 22 23:15:14.266 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(545), op: ExtentClose { repair_id: ReconciliationId(545), extent_id: 23 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47260 Sep 22 23:15:14.266 INFO [2] client ExtentClose { repair_id: ReconciliationId(545), extent_id: 23 }
47261 Sep 22 23:15:14.266 DEBG 545 Close extent 23
47262 Sep 22 23:15:14.267 DEBG 545 Close extent 23
47263 Sep 22 23:15:14.267 DEBG 545 Close extent 23
47264 Sep 22 23:15:14.267 DEBG [2] It's time to notify for 545
47265 Sep 22 23:15:14.267 INFO Completion from [2] id:545 status:true
47266 Sep 22 23:15:14.267 INFO [546/752] Repair commands completed
47267 Sep 22 23:15:14.267 INFO Pop front: ReconcileIO { id: ReconciliationId(546), op: ExtentRepair { repair_id: ReconciliationId(546), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47268 Sep 22 23:15:14.267 INFO Sent repair work, now wait for resp
47269 Sep 22 23:15:14.267 INFO [0] received reconcile message
47270 Sep 22 23:15:14.267 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(546), op: ExtentRepair { repair_id: ReconciliationId(546), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47271 Sep 22 23:15:14.268 INFO [0] client ExtentRepair { repair_id: ReconciliationId(546), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47272 Sep 22 23:15:14.268 INFO [0] Sending repair request ReconciliationId(546)
47273 Sep 22 23:15:14.268 INFO [1] received reconcile message
47274 Sep 22 23:15:14.268 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(546), op: ExtentRepair { repair_id: ReconciliationId(546), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47275 Sep 22 23:15:14.268 INFO [1] client ExtentRepair { repair_id: ReconciliationId(546), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47276 Sep 22 23:15:14.268 INFO [1] No action required ReconciliationId(546)
47277 Sep 22 23:15:14.268 INFO [2] received reconcile message
47278 Sep 22 23:15:14.268 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(546), op: ExtentRepair { repair_id: ReconciliationId(546), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47279 Sep 22 23:15:14.268 INFO [2] client ExtentRepair { repair_id: ReconciliationId(546), extent_id: 23, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47280 Sep 22 23:15:14.268 INFO [2] No action required ReconciliationId(546)
47281 Sep 22 23:15:14.268 DEBG 546 Repair extent 23 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
47282 Sep 22 23:15:14.268 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/017.copy"
47283 Sep 22 23:15:14.329 INFO accepted connection, remote_addr: 127.0.0.1:61275, local_addr: 127.0.0.1:46213, task: repair
47284 Sep 22 23:15:14.329 TRCE incoming request, uri: /extent/23/files, method: GET, req_id: f0bfd5b6-e5f3-41ae-bccc-999bf35a13c2, remote_addr: 127.0.0.1:61275, local_addr: 127.0.0.1:46213, task: repair
47285 Sep 22 23:15:14.329 INFO request completed, latency_us: 186, response_code: 200, uri: /extent/23/files, method: GET, req_id: f0bfd5b6-e5f3-41ae-bccc-999bf35a13c2, remote_addr: 127.0.0.1:61275, local_addr: 127.0.0.1:46213, task: repair
47286 Sep 22 23:15:14.329 INFO eid:23 Found repair files: ["017", "017.db"]
47287 Sep 22 23:15:14.330 TRCE incoming request, uri: /newextent/23/data, method: GET, req_id: 46bb07aa-63e7-427c-bd77-50e2c2f01fdd, remote_addr: 127.0.0.1:61275, local_addr: 127.0.0.1:46213, task: repair
47288 Sep 22 23:15:14.330 INFO request completed, latency_us: 328, response_code: 200, uri: /newextent/23/data, method: GET, req_id: 46bb07aa-63e7-427c-bd77-50e2c2f01fdd, remote_addr: 127.0.0.1:61275, local_addr: 127.0.0.1:46213, task: repair
47289 Sep 22 23:15:14.335 TRCE incoming request, uri: /newextent/23/db, method: GET, req_id: 13d70a99-a549-478b-a2e0-fd4d4afabfaa, remote_addr: 127.0.0.1:61275, local_addr: 127.0.0.1:46213, task: repair
47290 Sep 22 23:15:14.336 INFO request completed, latency_us: 333, response_code: 200, uri: /newextent/23/db, method: GET, req_id: 13d70a99-a549-478b-a2e0-fd4d4afabfaa, remote_addr: 127.0.0.1:61275, local_addr: 127.0.0.1:46213, task: repair
47291 Sep 22 23:15:14.337 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/017.copy" to "/tmp/downstairs-vrx8aK6L/00/000/017.replace"
47292 Sep 22 23:15:14.337 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47293 Sep 22 23:15:14.338 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/017.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
47294 Sep 22 23:15:14.338 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/017"
47295 Sep 22 23:15:14.338 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/017.db"
47296 Sep 22 23:15:14.338 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47297 Sep 22 23:15:14.338 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/017.replace" to "/tmp/downstairs-vrx8aK6L/00/000/017.completed"
47298 Sep 22 23:15:14.338 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47299 Sep 22 23:15:14.338 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47300 Sep 22 23:15:14.338 DEBG [0] It's time to notify for 546
47301 Sep 22 23:15:14.338 INFO Completion from [0] id:546 status:true
47302 Sep 22 23:15:14.338 INFO [547/752] Repair commands completed
47303 Sep 22 23:15:14.339 INFO Pop front: ReconcileIO { id: ReconciliationId(547), op: ExtentReopen { repair_id: ReconciliationId(547), extent_id: 23 }, state: ClientData([New, New, New]) }
47304 Sep 22 23:15:14.339 INFO Sent repair work, now wait for resp
47305 Sep 22 23:15:14.339 INFO [0] received reconcile message
47306 Sep 22 23:15:14.339 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(547), op: ExtentReopen { repair_id: ReconciliationId(547), extent_id: 23 }, state: ClientData([InProgress, New, New]) }, : downstairs
47307 Sep 22 23:15:14.339 INFO [0] client ExtentReopen { repair_id: ReconciliationId(547), extent_id: 23 }
47308 Sep 22 23:15:14.339 INFO [1] received reconcile message
47309 Sep 22 23:15:14.339 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(547), op: ExtentReopen { repair_id: ReconciliationId(547), extent_id: 23 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47310 Sep 22 23:15:14.339 INFO [1] client ExtentReopen { repair_id: ReconciliationId(547), extent_id: 23 }
47311 Sep 22 23:15:14.339 INFO [2] received reconcile message
47312 Sep 22 23:15:14.339 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(547), op: ExtentReopen { repair_id: ReconciliationId(547), extent_id: 23 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47313 Sep 22 23:15:14.339 INFO [2] client ExtentReopen { repair_id: ReconciliationId(547), extent_id: 23 }
47314 Sep 22 23:15:14.339 DEBG 547 Reopen extent 23
47315 Sep 22 23:15:14.339 DEBG 547 Reopen extent 23
47316 Sep 22 23:15:14.340 DEBG 547 Reopen extent 23
47317 Sep 22 23:15:14.341 DEBG [2] It's time to notify for 547
47318 Sep 22 23:15:14.341 INFO Completion from [2] id:547 status:true
47319 Sep 22 23:15:14.341 INFO [548/752] Repair commands completed
47320 Sep 22 23:15:14.341 INFO Pop front: ReconcileIO { id: ReconciliationId(548), op: ExtentFlush { repair_id: ReconciliationId(548), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47321 Sep 22 23:15:14.341 INFO Sent repair work, now wait for resp
47322 Sep 22 23:15:14.341 INFO [0] received reconcile message
47323 Sep 22 23:15:14.341 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(548), op: ExtentFlush { repair_id: ReconciliationId(548), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47324 Sep 22 23:15:14.341 INFO [0] client ExtentFlush { repair_id: ReconciliationId(548), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47325 Sep 22 23:15:14.341 INFO [1] received reconcile message
47326 Sep 22 23:15:14.341 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(548), op: ExtentFlush { repair_id: ReconciliationId(548), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47327 Sep 22 23:15:14.341 INFO [1] client ExtentFlush { repair_id: ReconciliationId(548), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47328 Sep 22 23:15:14.341 INFO [2] received reconcile message
47329 Sep 22 23:15:14.341 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(548), op: ExtentFlush { repair_id: ReconciliationId(548), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47330 Sep 22 23:15:14.341 INFO [2] client ExtentFlush { repair_id: ReconciliationId(548), extent_id: 10, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47331 Sep 22 23:15:14.341 DEBG 548 Flush extent 10 with f:2 g:2
47332 Sep 22 23:15:14.341 DEBG Flush just extent 10 with f:2 and g:2
47333 Sep 22 23:15:14.341 DEBG [1] It's time to notify for 548
47334 Sep 22 23:15:14.341 INFO Completion from [1] id:548 status:true
47335 Sep 22 23:15:14.341 INFO [549/752] Repair commands completed
47336 Sep 22 23:15:14.341 INFO Pop front: ReconcileIO { id: ReconciliationId(549), op: ExtentClose { repair_id: ReconciliationId(549), extent_id: 10 }, state: ClientData([New, New, New]) }
47337 Sep 22 23:15:14.341 INFO Sent repair work, now wait for resp
47338 Sep 22 23:15:14.341 INFO [0] received reconcile message
47339 Sep 22 23:15:14.341 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(549), op: ExtentClose { repair_id: ReconciliationId(549), extent_id: 10 }, state: ClientData([InProgress, New, New]) }, : downstairs
47340 Sep 22 23:15:14.341 INFO [0] client ExtentClose { repair_id: ReconciliationId(549), extent_id: 10 }
47341 Sep 22 23:15:14.341 INFO [1] received reconcile message
47342 Sep 22 23:15:14.341 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(549), op: ExtentClose { repair_id: ReconciliationId(549), extent_id: 10 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47343 Sep 22 23:15:14.341 INFO [1] client ExtentClose { repair_id: ReconciliationId(549), extent_id: 10 }
47344 Sep 22 23:15:14.341 INFO [2] received reconcile message
47345 Sep 22 23:15:14.341 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(549), op: ExtentClose { repair_id: ReconciliationId(549), extent_id: 10 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47346 Sep 22 23:15:14.341 INFO [2] client ExtentClose { repair_id: ReconciliationId(549), extent_id: 10 }
47347 Sep 22 23:15:14.342 DEBG 549 Close extent 10
47348 Sep 22 23:15:14.342 DEBG 549 Close extent 10
47349 Sep 22 23:15:14.342 DEBG 549 Close extent 10
47350 Sep 22 23:15:14.343 DEBG [2] It's time to notify for 549
47351 Sep 22 23:15:14.343 INFO Completion from [2] id:549 status:true
47352 Sep 22 23:15:14.343 INFO [550/752] Repair commands completed
47353 Sep 22 23:15:14.343 INFO Pop front: ReconcileIO { id: ReconciliationId(550), op: ExtentRepair { repair_id: ReconciliationId(550), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47354 Sep 22 23:15:14.343 INFO Sent repair work, now wait for resp
47355 Sep 22 23:15:14.343 INFO [0] received reconcile message
47356 Sep 22 23:15:14.343 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(550), op: ExtentRepair { repair_id: ReconciliationId(550), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47357 Sep 22 23:15:14.343 INFO [0] client ExtentRepair { repair_id: ReconciliationId(550), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47358 Sep 22 23:15:14.343 INFO [0] Sending repair request ReconciliationId(550)
47359 Sep 22 23:15:14.343 INFO [1] received reconcile message
47360 Sep 22 23:15:14.343 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(550), op: ExtentRepair { repair_id: ReconciliationId(550), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47361 Sep 22 23:15:14.343 INFO [1] client ExtentRepair { repair_id: ReconciliationId(550), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47362 Sep 22 23:15:14.343 INFO [1] No action required ReconciliationId(550)
47363 Sep 22 23:15:14.343 INFO [2] received reconcile message
47364 Sep 22 23:15:14.343 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(550), op: ExtentRepair { repair_id: ReconciliationId(550), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47365 Sep 22 23:15:14.343 INFO [2] client ExtentRepair { repair_id: ReconciliationId(550), extent_id: 10, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47366 Sep 22 23:15:14.343 INFO [2] No action required ReconciliationId(550)
47367 Sep 22 23:15:14.343 DEBG 550 Repair extent 10 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
47368 Sep 22 23:15:14.343 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/00A.copy"
47369 Sep 22 23:15:14.407 INFO accepted connection, remote_addr: 127.0.0.1:42941, local_addr: 127.0.0.1:46213, task: repair
47370 Sep 22 23:15:14.407 TRCE incoming request, uri: /extent/10/files, method: GET, req_id: aae45806-4bc9-48f8-a2c1-e2cf3fd80b16, remote_addr: 127.0.0.1:42941, local_addr: 127.0.0.1:46213, task: repair
47371 Sep 22 23:15:14.408 INFO request completed, latency_us: 199, response_code: 200, uri: /extent/10/files, method: GET, req_id: aae45806-4bc9-48f8-a2c1-e2cf3fd80b16, remote_addr: 127.0.0.1:42941, local_addr: 127.0.0.1:46213, task: repair
47372 Sep 22 23:15:14.408 INFO eid:10 Found repair files: ["00A", "00A.db"]
47373 Sep 22 23:15:14.408 TRCE incoming request, uri: /newextent/10/data, method: GET, req_id: c6d77998-d667-457b-8ece-8cfe4da06af2, remote_addr: 127.0.0.1:42941, local_addr: 127.0.0.1:46213, task: repair
47374 Sep 22 23:15:14.408 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/10/data, method: GET, req_id: c6d77998-d667-457b-8ece-8cfe4da06af2, remote_addr: 127.0.0.1:42941, local_addr: 127.0.0.1:46213, task: repair
47375 Sep 22 23:15:14.414 TRCE incoming request, uri: /newextent/10/db, method: GET, req_id: 0343c35f-053e-419b-8136-54c5d6584e17, remote_addr: 127.0.0.1:42941, local_addr: 127.0.0.1:46213, task: repair
47376 Sep 22 23:15:14.414 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/10/db, method: GET, req_id: 0343c35f-053e-419b-8136-54c5d6584e17, remote_addr: 127.0.0.1:42941, local_addr: 127.0.0.1:46213, task: repair
47377 Sep 22 23:15:14.415 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/00A.copy" to "/tmp/downstairs-vrx8aK6L/00/000/00A.replace"
47378 Sep 22 23:15:14.415 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47379 Sep 22 23:15:14.416 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/00A.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
47380 Sep 22 23:15:14.416 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/00A"
47381 Sep 22 23:15:14.416 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/00A.db"
47382 Sep 22 23:15:14.416 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47383 Sep 22 23:15:14.416 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/00A.replace" to "/tmp/downstairs-vrx8aK6L/00/000/00A.completed"
47384 Sep 22 23:15:14.416 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47385 Sep 22 23:15:14.416 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47386 Sep 22 23:15:14.417 DEBG [0] It's time to notify for 550
47387 Sep 22 23:15:14.417 INFO Completion from [0] id:550 status:true
47388 Sep 22 23:15:14.417 INFO [551/752] Repair commands completed
47389 Sep 22 23:15:14.417 INFO Pop front: ReconcileIO { id: ReconciliationId(551), op: ExtentReopen { repair_id: ReconciliationId(551), extent_id: 10 }, state: ClientData([New, New, New]) }
47390 Sep 22 23:15:14.417 INFO Sent repair work, now wait for resp
47391 Sep 22 23:15:14.417 INFO [0] received reconcile message
47392 Sep 22 23:15:14.417 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(551), op: ExtentReopen { repair_id: ReconciliationId(551), extent_id: 10 }, state: ClientData([InProgress, New, New]) }, : downstairs
47393 Sep 22 23:15:14.417 INFO [0] client ExtentReopen { repair_id: ReconciliationId(551), extent_id: 10 }
47394 Sep 22 23:15:14.417 INFO [1] received reconcile message
47395 Sep 22 23:15:14.417 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(551), op: ExtentReopen { repair_id: ReconciliationId(551), extent_id: 10 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47396 Sep 22 23:15:14.417 INFO [1] client ExtentReopen { repair_id: ReconciliationId(551), extent_id: 10 }
47397 Sep 22 23:15:14.417 INFO [2] received reconcile message
47398 Sep 22 23:15:14.417 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(551), op: ExtentReopen { repair_id: ReconciliationId(551), extent_id: 10 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47399 Sep 22 23:15:14.417 INFO [2] client ExtentReopen { repair_id: ReconciliationId(551), extent_id: 10 }
47400 Sep 22 23:15:14.417 DEBG 551 Reopen extent 10
47401 Sep 22 23:15:14.418 DEBG 551 Reopen extent 10
47402 Sep 22 23:15:14.418 DEBG 551 Reopen extent 10
47403 Sep 22 23:15:14.419 DEBG [2] It's time to notify for 551
47404 Sep 22 23:15:14.419 INFO Completion from [2] id:551 status:true
47405 Sep 22 23:15:14.419 INFO [552/752] Repair commands completed
47406 Sep 22 23:15:14.419 INFO Pop front: ReconcileIO { id: ReconciliationId(552), op: ExtentFlush { repair_id: ReconciliationId(552), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47407 Sep 22 23:15:14.419 INFO Sent repair work, now wait for resp
47408 Sep 22 23:15:14.419 INFO [0] received reconcile message
47409 Sep 22 23:15:14.419 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(552), op: ExtentFlush { repair_id: ReconciliationId(552), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47410 Sep 22 23:15:14.419 INFO [0] client ExtentFlush { repair_id: ReconciliationId(552), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47411 Sep 22 23:15:14.419 INFO [1] received reconcile message
47412 Sep 22 23:15:14.419 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(552), op: ExtentFlush { repair_id: ReconciliationId(552), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47413 Sep 22 23:15:14.419 INFO [1] client ExtentFlush { repair_id: ReconciliationId(552), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47414 Sep 22 23:15:14.419 INFO [2] received reconcile message
47415 Sep 22 23:15:14.419 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(552), op: ExtentFlush { repair_id: ReconciliationId(552), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47416 Sep 22 23:15:14.419 INFO [2] client ExtentFlush { repair_id: ReconciliationId(552), extent_id: 44, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47417 Sep 22 23:15:14.419 DEBG 552 Flush extent 44 with f:2 g:2
47418 Sep 22 23:15:14.419 DEBG Flush just extent 44 with f:2 and g:2
47419 Sep 22 23:15:14.419 DEBG [1] It's time to notify for 552
47420 Sep 22 23:15:14.420 INFO Completion from [1] id:552 status:true
47421 Sep 22 23:15:14.420 INFO [553/752] Repair commands completed
47422 Sep 22 23:15:14.420 INFO Pop front: ReconcileIO { id: ReconciliationId(553), op: ExtentClose { repair_id: ReconciliationId(553), extent_id: 44 }, state: ClientData([New, New, New]) }
47423 Sep 22 23:15:14.420 INFO Sent repair work, now wait for resp
47424 Sep 22 23:15:14.420 INFO [0] received reconcile message
47425 Sep 22 23:15:14.420 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(553), op: ExtentClose { repair_id: ReconciliationId(553), extent_id: 44 }, state: ClientData([InProgress, New, New]) }, : downstairs
47426 Sep 22 23:15:14.420 INFO [0] client ExtentClose { repair_id: ReconciliationId(553), extent_id: 44 }
47427 Sep 22 23:15:14.420 INFO [1] received reconcile message
47428 Sep 22 23:15:14.420 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(553), op: ExtentClose { repair_id: ReconciliationId(553), extent_id: 44 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47429 Sep 22 23:15:14.420 INFO [1] client ExtentClose { repair_id: ReconciliationId(553), extent_id: 44 }
47430 Sep 22 23:15:14.420 INFO [2] received reconcile message
47431 Sep 22 23:15:14.420 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(553), op: ExtentClose { repair_id: ReconciliationId(553), extent_id: 44 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47432 Sep 22 23:15:14.420 INFO [2] client ExtentClose { repair_id: ReconciliationId(553), extent_id: 44 }
47433 Sep 22 23:15:14.420 DEBG 553 Close extent 44
47434 Sep 22 23:15:14.420 DEBG 553 Close extent 44
47435 Sep 22 23:15:14.420 DEBG 553 Close extent 44
47436 Sep 22 23:15:14.421 DEBG [2] It's time to notify for 553
47437 Sep 22 23:15:14.421 INFO Completion from [2] id:553 status:true
47438 Sep 22 23:15:14.421 INFO [554/752] Repair commands completed
47439 Sep 22 23:15:14.421 INFO Pop front: ReconcileIO { id: ReconciliationId(554), op: ExtentRepair { repair_id: ReconciliationId(554), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47440 Sep 22 23:15:14.421 INFO Sent repair work, now wait for resp
47441 Sep 22 23:15:14.421 INFO [0] received reconcile message
47442 Sep 22 23:15:14.421 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(554), op: ExtentRepair { repair_id: ReconciliationId(554), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47443 Sep 22 23:15:14.421 INFO [0] client ExtentRepair { repair_id: ReconciliationId(554), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47444 Sep 22 23:15:14.421 INFO [0] Sending repair request ReconciliationId(554)
47445 Sep 22 23:15:14.421 INFO [1] received reconcile message
47446 Sep 22 23:15:14.421 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(554), op: ExtentRepair { repair_id: ReconciliationId(554), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47447 Sep 22 23:15:14.421 INFO [1] client ExtentRepair { repair_id: ReconciliationId(554), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47448 Sep 22 23:15:14.421 INFO [1] No action required ReconciliationId(554)
47449 Sep 22 23:15:14.421 INFO [2] received reconcile message
47450 Sep 22 23:15:14.421 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(554), op: ExtentRepair { repair_id: ReconciliationId(554), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47451 Sep 22 23:15:14.421 INFO [2] client ExtentRepair { repair_id: ReconciliationId(554), extent_id: 44, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47452 Sep 22 23:15:14.421 INFO [2] No action required ReconciliationId(554)
47453 Sep 22 23:15:14.421 DEBG 554 Repair extent 44 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
47454 Sep 22 23:15:14.421 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/02C.copy"
47455 Sep 22 23:15:14.484 INFO accepted connection, remote_addr: 127.0.0.1:50605, local_addr: 127.0.0.1:46213, task: repair
47456 Sep 22 23:15:14.484 TRCE incoming request, uri: /extent/44/files, method: GET, req_id: 16e33043-1d06-4ad0-9699-84e5d47ec2e7, remote_addr: 127.0.0.1:50605, local_addr: 127.0.0.1:46213, task: repair
47457 Sep 22 23:15:14.484 INFO request completed, latency_us: 206, response_code: 200, uri: /extent/44/files, method: GET, req_id: 16e33043-1d06-4ad0-9699-84e5d47ec2e7, remote_addr: 127.0.0.1:50605, local_addr: 127.0.0.1:46213, task: repair
47458 Sep 22 23:15:14.484 INFO eid:44 Found repair files: ["02C", "02C.db"]
47459 Sep 22 23:15:14.485 TRCE incoming request, uri: /newextent/44/data, method: GET, req_id: 45eee28d-e6ff-4d69-bde1-3c093aced218, remote_addr: 127.0.0.1:50605, local_addr: 127.0.0.1:46213, task: repair
47460 Sep 22 23:15:14.485 INFO request completed, latency_us: 256, response_code: 200, uri: /newextent/44/data, method: GET, req_id: 45eee28d-e6ff-4d69-bde1-3c093aced218, remote_addr: 127.0.0.1:50605, local_addr: 127.0.0.1:46213, task: repair
47461 Sep 22 23:15:14.490 TRCE incoming request, uri: /newextent/44/db, method: GET, req_id: b5a3db77-8da9-46e7-ad80-24a3699d85cc, remote_addr: 127.0.0.1:50605, local_addr: 127.0.0.1:46213, task: repair
47462 Sep 22 23:15:14.490 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/44/db, method: GET, req_id: b5a3db77-8da9-46e7-ad80-24a3699d85cc, remote_addr: 127.0.0.1:50605, local_addr: 127.0.0.1:46213, task: repair
47463 Sep 22 23:15:14.491 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/02C.copy" to "/tmp/downstairs-vrx8aK6L/00/000/02C.replace"
47464 Sep 22 23:15:14.491 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47465 Sep 22 23:15:14.492 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/02C.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
47466 Sep 22 23:15:14.492 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/02C"
47467 Sep 22 23:15:14.492 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/02C.db"
47468 Sep 22 23:15:14.492 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47469 Sep 22 23:15:14.492 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/02C.replace" to "/tmp/downstairs-vrx8aK6L/00/000/02C.completed"
47470 Sep 22 23:15:14.492 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47471 Sep 22 23:15:14.493 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47472 Sep 22 23:15:14.493 DEBG [0] It's time to notify for 554
47473 Sep 22 23:15:14.493 INFO Completion from [0] id:554 status:true
47474 Sep 22 23:15:14.493 INFO [555/752] Repair commands completed
47475 Sep 22 23:15:14.493 INFO Pop front: ReconcileIO { id: ReconciliationId(555), op: ExtentReopen { repair_id: ReconciliationId(555), extent_id: 44 }, state: ClientData([New, New, New]) }
47476 Sep 22 23:15:14.493 INFO Sent repair work, now wait for resp
47477 Sep 22 23:15:14.493 INFO [0] received reconcile message
47478 Sep 22 23:15:14.493 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(555), op: ExtentReopen { repair_id: ReconciliationId(555), extent_id: 44 }, state: ClientData([InProgress, New, New]) }, : downstairs
47479 Sep 22 23:15:14.493 INFO [0] client ExtentReopen { repair_id: ReconciliationId(555), extent_id: 44 }
47480 Sep 22 23:15:14.493 INFO [1] received reconcile message
47481 Sep 22 23:15:14.493 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(555), op: ExtentReopen { repair_id: ReconciliationId(555), extent_id: 44 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47482 Sep 22 23:15:14.493 INFO [1] client ExtentReopen { repair_id: ReconciliationId(555), extent_id: 44 }
47483 Sep 22 23:15:14.493 INFO [2] received reconcile message
47484 Sep 22 23:15:14.493 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(555), op: ExtentReopen { repair_id: ReconciliationId(555), extent_id: 44 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47485 Sep 22 23:15:14.493 INFO [2] client ExtentReopen { repair_id: ReconciliationId(555), extent_id: 44 }
47486 Sep 22 23:15:14.493 DEBG 555 Reopen extent 44
47487 Sep 22 23:15:14.494 DEBG 555 Reopen extent 44
47488 Sep 22 23:15:14.494 DEBG 555 Reopen extent 44
47489 Sep 22 23:15:14.495 DEBG [2] It's time to notify for 555
47490 Sep 22 23:15:14.495 INFO Completion from [2] id:555 status:true
47491 Sep 22 23:15:14.495 INFO [556/752] Repair commands completed
47492 Sep 22 23:15:14.495 INFO Pop front: ReconcileIO { id: ReconciliationId(556), op: ExtentFlush { repair_id: ReconciliationId(556), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47493 Sep 22 23:15:14.495 INFO Sent repair work, now wait for resp
47494 Sep 22 23:15:14.495 INFO [0] received reconcile message
47495 Sep 22 23:15:14.495 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(556), op: ExtentFlush { repair_id: ReconciliationId(556), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47496 Sep 22 23:15:14.495 INFO [0] client ExtentFlush { repair_id: ReconciliationId(556), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47497 Sep 22 23:15:14.495 INFO [1] received reconcile message
47498 Sep 22 23:15:14.495 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(556), op: ExtentFlush { repair_id: ReconciliationId(556), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47499 Sep 22 23:15:14.495 INFO [1] client ExtentFlush { repair_id: ReconciliationId(556), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47500 Sep 22 23:15:14.495 INFO [2] received reconcile message
47501 Sep 22 23:15:14.495 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(556), op: ExtentFlush { repair_id: ReconciliationId(556), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47502 Sep 22 23:15:14.495 INFO [2] client ExtentFlush { repair_id: ReconciliationId(556), extent_id: 59, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47503 Sep 22 23:15:14.495 DEBG 556 Flush extent 59 with f:2 g:2
47504 Sep 22 23:15:14.495 DEBG Flush just extent 59 with f:2 and g:2
47505 Sep 22 23:15:14.496 DEBG [1] It's time to notify for 556
47506 Sep 22 23:15:14.496 INFO Completion from [1] id:556 status:true
47507 Sep 22 23:15:14.496 INFO [557/752] Repair commands completed
47508 Sep 22 23:15:14.496 INFO Pop front: ReconcileIO { id: ReconciliationId(557), op: ExtentClose { repair_id: ReconciliationId(557), extent_id: 59 }, state: ClientData([New, New, New]) }
47509 Sep 22 23:15:14.496 INFO Sent repair work, now wait for resp
47510 Sep 22 23:15:14.496 INFO [0] received reconcile message
47511 Sep 22 23:15:14.496 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(557), op: ExtentClose { repair_id: ReconciliationId(557), extent_id: 59 }, state: ClientData([InProgress, New, New]) }, : downstairs
47512 Sep 22 23:15:14.496 INFO [0] client ExtentClose { repair_id: ReconciliationId(557), extent_id: 59 }
47513 Sep 22 23:15:14.496 INFO [1] received reconcile message
47514 Sep 22 23:15:14.496 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(557), op: ExtentClose { repair_id: ReconciliationId(557), extent_id: 59 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47515 Sep 22 23:15:14.496 INFO [1] client ExtentClose { repair_id: ReconciliationId(557), extent_id: 59 }
47516 Sep 22 23:15:14.496 INFO [2] received reconcile message
47517 Sep 22 23:15:14.496 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(557), op: ExtentClose { repair_id: ReconciliationId(557), extent_id: 59 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47518 Sep 22 23:15:14.496 INFO [2] client ExtentClose { repair_id: ReconciliationId(557), extent_id: 59 }
47519 Sep 22 23:15:14.496 DEBG 557 Close extent 59
47520 Sep 22 23:15:14.496 DEBG 557 Close extent 59
47521 Sep 22 23:15:14.497 DEBG 557 Close extent 59
47522 Sep 22 23:15:14.497 DEBG [2] It's time to notify for 557
47523 Sep 22 23:15:14.497 INFO Completion from [2] id:557 status:true
47524 Sep 22 23:15:14.497 INFO [558/752] Repair commands completed
47525 Sep 22 23:15:14.497 INFO Pop front: ReconcileIO { id: ReconciliationId(558), op: ExtentRepair { repair_id: ReconciliationId(558), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47526 Sep 22 23:15:14.497 INFO Sent repair work, now wait for resp
47527 Sep 22 23:15:14.497 INFO [0] received reconcile message
47528 Sep 22 23:15:14.497 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(558), op: ExtentRepair { repair_id: ReconciliationId(558), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47529 Sep 22 23:15:14.497 INFO [0] client ExtentRepair { repair_id: ReconciliationId(558), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47530 Sep 22 23:15:14.497 INFO [0] Sending repair request ReconciliationId(558)
47531 Sep 22 23:15:14.497 INFO [1] received reconcile message
47532 Sep 22 23:15:14.497 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(558), op: ExtentRepair { repair_id: ReconciliationId(558), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47533 Sep 22 23:15:14.497 INFO [1] client ExtentRepair { repair_id: ReconciliationId(558), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47534 Sep 22 23:15:14.497 INFO [1] No action required ReconciliationId(558)
47535 Sep 22 23:15:14.497 INFO [2] received reconcile message
47536 Sep 22 23:15:14.497 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(558), op: ExtentRepair { repair_id: ReconciliationId(558), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47537 Sep 22 23:15:14.497 INFO [2] client ExtentRepair { repair_id: ReconciliationId(558), extent_id: 59, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47538 Sep 22 23:15:14.497 INFO [2] No action required ReconciliationId(558)
47539 Sep 22 23:15:14.497 DEBG 558 Repair extent 59 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
47540 Sep 22 23:15:14.498 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/03B.copy"
47541 Sep 22 23:15:14.562 INFO accepted connection, remote_addr: 127.0.0.1:60441, local_addr: 127.0.0.1:46213, task: repair
47542 Sep 22 23:15:14.563 TRCE incoming request, uri: /extent/59/files, method: GET, req_id: 397eae9c-467b-46b2-9403-6cd8e1698252, remote_addr: 127.0.0.1:60441, local_addr: 127.0.0.1:46213, task: repair
47543 Sep 22 23:15:14.563 INFO request completed, latency_us: 193, response_code: 200, uri: /extent/59/files, method: GET, req_id: 397eae9c-467b-46b2-9403-6cd8e1698252, remote_addr: 127.0.0.1:60441, local_addr: 127.0.0.1:46213, task: repair
47544 Sep 22 23:15:14.563 INFO eid:59 Found repair files: ["03B", "03B.db"]
47545 Sep 22 23:15:14.563 TRCE incoming request, uri: /newextent/59/data, method: GET, req_id: ed35b770-5183-4d65-a620-0cb4724c0d49, remote_addr: 127.0.0.1:60441, local_addr: 127.0.0.1:46213, task: repair
47546 Sep 22 23:15:14.564 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/59/data, method: GET, req_id: ed35b770-5183-4d65-a620-0cb4724c0d49, remote_addr: 127.0.0.1:60441, local_addr: 127.0.0.1:46213, task: repair
47547 Sep 22 23:15:14.569 TRCE incoming request, uri: /newextent/59/db, method: GET, req_id: e3714a00-51f3-4de4-9bbf-9b4fa3f91c63, remote_addr: 127.0.0.1:60441, local_addr: 127.0.0.1:46213, task: repair
47548 Sep 22 23:15:14.569 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/59/db, method: GET, req_id: e3714a00-51f3-4de4-9bbf-9b4fa3f91c63, remote_addr: 127.0.0.1:60441, local_addr: 127.0.0.1:46213, task: repair
47549 Sep 22 23:15:14.570 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/03B.copy" to "/tmp/downstairs-vrx8aK6L/00/000/03B.replace"
47550 Sep 22 23:15:14.570 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47551 Sep 22 23:15:14.571 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/03B.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
47552 Sep 22 23:15:14.571 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/03B"
47553 Sep 22 23:15:14.571 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/03B.db"
47554 Sep 22 23:15:14.571 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47555 Sep 22 23:15:14.571 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/03B.replace" to "/tmp/downstairs-vrx8aK6L/00/000/03B.completed"
47556 Sep 22 23:15:14.571 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47557 Sep 22 23:15:14.572 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47558 Sep 22 23:15:14.572 DEBG [0] It's time to notify for 558
47559 Sep 22 23:15:14.572 INFO Completion from [0] id:558 status:true
47560 Sep 22 23:15:14.572 INFO [559/752] Repair commands completed
47561 Sep 22 23:15:14.572 INFO Pop front: ReconcileIO { id: ReconciliationId(559), op: ExtentReopen { repair_id: ReconciliationId(559), extent_id: 59 }, state: ClientData([New, New, New]) }
47562 Sep 22 23:15:14.572 INFO Sent repair work, now wait for resp
47563 Sep 22 23:15:14.572 INFO [0] received reconcile message
47564 Sep 22 23:15:14.572 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(559), op: ExtentReopen { repair_id: ReconciliationId(559), extent_id: 59 }, state: ClientData([InProgress, New, New]) }, : downstairs
47565 Sep 22 23:15:14.572 INFO [0] client ExtentReopen { repair_id: ReconciliationId(559), extent_id: 59 }
47566 Sep 22 23:15:14.572 INFO [1] received reconcile message
47567 Sep 22 23:15:14.572 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(559), op: ExtentReopen { repair_id: ReconciliationId(559), extent_id: 59 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47568 Sep 22 23:15:14.572 INFO [1] client ExtentReopen { repair_id: ReconciliationId(559), extent_id: 59 }
47569 Sep 22 23:15:14.572 INFO [2] received reconcile message
47570 Sep 22 23:15:14.572 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(559), op: ExtentReopen { repair_id: ReconciliationId(559), extent_id: 59 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47571 Sep 22 23:15:14.572 INFO [2] client ExtentReopen { repair_id: ReconciliationId(559), extent_id: 59 }
47572 Sep 22 23:15:14.572 DEBG 559 Reopen extent 59
47573 Sep 22 23:15:14.573 DEBG 559 Reopen extent 59
47574 Sep 22 23:15:14.573 DEBG 559 Reopen extent 59
47575 Sep 22 23:15:14.574 DEBG [2] It's time to notify for 559
47576 Sep 22 23:15:14.574 INFO Completion from [2] id:559 status:true
47577 Sep 22 23:15:14.574 INFO [560/752] Repair commands completed
47578 Sep 22 23:15:14.574 INFO Pop front: ReconcileIO { id: ReconciliationId(560), op: ExtentFlush { repair_id: ReconciliationId(560), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47579 Sep 22 23:15:14.574 INFO Sent repair work, now wait for resp
47580 Sep 22 23:15:14.574 INFO [0] received reconcile message
47581 Sep 22 23:15:14.574 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(560), op: ExtentFlush { repair_id: ReconciliationId(560), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47582 Sep 22 23:15:14.574 INFO [0] client ExtentFlush { repair_id: ReconciliationId(560), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47583 Sep 22 23:15:14.574 INFO [1] received reconcile message
47584 Sep 22 23:15:14.574 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(560), op: ExtentFlush { repair_id: ReconciliationId(560), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47585 Sep 22 23:15:14.574 INFO [1] client ExtentFlush { repair_id: ReconciliationId(560), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47586 Sep 22 23:15:14.574 INFO [2] received reconcile message
47587 Sep 22 23:15:14.574 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(560), op: ExtentFlush { repair_id: ReconciliationId(560), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47588 Sep 22 23:15:14.574 INFO [2] client ExtentFlush { repair_id: ReconciliationId(560), extent_id: 154, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47589 Sep 22 23:15:14.574 DEBG 560 Flush extent 154 with f:2 g:2
47590 Sep 22 23:15:14.574 DEBG Flush just extent 154 with f:2 and g:2
47591 Sep 22 23:15:14.574 DEBG [1] It's time to notify for 560
47592 Sep 22 23:15:14.575 INFO Completion from [1] id:560 status:true
47593 Sep 22 23:15:14.575 INFO [561/752] Repair commands completed
47594 Sep 22 23:15:14.575 INFO Pop front: ReconcileIO { id: ReconciliationId(561), op: ExtentClose { repair_id: ReconciliationId(561), extent_id: 154 }, state: ClientData([New, New, New]) }
47595 Sep 22 23:15:14.575 INFO Sent repair work, now wait for resp
47596 Sep 22 23:15:14.575 INFO [0] received reconcile message
47597 Sep 22 23:15:14.575 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(561), op: ExtentClose { repair_id: ReconciliationId(561), extent_id: 154 }, state: ClientData([InProgress, New, New]) }, : downstairs
47598 Sep 22 23:15:14.575 INFO [0] client ExtentClose { repair_id: ReconciliationId(561), extent_id: 154 }
47599 Sep 22 23:15:14.575 INFO [1] received reconcile message
47600 Sep 22 23:15:14.575 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(561), op: ExtentClose { repair_id: ReconciliationId(561), extent_id: 154 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47601 Sep 22 23:15:14.575 INFO [1] client ExtentClose { repair_id: ReconciliationId(561), extent_id: 154 }
47602 Sep 22 23:15:14.575 INFO [2] received reconcile message
47603 Sep 22 23:15:14.575 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(561), op: ExtentClose { repair_id: ReconciliationId(561), extent_id: 154 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47604 Sep 22 23:15:14.575 INFO [2] client ExtentClose { repair_id: ReconciliationId(561), extent_id: 154 }
47605 Sep 22 23:15:14.575 DEBG 561 Close extent 154
47606 Sep 22 23:15:14.575 DEBG 561 Close extent 154
47607 Sep 22 23:15:14.575 DEBG 561 Close extent 154
47608 Sep 22 23:15:14.576 DEBG [2] It's time to notify for 561
47609 Sep 22 23:15:14.576 INFO Completion from [2] id:561 status:true
47610 Sep 22 23:15:14.576 INFO [562/752] Repair commands completed
47611 Sep 22 23:15:14.576 INFO Pop front: ReconcileIO { id: ReconciliationId(562), op: ExtentRepair { repair_id: ReconciliationId(562), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47612 Sep 22 23:15:14.576 INFO Sent repair work, now wait for resp
47613 Sep 22 23:15:14.576 INFO [0] received reconcile message
47614 Sep 22 23:15:14.576 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(562), op: ExtentRepair { repair_id: ReconciliationId(562), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47615 Sep 22 23:15:14.576 INFO [0] client ExtentRepair { repair_id: ReconciliationId(562), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47616 Sep 22 23:15:14.576 INFO [0] Sending repair request ReconciliationId(562)
47617 Sep 22 23:15:14.576 INFO [1] received reconcile message
47618 Sep 22 23:15:14.576 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(562), op: ExtentRepair { repair_id: ReconciliationId(562), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47619 Sep 22 23:15:14.576 INFO [1] client ExtentRepair { repair_id: ReconciliationId(562), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47620 Sep 22 23:15:14.576 INFO [1] No action required ReconciliationId(562)
47621 Sep 22 23:15:14.576 INFO [2] received reconcile message
47622 Sep 22 23:15:14.576 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(562), op: ExtentRepair { repair_id: ReconciliationId(562), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47623 Sep 22 23:15:14.576 INFO [2] client ExtentRepair { repair_id: ReconciliationId(562), extent_id: 154, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47624 Sep 22 23:15:14.576 INFO [2] No action required ReconciliationId(562)
47625 Sep 22 23:15:14.576 DEBG 562 Repair extent 154 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
47626 Sep 22 23:15:14.576 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/09A.copy"
47627 Sep 22 23:15:14.641 INFO accepted connection, remote_addr: 127.0.0.1:35750, local_addr: 127.0.0.1:46213, task: repair
47628 Sep 22 23:15:14.641 TRCE incoming request, uri: /extent/154/files, method: GET, req_id: 5d637d86-796a-4103-b055-fdd5a151823a, remote_addr: 127.0.0.1:35750, local_addr: 127.0.0.1:46213, task: repair
47629 Sep 22 23:15:14.641 INFO request completed, latency_us: 210, response_code: 200, uri: /extent/154/files, method: GET, req_id: 5d637d86-796a-4103-b055-fdd5a151823a, remote_addr: 127.0.0.1:35750, local_addr: 127.0.0.1:46213, task: repair
47630 Sep 22 23:15:14.641 INFO eid:154 Found repair files: ["09A", "09A.db"]
47631 Sep 22 23:15:14.642 TRCE incoming request, uri: /newextent/154/data, method: GET, req_id: 5fa35195-9700-48d6-a03e-5b0fc9bcbdc4, remote_addr: 127.0.0.1:35750, local_addr: 127.0.0.1:46213, task: repair
47632 Sep 22 23:15:14.642 INFO request completed, latency_us: 253, response_code: 200, uri: /newextent/154/data, method: GET, req_id: 5fa35195-9700-48d6-a03e-5b0fc9bcbdc4, remote_addr: 127.0.0.1:35750, local_addr: 127.0.0.1:46213, task: repair
47633 Sep 22 23:15:14.647 TRCE incoming request, uri: /newextent/154/db, method: GET, req_id: 8b970be9-6d7a-4474-9868-0a00299e5940, remote_addr: 127.0.0.1:35750, local_addr: 127.0.0.1:46213, task: repair
47634 Sep 22 23:15:14.647 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/154/db, method: GET, req_id: 8b970be9-6d7a-4474-9868-0a00299e5940, remote_addr: 127.0.0.1:35750, local_addr: 127.0.0.1:46213, task: repair
47635 Sep 22 23:15:14.649 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/09A.copy" to "/tmp/downstairs-vrx8aK6L/00/000/09A.replace"
47636 Sep 22 23:15:14.649 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47637 Sep 22 23:15:14.649 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/09A.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
47638 Sep 22 23:15:14.650 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/09A"
47639 Sep 22 23:15:14.650 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/09A.db"
47640 Sep 22 23:15:14.650 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47641 Sep 22 23:15:14.650 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/09A.replace" to "/tmp/downstairs-vrx8aK6L/00/000/09A.completed"
47642 Sep 22 23:15:14.650 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47643 Sep 22 23:15:14.650 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47644 Sep 22 23:15:14.650 DEBG [0] It's time to notify for 562
47645 Sep 22 23:15:14.650 INFO Completion from [0] id:562 status:true
47646 Sep 22 23:15:14.650 INFO [563/752] Repair commands completed
47647 Sep 22 23:15:14.650 INFO Pop front: ReconcileIO { id: ReconciliationId(563), op: ExtentReopen { repair_id: ReconciliationId(563), extent_id: 154 }, state: ClientData([New, New, New]) }
47648 Sep 22 23:15:14.650 INFO Sent repair work, now wait for resp
47649 Sep 22 23:15:14.650 INFO [0] received reconcile message
47650 Sep 22 23:15:14.650 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(563), op: ExtentReopen { repair_id: ReconciliationId(563), extent_id: 154 }, state: ClientData([InProgress, New, New]) }, : downstairs
47651 Sep 22 23:15:14.650 INFO [0] client ExtentReopen { repair_id: ReconciliationId(563), extent_id: 154 }
47652 Sep 22 23:15:14.650 INFO [1] received reconcile message
47653 Sep 22 23:15:14.650 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(563), op: ExtentReopen { repair_id: ReconciliationId(563), extent_id: 154 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47654 Sep 22 23:15:14.650 INFO [1] client ExtentReopen { repair_id: ReconciliationId(563), extent_id: 154 }
47655 Sep 22 23:15:14.650 INFO [2] received reconcile message
47656 Sep 22 23:15:14.650 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(563), op: ExtentReopen { repair_id: ReconciliationId(563), extent_id: 154 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47657 Sep 22 23:15:14.650 INFO [2] client ExtentReopen { repair_id: ReconciliationId(563), extent_id: 154 }
47658 Sep 22 23:15:14.651 DEBG 563 Reopen extent 154
47659 Sep 22 23:15:14.651 DEBG 563 Reopen extent 154
47660 Sep 22 23:15:14.652 DEBG 563 Reopen extent 154
47661 Sep 22 23:15:14.652 DEBG [2] It's time to notify for 563
47662 Sep 22 23:15:14.652 INFO Completion from [2] id:563 status:true
47663 Sep 22 23:15:14.652 INFO [564/752] Repair commands completed
47664 Sep 22 23:15:14.652 INFO Pop front: ReconcileIO { id: ReconciliationId(564), op: ExtentFlush { repair_id: ReconciliationId(564), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47665 Sep 22 23:15:14.652 INFO Sent repair work, now wait for resp
47666 Sep 22 23:15:14.652 INFO [0] received reconcile message
47667 Sep 22 23:15:14.652 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(564), op: ExtentFlush { repair_id: ReconciliationId(564), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47668 Sep 22 23:15:14.652 INFO [0] client ExtentFlush { repair_id: ReconciliationId(564), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47669 Sep 22 23:15:14.652 INFO [1] received reconcile message
47670 Sep 22 23:15:14.653 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(564), op: ExtentFlush { repair_id: ReconciliationId(564), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47671 Sep 22 23:15:14.653 INFO [1] client ExtentFlush { repair_id: ReconciliationId(564), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47672 Sep 22 23:15:14.653 INFO [2] received reconcile message
47673 Sep 22 23:15:14.653 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(564), op: ExtentFlush { repair_id: ReconciliationId(564), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47674 Sep 22 23:15:14.653 INFO [2] client ExtentFlush { repair_id: ReconciliationId(564), extent_id: 159, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47675 Sep 22 23:15:14.653 DEBG 564 Flush extent 159 with f:2 g:2
47676 Sep 22 23:15:14.653 DEBG Flush just extent 159 with f:2 and g:2
47677 Sep 22 23:15:14.653 DEBG [1] It's time to notify for 564
47678 Sep 22 23:15:14.653 INFO Completion from [1] id:564 status:true
47679 Sep 22 23:15:14.653 INFO [565/752] Repair commands completed
47680 Sep 22 23:15:14.653 INFO Pop front: ReconcileIO { id: ReconciliationId(565), op: ExtentClose { repair_id: ReconciliationId(565), extent_id: 159 }, state: ClientData([New, New, New]) }
47681 Sep 22 23:15:14.653 INFO Sent repair work, now wait for resp
47682 Sep 22 23:15:14.653 INFO [0] received reconcile message
47683 Sep 22 23:15:14.653 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(565), op: ExtentClose { repair_id: ReconciliationId(565), extent_id: 159 }, state: ClientData([InProgress, New, New]) }, : downstairs
47684 Sep 22 23:15:14.653 INFO [0] client ExtentClose { repair_id: ReconciliationId(565), extent_id: 159 }
47685 Sep 22 23:15:14.653 INFO [1] received reconcile message
47686 Sep 22 23:15:14.653 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(565), op: ExtentClose { repair_id: ReconciliationId(565), extent_id: 159 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47687 Sep 22 23:15:14.653 INFO [1] client ExtentClose { repair_id: ReconciliationId(565), extent_id: 159 }
47688 Sep 22 23:15:14.653 INFO [2] received reconcile message
47689 Sep 22 23:15:14.653 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(565), op: ExtentClose { repair_id: ReconciliationId(565), extent_id: 159 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47690 Sep 22 23:15:14.653 INFO [2] client ExtentClose { repair_id: ReconciliationId(565), extent_id: 159 }
47691 Sep 22 23:15:14.653 DEBG 565 Close extent 159
47692 Sep 22 23:15:14.654 DEBG 565 Close extent 159
47693 Sep 22 23:15:14.654 DEBG 565 Close extent 159
47694 Sep 22 23:15:14.654 DEBG [2] It's time to notify for 565
47695 Sep 22 23:15:14.654 INFO Completion from [2] id:565 status:true
47696 Sep 22 23:15:14.654 INFO [566/752] Repair commands completed
47697 Sep 22 23:15:14.654 INFO Pop front: ReconcileIO { id: ReconciliationId(566), op: ExtentRepair { repair_id: ReconciliationId(566), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47698 Sep 22 23:15:14.654 INFO Sent repair work, now wait for resp
47699 Sep 22 23:15:14.654 INFO [0] received reconcile message
47700 Sep 22 23:15:14.654 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(566), op: ExtentRepair { repair_id: ReconciliationId(566), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47701 Sep 22 23:15:14.654 INFO [0] client ExtentRepair { repair_id: ReconciliationId(566), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47702 Sep 22 23:15:14.654 INFO [0] Sending repair request ReconciliationId(566)
47703 Sep 22 23:15:14.654 INFO [1] received reconcile message
47704 Sep 22 23:15:14.654 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(566), op: ExtentRepair { repair_id: ReconciliationId(566), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47705 Sep 22 23:15:14.655 INFO [1] client ExtentRepair { repair_id: ReconciliationId(566), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47706 Sep 22 23:15:14.655 INFO [1] No action required ReconciliationId(566)
47707 Sep 22 23:15:14.655 INFO [2] received reconcile message
47708 Sep 22 23:15:14.655 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(566), op: ExtentRepair { repair_id: ReconciliationId(566), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47709 Sep 22 23:15:14.655 INFO [2] client ExtentRepair { repair_id: ReconciliationId(566), extent_id: 159, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47710 Sep 22 23:15:14.655 INFO [2] No action required ReconciliationId(566)
47711 Sep 22 23:15:14.655 DEBG 566 Repair extent 159 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
47712 Sep 22 23:15:14.655 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/09F.copy"
47713 Sep 22 23:15:14.717 INFO accepted connection, remote_addr: 127.0.0.1:56877, local_addr: 127.0.0.1:46213, task: repair
47714 Sep 22 23:15:14.717 TRCE incoming request, uri: /extent/159/files, method: GET, req_id: 5ea05213-b42a-4633-8ab3-c1d96e620c84, remote_addr: 127.0.0.1:56877, local_addr: 127.0.0.1:46213, task: repair
47715 Sep 22 23:15:14.717 INFO request completed, latency_us: 197, response_code: 200, uri: /extent/159/files, method: GET, req_id: 5ea05213-b42a-4633-8ab3-c1d96e620c84, remote_addr: 127.0.0.1:56877, local_addr: 127.0.0.1:46213, task: repair
47716 Sep 22 23:15:14.718 INFO eid:159 Found repair files: ["09F", "09F.db"]
47717 Sep 22 23:15:14.718 TRCE incoming request, uri: /newextent/159/data, method: GET, req_id: 23e998fb-99a1-448c-8c04-2d6488a752c0, remote_addr: 127.0.0.1:56877, local_addr: 127.0.0.1:46213, task: repair
47718 Sep 22 23:15:14.718 INFO request completed, latency_us: 314, response_code: 200, uri: /newextent/159/data, method: GET, req_id: 23e998fb-99a1-448c-8c04-2d6488a752c0, remote_addr: 127.0.0.1:56877, local_addr: 127.0.0.1:46213, task: repair
47719 Sep 22 23:15:14.723 TRCE incoming request, uri: /newextent/159/db, method: GET, req_id: addef73c-1b6c-4b62-a7df-b01183742f63, remote_addr: 127.0.0.1:56877, local_addr: 127.0.0.1:46213, task: repair
47720 Sep 22 23:15:14.724 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/159/db, method: GET, req_id: addef73c-1b6c-4b62-a7df-b01183742f63, remote_addr: 127.0.0.1:56877, local_addr: 127.0.0.1:46213, task: repair
47721 Sep 22 23:15:14.725 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/09F.copy" to "/tmp/downstairs-vrx8aK6L/00/000/09F.replace"
47722 Sep 22 23:15:14.725 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47723 Sep 22 23:15:14.725 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/09F.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
47724 Sep 22 23:15:14.726 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/09F"
47725 Sep 22 23:15:14.726 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/09F.db"
47726 Sep 22 23:15:14.726 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47727 Sep 22 23:15:14.726 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/09F.replace" to "/tmp/downstairs-vrx8aK6L/00/000/09F.completed"
47728 Sep 22 23:15:14.726 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47729 Sep 22 23:15:14.726 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47730 Sep 22 23:15:14.726 DEBG [0] It's time to notify for 566
47731 Sep 22 23:15:14.726 INFO Completion from [0] id:566 status:true
47732 Sep 22 23:15:14.726 INFO [567/752] Repair commands completed
47733 Sep 22 23:15:14.726 INFO Pop front: ReconcileIO { id: ReconciliationId(567), op: ExtentReopen { repair_id: ReconciliationId(567), extent_id: 159 }, state: ClientData([New, New, New]) }
47734 Sep 22 23:15:14.726 INFO Sent repair work, now wait for resp
47735 Sep 22 23:15:14.726 INFO [0] received reconcile message
47736 Sep 22 23:15:14.726 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(567), op: ExtentReopen { repair_id: ReconciliationId(567), extent_id: 159 }, state: ClientData([InProgress, New, New]) }, : downstairs
47737 Sep 22 23:15:14.726 INFO [0] client ExtentReopen { repair_id: ReconciliationId(567), extent_id: 159 }
47738 Sep 22 23:15:14.726 INFO [1] received reconcile message
47739 Sep 22 23:15:14.726 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(567), op: ExtentReopen { repair_id: ReconciliationId(567), extent_id: 159 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47740 Sep 22 23:15:14.726 INFO [1] client ExtentReopen { repair_id: ReconciliationId(567), extent_id: 159 }
47741 Sep 22 23:15:14.727 INFO [2] received reconcile message
47742 Sep 22 23:15:14.727 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(567), op: ExtentReopen { repair_id: ReconciliationId(567), extent_id: 159 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47743 Sep 22 23:15:14.727 INFO [2] client ExtentReopen { repair_id: ReconciliationId(567), extent_id: 159 }
47744 Sep 22 23:15:14.727 DEBG 567 Reopen extent 159
47745 Sep 22 23:15:14.727 DEBG 567 Reopen extent 159
47746 Sep 22 23:15:14.728 DEBG 567 Reopen extent 159
47747 Sep 22 23:15:14.728 DEBG [2] It's time to notify for 567
47748 Sep 22 23:15:14.728 INFO Completion from [2] id:567 status:true
47749 Sep 22 23:15:14.728 INFO [568/752] Repair commands completed
47750 Sep 22 23:15:14.728 INFO Pop front: ReconcileIO { id: ReconciliationId(568), op: ExtentFlush { repair_id: ReconciliationId(568), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47751 Sep 22 23:15:14.728 INFO Sent repair work, now wait for resp
47752 Sep 22 23:15:14.728 INFO [0] received reconcile message
47753 Sep 22 23:15:14.728 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(568), op: ExtentFlush { repair_id: ReconciliationId(568), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47754 Sep 22 23:15:14.728 INFO [0] client ExtentFlush { repair_id: ReconciliationId(568), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47755 Sep 22 23:15:14.729 INFO [1] received reconcile message
47756 Sep 22 23:15:14.729 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(568), op: ExtentFlush { repair_id: ReconciliationId(568), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47757 Sep 22 23:15:14.729 INFO [1] client ExtentFlush { repair_id: ReconciliationId(568), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47758 Sep 22 23:15:14.729 INFO [2] received reconcile message
47759 Sep 22 23:15:14.729 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(568), op: ExtentFlush { repair_id: ReconciliationId(568), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47760 Sep 22 23:15:14.729 INFO [2] client ExtentFlush { repair_id: ReconciliationId(568), extent_id: 31, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47761 Sep 22 23:15:14.729 DEBG 568 Flush extent 31 with f:2 g:2
47762 Sep 22 23:15:14.729 DEBG Flush just extent 31 with f:2 and g:2
47763 Sep 22 23:15:14.729 DEBG [1] It's time to notify for 568
47764 Sep 22 23:15:14.729 INFO Completion from [1] id:568 status:true
47765 Sep 22 23:15:14.729 INFO [569/752] Repair commands completed
47766 Sep 22 23:15:14.729 INFO Pop front: ReconcileIO { id: ReconciliationId(569), op: ExtentClose { repair_id: ReconciliationId(569), extent_id: 31 }, state: ClientData([New, New, New]) }
47767 Sep 22 23:15:14.729 INFO Sent repair work, now wait for resp
47768 Sep 22 23:15:14.729 INFO [0] received reconcile message
47769 Sep 22 23:15:14.729 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(569), op: ExtentClose { repair_id: ReconciliationId(569), extent_id: 31 }, state: ClientData([InProgress, New, New]) }, : downstairs
47770 Sep 22 23:15:14.729 INFO [0] client ExtentClose { repair_id: ReconciliationId(569), extent_id: 31 }
47771 Sep 22 23:15:14.729 INFO [1] received reconcile message
47772 Sep 22 23:15:14.729 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(569), op: ExtentClose { repair_id: ReconciliationId(569), extent_id: 31 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47773 Sep 22 23:15:14.729 INFO [1] client ExtentClose { repair_id: ReconciliationId(569), extent_id: 31 }
47774 Sep 22 23:15:14.729 INFO [2] received reconcile message
47775 Sep 22 23:15:14.729 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(569), op: ExtentClose { repair_id: ReconciliationId(569), extent_id: 31 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47776 Sep 22 23:15:14.729 INFO [2] client ExtentClose { repair_id: ReconciliationId(569), extent_id: 31 }
47777 Sep 22 23:15:14.729 DEBG 569 Close extent 31
47778 Sep 22 23:15:14.730 DEBG 569 Close extent 31
47779 Sep 22 23:15:14.730 DEBG 569 Close extent 31
47780 Sep 22 23:15:14.730 DEBG [2] It's time to notify for 569
47781 Sep 22 23:15:14.730 INFO Completion from [2] id:569 status:true
47782 Sep 22 23:15:14.730 INFO [570/752] Repair commands completed
47783 Sep 22 23:15:14.730 INFO Pop front: ReconcileIO { id: ReconciliationId(570), op: ExtentRepair { repair_id: ReconciliationId(570), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47784 Sep 22 23:15:14.730 INFO Sent repair work, now wait for resp
47785 Sep 22 23:15:14.730 INFO [0] received reconcile message
47786 Sep 22 23:15:14.730 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(570), op: ExtentRepair { repair_id: ReconciliationId(570), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47787 Sep 22 23:15:14.730 INFO [0] client ExtentRepair { repair_id: ReconciliationId(570), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47788 Sep 22 23:15:14.730 INFO [0] Sending repair request ReconciliationId(570)
47789 Sep 22 23:15:14.731 INFO [1] received reconcile message
47790 Sep 22 23:15:14.731 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(570), op: ExtentRepair { repair_id: ReconciliationId(570), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47791 Sep 22 23:15:14.731 INFO [1] client ExtentRepair { repair_id: ReconciliationId(570), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47792 Sep 22 23:15:14.731 INFO [1] No action required ReconciliationId(570)
47793 Sep 22 23:15:14.731 INFO [2] received reconcile message
47794 Sep 22 23:15:14.731 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(570), op: ExtentRepair { repair_id: ReconciliationId(570), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47795 Sep 22 23:15:14.731 INFO [2] client ExtentRepair { repair_id: ReconciliationId(570), extent_id: 31, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47796 Sep 22 23:15:14.731 INFO [2] No action required ReconciliationId(570)
47797 Sep 22 23:15:14.731 DEBG 570 Repair extent 31 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
47798 Sep 22 23:15:14.731 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/01F.copy"
47799 Sep 22 23:15:14.794 INFO accepted connection, remote_addr: 127.0.0.1:56105, local_addr: 127.0.0.1:46213, task: repair
47800 Sep 22 23:15:14.794 TRCE incoming request, uri: /extent/31/files, method: GET, req_id: d56d3b6d-a0ed-45b0-aca4-a556ee13558e, remote_addr: 127.0.0.1:56105, local_addr: 127.0.0.1:46213, task: repair
47801 Sep 22 23:15:14.794 INFO request completed, latency_us: 194, response_code: 200, uri: /extent/31/files, method: GET, req_id: d56d3b6d-a0ed-45b0-aca4-a556ee13558e, remote_addr: 127.0.0.1:56105, local_addr: 127.0.0.1:46213, task: repair
47802 Sep 22 23:15:14.794 INFO eid:31 Found repair files: ["01F", "01F.db"]
47803 Sep 22 23:15:14.795 TRCE incoming request, uri: /newextent/31/data, method: GET, req_id: 8e798a32-233e-46fe-a1e8-54c5f216fe80, remote_addr: 127.0.0.1:56105, local_addr: 127.0.0.1:46213, task: repair
47804 Sep 22 23:15:14.795 INFO request completed, latency_us: 252, response_code: 200, uri: /newextent/31/data, method: GET, req_id: 8e798a32-233e-46fe-a1e8-54c5f216fe80, remote_addr: 127.0.0.1:56105, local_addr: 127.0.0.1:46213, task: repair
47805 Sep 22 23:15:14.800 TRCE incoming request, uri: /newextent/31/db, method: GET, req_id: 479c5ec0-d964-4894-8ca0-a2a3e6064c2c, remote_addr: 127.0.0.1:56105, local_addr: 127.0.0.1:46213, task: repair
47806 Sep 22 23:15:14.800 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/31/db, method: GET, req_id: 479c5ec0-d964-4894-8ca0-a2a3e6064c2c, remote_addr: 127.0.0.1:56105, local_addr: 127.0.0.1:46213, task: repair
47807 Sep 22 23:15:14.801 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/01F.copy" to "/tmp/downstairs-vrx8aK6L/00/000/01F.replace"
47808 Sep 22 23:15:14.801 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47809 Sep 22 23:15:14.802 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/01F.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
47810 Sep 22 23:15:14.802 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/01F"
47811 Sep 22 23:15:14.802 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/01F.db"
47812 Sep 22 23:15:14.802 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47813 Sep 22 23:15:14.802 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/01F.replace" to "/tmp/downstairs-vrx8aK6L/00/000/01F.completed"
47814 Sep 22 23:15:14.802 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47815 Sep 22 23:15:14.803 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47816 Sep 22 23:15:14.803 DEBG [0] It's time to notify for 570
47817 Sep 22 23:15:14.803 INFO Completion from [0] id:570 status:true
47818 Sep 22 23:15:14.803 INFO [571/752] Repair commands completed
47819 Sep 22 23:15:14.803 INFO Pop front: ReconcileIO { id: ReconciliationId(571), op: ExtentReopen { repair_id: ReconciliationId(571), extent_id: 31 }, state: ClientData([New, New, New]) }
47820 Sep 22 23:15:14.803 INFO Sent repair work, now wait for resp
47821 Sep 22 23:15:14.803 INFO [0] received reconcile message
47822 Sep 22 23:15:14.803 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(571), op: ExtentReopen { repair_id: ReconciliationId(571), extent_id: 31 }, state: ClientData([InProgress, New, New]) }, : downstairs
47823 Sep 22 23:15:14.803 INFO [0] client ExtentReopen { repair_id: ReconciliationId(571), extent_id: 31 }
47824 Sep 22 23:15:14.803 INFO [1] received reconcile message
47825 Sep 22 23:15:14.803 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(571), op: ExtentReopen { repair_id: ReconciliationId(571), extent_id: 31 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47826 Sep 22 23:15:14.803 INFO [1] client ExtentReopen { repair_id: ReconciliationId(571), extent_id: 31 }
47827 Sep 22 23:15:14.803 INFO [2] received reconcile message
47828 Sep 22 23:15:14.803 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(571), op: ExtentReopen { repair_id: ReconciliationId(571), extent_id: 31 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47829 Sep 22 23:15:14.803 INFO [2] client ExtentReopen { repair_id: ReconciliationId(571), extent_id: 31 }
47830 Sep 22 23:15:14.803 DEBG 571 Reopen extent 31
47831 Sep 22 23:15:14.804 DEBG 571 Reopen extent 31
47832 Sep 22 23:15:14.804 DEBG 571 Reopen extent 31
47833 Sep 22 23:15:14.805 DEBG [2] It's time to notify for 571
47834 Sep 22 23:15:14.805 INFO Completion from [2] id:571 status:true
47835 Sep 22 23:15:14.805 INFO [572/752] Repair commands completed
47836 Sep 22 23:15:14.805 INFO Pop front: ReconcileIO { id: ReconciliationId(572), op: ExtentFlush { repair_id: ReconciliationId(572), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47837 Sep 22 23:15:14.805 INFO Sent repair work, now wait for resp
47838 Sep 22 23:15:14.805 INFO [0] received reconcile message
47839 Sep 22 23:15:14.805 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(572), op: ExtentFlush { repair_id: ReconciliationId(572), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47840 Sep 22 23:15:14.805 INFO [0] client ExtentFlush { repair_id: ReconciliationId(572), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47841 Sep 22 23:15:14.805 INFO [1] received reconcile message
47842 Sep 22 23:15:14.805 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(572), op: ExtentFlush { repair_id: ReconciliationId(572), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47843 Sep 22 23:15:14.805 INFO [1] client ExtentFlush { repair_id: ReconciliationId(572), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47844 Sep 22 23:15:14.805 INFO [2] received reconcile message
47845 Sep 22 23:15:14.805 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(572), op: ExtentFlush { repair_id: ReconciliationId(572), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47846 Sep 22 23:15:14.805 INFO [2] client ExtentFlush { repair_id: ReconciliationId(572), extent_id: 87, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47847 Sep 22 23:15:14.805 DEBG 572 Flush extent 87 with f:2 g:2
47848 Sep 22 23:15:14.805 DEBG Flush just extent 87 with f:2 and g:2
47849 Sep 22 23:15:14.806 DEBG [1] It's time to notify for 572
47850 Sep 22 23:15:14.806 INFO Completion from [1] id:572 status:true
47851 Sep 22 23:15:14.806 INFO [573/752] Repair commands completed
47852 Sep 22 23:15:14.806 INFO Pop front: ReconcileIO { id: ReconciliationId(573), op: ExtentClose { repair_id: ReconciliationId(573), extent_id: 87 }, state: ClientData([New, New, New]) }
47853 Sep 22 23:15:14.806 INFO Sent repair work, now wait for resp
47854 Sep 22 23:15:14.806 INFO [0] received reconcile message
47855 Sep 22 23:15:14.806 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(573), op: ExtentClose { repair_id: ReconciliationId(573), extent_id: 87 }, state: ClientData([InProgress, New, New]) }, : downstairs
47856 Sep 22 23:15:14.806 INFO [0] client ExtentClose { repair_id: ReconciliationId(573), extent_id: 87 }
47857 Sep 22 23:15:14.806 INFO [1] received reconcile message
47858 Sep 22 23:15:14.806 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(573), op: ExtentClose { repair_id: ReconciliationId(573), extent_id: 87 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47859 Sep 22 23:15:14.806 INFO [1] client ExtentClose { repair_id: ReconciliationId(573), extent_id: 87 }
47860 Sep 22 23:15:14.806 INFO [2] received reconcile message
47861 Sep 22 23:15:14.806 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(573), op: ExtentClose { repair_id: ReconciliationId(573), extent_id: 87 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47862 Sep 22 23:15:14.806 INFO [2] client ExtentClose { repair_id: ReconciliationId(573), extent_id: 87 }
47863 Sep 22 23:15:14.806 DEBG 573 Close extent 87
47864 Sep 22 23:15:14.806 DEBG 573 Close extent 87
47865 Sep 22 23:15:14.807 DEBG 573 Close extent 87
47866 Sep 22 23:15:14.807 DEBG [2] It's time to notify for 573
47867 Sep 22 23:15:14.807 INFO Completion from [2] id:573 status:true
47868 Sep 22 23:15:14.807 INFO [574/752] Repair commands completed
47869 Sep 22 23:15:14.807 INFO Pop front: ReconcileIO { id: ReconciliationId(574), op: ExtentRepair { repair_id: ReconciliationId(574), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47870 Sep 22 23:15:14.807 INFO Sent repair work, now wait for resp
47871 Sep 22 23:15:14.807 INFO [0] received reconcile message
47872 Sep 22 23:15:14.807 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(574), op: ExtentRepair { repair_id: ReconciliationId(574), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47873 Sep 22 23:15:14.807 INFO [0] client ExtentRepair { repair_id: ReconciliationId(574), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47874 Sep 22 23:15:14.807 INFO [0] Sending repair request ReconciliationId(574)
47875 Sep 22 23:15:14.807 INFO [1] received reconcile message
47876 Sep 22 23:15:14.807 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(574), op: ExtentRepair { repair_id: ReconciliationId(574), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47877 Sep 22 23:15:14.807 INFO [1] client ExtentRepair { repair_id: ReconciliationId(574), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47878 Sep 22 23:15:14.807 INFO [1] No action required ReconciliationId(574)
47879 Sep 22 23:15:14.807 INFO [2] received reconcile message
47880 Sep 22 23:15:14.807 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(574), op: ExtentRepair { repair_id: ReconciliationId(574), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47881 Sep 22 23:15:14.807 INFO [2] client ExtentRepair { repair_id: ReconciliationId(574), extent_id: 87, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47882 Sep 22 23:15:14.807 INFO [2] No action required ReconciliationId(574)
47883 Sep 22 23:15:14.807 DEBG 574 Repair extent 87 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
47884 Sep 22 23:15:14.808 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/057.copy"
47885 Sep 22 23:15:14.870 INFO accepted connection, remote_addr: 127.0.0.1:42401, local_addr: 127.0.0.1:46213, task: repair
47886 Sep 22 23:15:14.870 TRCE incoming request, uri: /extent/87/files, method: GET, req_id: 442cd468-bf3a-4438-b35f-8e947dede51e, remote_addr: 127.0.0.1:42401, local_addr: 127.0.0.1:46213, task: repair
47887 Sep 22 23:15:14.870 INFO request completed, latency_us: 191, response_code: 200, uri: /extent/87/files, method: GET, req_id: 442cd468-bf3a-4438-b35f-8e947dede51e, remote_addr: 127.0.0.1:42401, local_addr: 127.0.0.1:46213, task: repair
47888 Sep 22 23:15:14.871 INFO eid:87 Found repair files: ["057", "057.db"]
47889 Sep 22 23:15:14.871 TRCE incoming request, uri: /newextent/87/data, method: GET, req_id: 663ae246-fd5d-47aa-9c1d-d9466e772733, remote_addr: 127.0.0.1:42401, local_addr: 127.0.0.1:46213, task: repair
47890 Sep 22 23:15:14.871 INFO request completed, latency_us: 318, response_code: 200, uri: /newextent/87/data, method: GET, req_id: 663ae246-fd5d-47aa-9c1d-d9466e772733, remote_addr: 127.0.0.1:42401, local_addr: 127.0.0.1:46213, task: repair
47891 Sep 22 23:15:14.876 TRCE incoming request, uri: /newextent/87/db, method: GET, req_id: b09e60ae-56d5-4052-ae2b-e6d235a8e518, remote_addr: 127.0.0.1:42401, local_addr: 127.0.0.1:46213, task: repair
47892 Sep 22 23:15:14.876 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/87/db, method: GET, req_id: b09e60ae-56d5-4052-ae2b-e6d235a8e518, remote_addr: 127.0.0.1:42401, local_addr: 127.0.0.1:46213, task: repair
47893 Sep 22 23:15:14.878 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/057.copy" to "/tmp/downstairs-vrx8aK6L/00/000/057.replace"
47894 Sep 22 23:15:14.878 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47895 Sep 22 23:15:14.878 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/057.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
47896 Sep 22 23:15:14.879 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/057"
47897 Sep 22 23:15:14.879 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/057.db"
47898 Sep 22 23:15:14.879 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47899 Sep 22 23:15:14.879 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/057.replace" to "/tmp/downstairs-vrx8aK6L/00/000/057.completed"
47900 Sep 22 23:15:14.879 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47901 Sep 22 23:15:14.879 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47902 Sep 22 23:15:14.879 DEBG [0] It's time to notify for 574
47903 Sep 22 23:15:14.879 INFO Completion from [0] id:574 status:true
47904 Sep 22 23:15:14.879 INFO [575/752] Repair commands completed
47905 Sep 22 23:15:14.879 INFO Pop front: ReconcileIO { id: ReconciliationId(575), op: ExtentReopen { repair_id: ReconciliationId(575), extent_id: 87 }, state: ClientData([New, New, New]) }
47906 Sep 22 23:15:14.879 INFO Sent repair work, now wait for resp
47907 Sep 22 23:15:14.879 INFO [0] received reconcile message
47908 Sep 22 23:15:14.879 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(575), op: ExtentReopen { repair_id: ReconciliationId(575), extent_id: 87 }, state: ClientData([InProgress, New, New]) }, : downstairs
47909 Sep 22 23:15:14.879 INFO [0] client ExtentReopen { repair_id: ReconciliationId(575), extent_id: 87 }
47910 Sep 22 23:15:14.879 INFO [1] received reconcile message
47911 Sep 22 23:15:14.879 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(575), op: ExtentReopen { repair_id: ReconciliationId(575), extent_id: 87 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47912 Sep 22 23:15:14.879 INFO [1] client ExtentReopen { repair_id: ReconciliationId(575), extent_id: 87 }
47913 Sep 22 23:15:14.879 INFO [2] received reconcile message
47914 Sep 22 23:15:14.879 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(575), op: ExtentReopen { repair_id: ReconciliationId(575), extent_id: 87 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47915 Sep 22 23:15:14.879 INFO [2] client ExtentReopen { repair_id: ReconciliationId(575), extent_id: 87 }
47916 Sep 22 23:15:14.880 DEBG 575 Reopen extent 87
47917 Sep 22 23:15:14.880 DEBG 575 Reopen extent 87
47918 Sep 22 23:15:14.881 DEBG 575 Reopen extent 87
47919 Sep 22 23:15:14.881 DEBG [2] It's time to notify for 575
47920 Sep 22 23:15:14.881 INFO Completion from [2] id:575 status:true
47921 Sep 22 23:15:14.881 INFO [576/752] Repair commands completed
47922 Sep 22 23:15:14.881 INFO Pop front: ReconcileIO { id: ReconciliationId(576), op: ExtentFlush { repair_id: ReconciliationId(576), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
47923 Sep 22 23:15:14.881 INFO Sent repair work, now wait for resp
47924 Sep 22 23:15:14.881 INFO [0] received reconcile message
47925 Sep 22 23:15:14.881 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(576), op: ExtentFlush { repair_id: ReconciliationId(576), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
47926 Sep 22 23:15:14.881 INFO [0] client ExtentFlush { repair_id: ReconciliationId(576), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47927 Sep 22 23:15:14.881 INFO [1] received reconcile message
47928 Sep 22 23:15:14.881 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(576), op: ExtentFlush { repair_id: ReconciliationId(576), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
47929 Sep 22 23:15:14.881 INFO [1] client ExtentFlush { repair_id: ReconciliationId(576), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47930 Sep 22 23:15:14.881 INFO [2] received reconcile message
47931 Sep 22 23:15:14.881 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(576), op: ExtentFlush { repair_id: ReconciliationId(576), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
47932 Sep 22 23:15:14.881 INFO [2] client ExtentFlush { repair_id: ReconciliationId(576), extent_id: 148, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
47933 Sep 22 23:15:14.882 DEBG 576 Flush extent 148 with f:2 g:2
47934 Sep 22 23:15:14.882 DEBG Flush just extent 148 with f:2 and g:2
47935 Sep 22 23:15:14.882 DEBG [1] It's time to notify for 576
47936 Sep 22 23:15:14.882 INFO Completion from [1] id:576 status:true
47937 Sep 22 23:15:14.882 INFO [577/752] Repair commands completed
47938 Sep 22 23:15:14.882 INFO Pop front: ReconcileIO { id: ReconciliationId(577), op: ExtentClose { repair_id: ReconciliationId(577), extent_id: 148 }, state: ClientData([New, New, New]) }
47939 Sep 22 23:15:14.882 INFO Sent repair work, now wait for resp
47940 Sep 22 23:15:14.882 INFO [0] received reconcile message
47941 Sep 22 23:15:14.882 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(577), op: ExtentClose { repair_id: ReconciliationId(577), extent_id: 148 }, state: ClientData([InProgress, New, New]) }, : downstairs
47942 Sep 22 23:15:14.882 INFO [0] client ExtentClose { repair_id: ReconciliationId(577), extent_id: 148 }
47943 Sep 22 23:15:14.882 INFO [1] received reconcile message
47944 Sep 22 23:15:14.882 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(577), op: ExtentClose { repair_id: ReconciliationId(577), extent_id: 148 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47945 Sep 22 23:15:14.882 INFO [1] client ExtentClose { repair_id: ReconciliationId(577), extent_id: 148 }
47946 Sep 22 23:15:14.882 INFO [2] received reconcile message
47947 Sep 22 23:15:14.882 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(577), op: ExtentClose { repair_id: ReconciliationId(577), extent_id: 148 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
47948 Sep 22 23:15:14.882 INFO [2] client ExtentClose { repair_id: ReconciliationId(577), extent_id: 148 }
47949 Sep 22 23:15:14.882 DEBG 577 Close extent 148
47950 Sep 22 23:15:14.882 DEBG 577 Close extent 148
47951 Sep 22 23:15:14.883 DEBG 577 Close extent 148
47952 Sep 22 23:15:14.883 DEBG [2] It's time to notify for 577
47953 Sep 22 23:15:14.883 INFO Completion from [2] id:577 status:true
47954 Sep 22 23:15:14.883 INFO [578/752] Repair commands completed
47955 Sep 22 23:15:14.883 INFO Pop front: ReconcileIO { id: ReconciliationId(578), op: ExtentRepair { repair_id: ReconciliationId(578), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
47956 Sep 22 23:15:14.883 INFO Sent repair work, now wait for resp
47957 Sep 22 23:15:14.883 INFO [0] received reconcile message
47958 Sep 22 23:15:14.883 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(578), op: ExtentRepair { repair_id: ReconciliationId(578), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
47959 Sep 22 23:15:14.883 INFO [0] client ExtentRepair { repair_id: ReconciliationId(578), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47960 Sep 22 23:15:14.883 INFO [0] Sending repair request ReconciliationId(578)
47961 Sep 22 23:15:14.883 INFO [1] received reconcile message
47962 Sep 22 23:15:14.883 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(578), op: ExtentRepair { repair_id: ReconciliationId(578), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
47963 Sep 22 23:15:14.883 INFO [1] client ExtentRepair { repair_id: ReconciliationId(578), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47964 Sep 22 23:15:14.883 INFO [1] No action required ReconciliationId(578)
47965 Sep 22 23:15:14.883 INFO [2] received reconcile message
47966 Sep 22 23:15:14.883 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(578), op: ExtentRepair { repair_id: ReconciliationId(578), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
47967 Sep 22 23:15:14.884 INFO [2] client ExtentRepair { repair_id: ReconciliationId(578), extent_id: 148, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
47968 Sep 22 23:15:14.884 INFO [2] No action required ReconciliationId(578)
47969 Sep 22 23:15:14.884 DEBG 578 Repair extent 148 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
47970 Sep 22 23:15:14.884 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/094.copy"
47971 Sep 22 23:15:14.944 DEBG up_ds_listen was notified
47972 Sep 22 23:15:14.944 DEBG up_ds_listen process 1087
47973 Sep 22 23:15:14.944 DEBG [A] ack job 1087:88, : downstairs
47974 Sep 22 23:15:14.944 DEBG up_ds_listen checked 1 jobs, back to waiting
47975 Sep 22 23:15:14.944 DEBG IO Flush 1089 has deps [JobId(1088), JobId(1087)]
47976 Sep 22 23:15:14.944 WARN returning error on read!
47977 Sep 22 23:15:14.944 DEBG Read :1088 deps:[JobId(1087)] res:false
47978 Sep 22 23:15:14.948 INFO accepted connection, remote_addr: 127.0.0.1:46187, local_addr: 127.0.0.1:46213, task: repair
47979 Sep 22 23:15:14.948 TRCE incoming request, uri: /extent/148/files, method: GET, req_id: 2135013c-1c22-4b14-95ad-2233260370b0, remote_addr: 127.0.0.1:46187, local_addr: 127.0.0.1:46213, task: repair
47980 Sep 22 23:15:14.948 INFO request completed, latency_us: 191, response_code: 200, uri: /extent/148/files, method: GET, req_id: 2135013c-1c22-4b14-95ad-2233260370b0, remote_addr: 127.0.0.1:46187, local_addr: 127.0.0.1:46213, task: repair
47981 Sep 22 23:15:14.949 INFO eid:148 Found repair files: ["094", "094.db"]
47982 Sep 22 23:15:14.949 TRCE incoming request, uri: /newextent/148/data, method: GET, req_id: c70ccd0b-3da6-41a8-b3aa-d0e2004bdec1, remote_addr: 127.0.0.1:46187, local_addr: 127.0.0.1:46213, task: repair
47983 Sep 22 23:15:14.949 INFO request completed, latency_us: 323, response_code: 200, uri: /newextent/148/data, method: GET, req_id: c70ccd0b-3da6-41a8-b3aa-d0e2004bdec1, remote_addr: 127.0.0.1:46187, local_addr: 127.0.0.1:46213, task: repair
47984 Sep 22 23:15:14.950 DEBG Read :1088 deps:[JobId(1087)] res:true
47985 Sep 22 23:15:14.954 TRCE incoming request, uri: /newextent/148/db, method: GET, req_id: ca54f289-46d3-4a36-9f62-77ae8f77d56e, remote_addr: 127.0.0.1:46187, local_addr: 127.0.0.1:46213, task: repair
47986 Sep 22 23:15:14.955 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/148/db, method: GET, req_id: ca54f289-46d3-4a36-9f62-77ae8f77d56e, remote_addr: 127.0.0.1:46187, local_addr: 127.0.0.1:46213, task: repair
47987 Sep 22 23:15:14.956 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/094.copy" to "/tmp/downstairs-vrx8aK6L/00/000/094.replace"
47988 Sep 22 23:15:14.956 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47989 Sep 22 23:15:14.956 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/094.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
47990 Sep 22 23:15:14.957 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/094"
47991 Sep 22 23:15:14.957 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/094.db"
47992 Sep 22 23:15:14.957 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47993 Sep 22 23:15:14.957 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/094.replace" to "/tmp/downstairs-vrx8aK6L/00/000/094.completed"
47994 Sep 22 23:15:14.957 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47995 Sep 22 23:15:14.957 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
47996 Sep 22 23:15:14.957 DEBG [0] It's time to notify for 578
47997 Sep 22 23:15:14.957 INFO Completion from [0] id:578 status:true
47998 Sep 22 23:15:14.957 INFO [579/752] Repair commands completed
47999 Sep 22 23:15:14.957 INFO Pop front: ReconcileIO { id: ReconciliationId(579), op: ExtentReopen { repair_id: ReconciliationId(579), extent_id: 148 }, state: ClientData([New, New, New]) }
48000 Sep 22 23:15:14.957 INFO Sent repair work, now wait for resp
48001 Sep 22 23:15:14.957 INFO [0] received reconcile message
48002 Sep 22 23:15:14.957 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(579), op: ExtentReopen { repair_id: ReconciliationId(579), extent_id: 148 }, state: ClientData([InProgress, New, New]) }, : downstairs
48003 Sep 22 23:15:14.957 INFO [0] client ExtentReopen { repair_id: ReconciliationId(579), extent_id: 148 }
48004 Sep 22 23:15:14.957 INFO [1] received reconcile message
48005 Sep 22 23:15:14.957 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(579), op: ExtentReopen { repair_id: ReconciliationId(579), extent_id: 148 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48006 Sep 22 23:15:14.957 INFO [1] client ExtentReopen { repair_id: ReconciliationId(579), extent_id: 148 }
48007 Sep 22 23:15:14.958 INFO [2] received reconcile message
48008 Sep 22 23:15:14.958 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(579), op: ExtentReopen { repair_id: ReconciliationId(579), extent_id: 148 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48009 Sep 22 23:15:14.958 INFO [2] client ExtentReopen { repair_id: ReconciliationId(579), extent_id: 148 }
48010 Sep 22 23:15:14.958 DEBG 579 Reopen extent 148
48011 Sep 22 23:15:14.958 DEBG 579 Reopen extent 148
48012 Sep 22 23:15:14.959 DEBG 579 Reopen extent 148
48013 Sep 22 23:15:14.959 DEBG [2] It's time to notify for 579
48014 Sep 22 23:15:14.959 INFO Completion from [2] id:579 status:true
48015 Sep 22 23:15:14.959 INFO [580/752] Repair commands completed
48016 Sep 22 23:15:14.959 INFO Pop front: ReconcileIO { id: ReconciliationId(580), op: ExtentFlush { repair_id: ReconciliationId(580), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48017 Sep 22 23:15:14.959 INFO Sent repair work, now wait for resp
48018 Sep 22 23:15:14.959 INFO [0] received reconcile message
48019 Sep 22 23:15:14.959 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(580), op: ExtentFlush { repair_id: ReconciliationId(580), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48020 Sep 22 23:15:14.959 INFO [0] client ExtentFlush { repair_id: ReconciliationId(580), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48021 Sep 22 23:15:14.960 INFO [1] received reconcile message
48022 Sep 22 23:15:14.960 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(580), op: ExtentFlush { repair_id: ReconciliationId(580), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48023 Sep 22 23:15:14.960 INFO [1] client ExtentFlush { repair_id: ReconciliationId(580), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48024 Sep 22 23:15:14.960 INFO [2] received reconcile message
48025 Sep 22 23:15:14.960 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(580), op: ExtentFlush { repair_id: ReconciliationId(580), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48026 Sep 22 23:15:14.960 INFO [2] client ExtentFlush { repair_id: ReconciliationId(580), extent_id: 162, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48027 Sep 22 23:15:14.960 DEBG 580 Flush extent 162 with f:2 g:2
48028 Sep 22 23:15:14.960 DEBG Flush just extent 162 with f:2 and g:2
48029 Sep 22 23:15:14.960 DEBG [1] It's time to notify for 580
48030 Sep 22 23:15:14.960 INFO Completion from [1] id:580 status:true
48031 Sep 22 23:15:14.960 INFO [581/752] Repair commands completed
48032 Sep 22 23:15:14.960 INFO Pop front: ReconcileIO { id: ReconciliationId(581), op: ExtentClose { repair_id: ReconciliationId(581), extent_id: 162 }, state: ClientData([New, New, New]) }
48033 Sep 22 23:15:14.960 INFO Sent repair work, now wait for resp
48034 Sep 22 23:15:14.960 INFO [0] received reconcile message
48035 Sep 22 23:15:14.960 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(581), op: ExtentClose { repair_id: ReconciliationId(581), extent_id: 162 }, state: ClientData([InProgress, New, New]) }, : downstairs
48036 Sep 22 23:15:14.960 INFO [0] client ExtentClose { repair_id: ReconciliationId(581), extent_id: 162 }
48037 Sep 22 23:15:14.960 INFO [1] received reconcile message
48038 Sep 22 23:15:14.960 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(581), op: ExtentClose { repair_id: ReconciliationId(581), extent_id: 162 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48039 Sep 22 23:15:14.960 INFO [1] client ExtentClose { repair_id: ReconciliationId(581), extent_id: 162 }
48040 Sep 22 23:15:14.960 INFO [2] received reconcile message
48041 Sep 22 23:15:14.960 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(581), op: ExtentClose { repair_id: ReconciliationId(581), extent_id: 162 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48042 Sep 22 23:15:14.960 INFO [2] client ExtentClose { repair_id: ReconciliationId(581), extent_id: 162 }
48043 Sep 22 23:15:14.960 DEBG 581 Close extent 162
48044 Sep 22 23:15:14.961 DEBG 581 Close extent 162
48045 Sep 22 23:15:14.961 DEBG 581 Close extent 162
48046 Sep 22 23:15:14.961 DEBG [2] It's time to notify for 581
48047 Sep 22 23:15:14.961 INFO Completion from [2] id:581 status:true
48048 Sep 22 23:15:14.961 INFO [582/752] Repair commands completed
48049 Sep 22 23:15:14.961 INFO Pop front: ReconcileIO { id: ReconciliationId(582), op: ExtentRepair { repair_id: ReconciliationId(582), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48050 Sep 22 23:15:14.961 INFO Sent repair work, now wait for resp
48051 Sep 22 23:15:14.961 INFO [0] received reconcile message
48052 Sep 22 23:15:14.961 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(582), op: ExtentRepair { repair_id: ReconciliationId(582), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48053 Sep 22 23:15:14.961 INFO [0] client ExtentRepair { repair_id: ReconciliationId(582), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48054 Sep 22 23:15:14.961 INFO [0] Sending repair request ReconciliationId(582)
48055 Sep 22 23:15:14.962 INFO [1] received reconcile message
48056 Sep 22 23:15:14.962 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(582), op: ExtentRepair { repair_id: ReconciliationId(582), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48057 Sep 22 23:15:14.962 INFO [1] client ExtentRepair { repair_id: ReconciliationId(582), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48058 Sep 22 23:15:14.962 INFO [1] No action required ReconciliationId(582)
48059 Sep 22 23:15:14.962 INFO [2] received reconcile message
48060 Sep 22 23:15:14.962 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(582), op: ExtentRepair { repair_id: ReconciliationId(582), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48061 Sep 22 23:15:14.962 INFO [2] client ExtentRepair { repair_id: ReconciliationId(582), extent_id: 162, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48062 Sep 22 23:15:14.962 INFO [2] No action required ReconciliationId(582)
48063 Sep 22 23:15:14.962 DEBG 582 Repair extent 162 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
48064 Sep 22 23:15:14.962 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0A2.copy"
48065 Sep 22 23:15:14.973 ERRO [1] job id 1088 saw error GenericError("test error")
48066 Sep 22 23:15:14.975 INFO [lossy] sleeping 1 second
48067 Sep 22 23:15:15.026 INFO accepted connection, remote_addr: 127.0.0.1:58578, local_addr: 127.0.0.1:46213, task: repair
48068 Sep 22 23:15:15.026 TRCE incoming request, uri: /extent/162/files, method: GET, req_id: a87aa6a4-b695-40a4-85bf-fab4b456057c, remote_addr: 127.0.0.1:58578, local_addr: 127.0.0.1:46213, task: repair
48069 Sep 22 23:15:15.026 INFO request completed, latency_us: 266, response_code: 200, uri: /extent/162/files, method: GET, req_id: a87aa6a4-b695-40a4-85bf-fab4b456057c, remote_addr: 127.0.0.1:58578, local_addr: 127.0.0.1:46213, task: repair
48070 Sep 22 23:15:15.026 INFO eid:162 Found repair files: ["0A2", "0A2.db"]
48071 Sep 22 23:15:15.027 TRCE incoming request, uri: /newextent/162/data, method: GET, req_id: 64309948-5be8-4229-96a3-bd73a2762c45, remote_addr: 127.0.0.1:58578, local_addr: 127.0.0.1:46213, task: repair
48072 Sep 22 23:15:15.027 INFO request completed, latency_us: 372, response_code: 200, uri: /newextent/162/data, method: GET, req_id: 64309948-5be8-4229-96a3-bd73a2762c45, remote_addr: 127.0.0.1:58578, local_addr: 127.0.0.1:46213, task: repair
48073 Sep 22 23:15:15.032 TRCE incoming request, uri: /newextent/162/db, method: GET, req_id: d9bb3628-a437-471b-92e9-34a85eb25756, remote_addr: 127.0.0.1:58578, local_addr: 127.0.0.1:46213, task: repair
48074 Sep 22 23:15:15.033 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/162/db, method: GET, req_id: d9bb3628-a437-471b-92e9-34a85eb25756, remote_addr: 127.0.0.1:58578, local_addr: 127.0.0.1:46213, task: repair
48075 Sep 22 23:15:15.034 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0A2.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0A2.replace"
48076 Sep 22 23:15:15.034 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48077 Sep 22 23:15:15.035 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0A2.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
48078 Sep 22 23:15:15.035 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A2"
48079 Sep 22 23:15:15.035 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A2.db"
48080 Sep 22 23:15:15.035 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48081 Sep 22 23:15:15.035 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0A2.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0A2.completed"
48082 Sep 22 23:15:15.035 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48083 Sep 22 23:15:15.035 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48084 Sep 22 23:15:15.036 DEBG [0] It's time to notify for 582
48085 Sep 22 23:15:15.036 INFO Completion from [0] id:582 status:true
48086 Sep 22 23:15:15.036 INFO [583/752] Repair commands completed
48087 Sep 22 23:15:15.036 INFO Pop front: ReconcileIO { id: ReconciliationId(583), op: ExtentReopen { repair_id: ReconciliationId(583), extent_id: 162 }, state: ClientData([New, New, New]) }
48088 Sep 22 23:15:15.036 INFO Sent repair work, now wait for resp
48089 Sep 22 23:15:15.036 INFO [0] received reconcile message
48090 Sep 22 23:15:15.036 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(583), op: ExtentReopen { repair_id: ReconciliationId(583), extent_id: 162 }, state: ClientData([InProgress, New, New]) }, : downstairs
48091 Sep 22 23:15:15.036 INFO [0] client ExtentReopen { repair_id: ReconciliationId(583), extent_id: 162 }
48092 Sep 22 23:15:15.036 INFO [1] received reconcile message
48093 Sep 22 23:15:15.036 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(583), op: ExtentReopen { repair_id: ReconciliationId(583), extent_id: 162 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48094 Sep 22 23:15:15.036 INFO [1] client ExtentReopen { repair_id: ReconciliationId(583), extent_id: 162 }
48095 Sep 22 23:15:15.036 INFO [2] received reconcile message
48096 Sep 22 23:15:15.036 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(583), op: ExtentReopen { repair_id: ReconciliationId(583), extent_id: 162 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48097 Sep 22 23:15:15.036 INFO [2] client ExtentReopen { repair_id: ReconciliationId(583), extent_id: 162 }
48098 Sep 22 23:15:15.036 DEBG 583 Reopen extent 162
48099 Sep 22 23:15:15.037 DEBG 583 Reopen extent 162
48100 Sep 22 23:15:15.037 DEBG 583 Reopen extent 162
48101 Sep 22 23:15:15.038 DEBG [2] It's time to notify for 583
48102 Sep 22 23:15:15.038 INFO Completion from [2] id:583 status:true
48103 Sep 22 23:15:15.038 INFO [584/752] Repair commands completed
48104 Sep 22 23:15:15.038 INFO Pop front: ReconcileIO { id: ReconciliationId(584), op: ExtentFlush { repair_id: ReconciliationId(584), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48105 Sep 22 23:15:15.038 INFO Sent repair work, now wait for resp
48106 Sep 22 23:15:15.038 INFO [0] received reconcile message
48107 Sep 22 23:15:15.038 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(584), op: ExtentFlush { repair_id: ReconciliationId(584), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48108 Sep 22 23:15:15.038 INFO [0] client ExtentFlush { repair_id: ReconciliationId(584), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48109 Sep 22 23:15:15.038 INFO [1] received reconcile message
48110 Sep 22 23:15:15.038 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(584), op: ExtentFlush { repair_id: ReconciliationId(584), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48111 Sep 22 23:15:15.038 INFO [1] client ExtentFlush { repair_id: ReconciliationId(584), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48112 Sep 22 23:15:15.038 INFO [2] received reconcile message
48113 Sep 22 23:15:15.038 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(584), op: ExtentFlush { repair_id: ReconciliationId(584), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48114 Sep 22 23:15:15.038 INFO [2] client ExtentFlush { repair_id: ReconciliationId(584), extent_id: 51, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48115 Sep 22 23:15:15.038 DEBG 584 Flush extent 51 with f:2 g:2
48116 Sep 22 23:15:15.038 DEBG Flush just extent 51 with f:2 and g:2
48117 Sep 22 23:15:15.039 DEBG [1] It's time to notify for 584
48118 Sep 22 23:15:15.039 INFO Completion from [1] id:584 status:true
48119 Sep 22 23:15:15.039 INFO [585/752] Repair commands completed
48120 Sep 22 23:15:15.039 INFO Pop front: ReconcileIO { id: ReconciliationId(585), op: ExtentClose { repair_id: ReconciliationId(585), extent_id: 51 }, state: ClientData([New, New, New]) }
48121 Sep 22 23:15:15.039 INFO Sent repair work, now wait for resp
48122 Sep 22 23:15:15.039 INFO [0] received reconcile message
48123 Sep 22 23:15:15.039 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(585), op: ExtentClose { repair_id: ReconciliationId(585), extent_id: 51 }, state: ClientData([InProgress, New, New]) }, : downstairs
48124 Sep 22 23:15:15.039 INFO [0] client ExtentClose { repair_id: ReconciliationId(585), extent_id: 51 }
48125 Sep 22 23:15:15.039 INFO [1] received reconcile message
48126 Sep 22 23:15:15.039 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(585), op: ExtentClose { repair_id: ReconciliationId(585), extent_id: 51 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48127 Sep 22 23:15:15.039 INFO [1] client ExtentClose { repair_id: ReconciliationId(585), extent_id: 51 }
48128 Sep 22 23:15:15.039 INFO [2] received reconcile message
48129 Sep 22 23:15:15.039 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(585), op: ExtentClose { repair_id: ReconciliationId(585), extent_id: 51 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48130 Sep 22 23:15:15.039 INFO [2] client ExtentClose { repair_id: ReconciliationId(585), extent_id: 51 }
48131 Sep 22 23:15:15.039 DEBG 585 Close extent 51
48132 Sep 22 23:15:15.039 DEBG 585 Close extent 51
48133 Sep 22 23:15:15.040 DEBG 585 Close extent 51
48134 Sep 22 23:15:15.040 DEBG [2] It's time to notify for 585
48135 Sep 22 23:15:15.040 INFO Completion from [2] id:585 status:true
48136 Sep 22 23:15:15.040 INFO [586/752] Repair commands completed
48137 Sep 22 23:15:15.040 INFO Pop front: ReconcileIO { id: ReconciliationId(586), op: ExtentRepair { repair_id: ReconciliationId(586), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48138 Sep 22 23:15:15.040 INFO Sent repair work, now wait for resp
48139 Sep 22 23:15:15.040 INFO [0] received reconcile message
48140 Sep 22 23:15:15.040 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(586), op: ExtentRepair { repair_id: ReconciliationId(586), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48141 Sep 22 23:15:15.040 INFO [0] client ExtentRepair { repair_id: ReconciliationId(586), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48142 Sep 22 23:15:15.040 INFO [0] Sending repair request ReconciliationId(586)
48143 Sep 22 23:15:15.040 INFO [1] received reconcile message
48144 Sep 22 23:15:15.040 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(586), op: ExtentRepair { repair_id: ReconciliationId(586), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48145 Sep 22 23:15:15.040 INFO [1] client ExtentRepair { repair_id: ReconciliationId(586), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48146 Sep 22 23:15:15.040 INFO [1] No action required ReconciliationId(586)
48147 Sep 22 23:15:15.040 INFO [2] received reconcile message
48148 Sep 22 23:15:15.040 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(586), op: ExtentRepair { repair_id: ReconciliationId(586), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48149 Sep 22 23:15:15.040 INFO [2] client ExtentRepair { repair_id: ReconciliationId(586), extent_id: 51, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48150 Sep 22 23:15:15.040 INFO [2] No action required ReconciliationId(586)
48151 Sep 22 23:15:15.040 DEBG 586 Repair extent 51 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
48152 Sep 22 23:15:15.041 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/033.copy"
48153 Sep 22 23:15:15.102 INFO accepted connection, remote_addr: 127.0.0.1:45475, local_addr: 127.0.0.1:46213, task: repair
48154 Sep 22 23:15:15.103 TRCE incoming request, uri: /extent/51/files, method: GET, req_id: 0f6bc778-159c-4b7a-b14a-f6bd963b34ab, remote_addr: 127.0.0.1:45475, local_addr: 127.0.0.1:46213, task: repair
48155 Sep 22 23:15:15.103 INFO request completed, latency_us: 203, response_code: 200, uri: /extent/51/files, method: GET, req_id: 0f6bc778-159c-4b7a-b14a-f6bd963b34ab, remote_addr: 127.0.0.1:45475, local_addr: 127.0.0.1:46213, task: repair
48156 Sep 22 23:15:15.103 INFO eid:51 Found repair files: ["033", "033.db"]
48157 Sep 22 23:15:15.103 TRCE incoming request, uri: /newextent/51/data, method: GET, req_id: 4a5b5cc1-7c08-4b86-9203-563aee94d012, remote_addr: 127.0.0.1:45475, local_addr: 127.0.0.1:46213, task: repair
48158 Sep 22 23:15:15.104 INFO request completed, latency_us: 305, response_code: 200, uri: /newextent/51/data, method: GET, req_id: 4a5b5cc1-7c08-4b86-9203-563aee94d012, remote_addr: 127.0.0.1:45475, local_addr: 127.0.0.1:46213, task: repair
48159 Sep 22 23:15:15.109 TRCE incoming request, uri: /newextent/51/db, method: GET, req_id: 86d92040-b160-4e5a-b356-2019f151fa2d, remote_addr: 127.0.0.1:45475, local_addr: 127.0.0.1:46213, task: repair
48160 Sep 22 23:15:15.109 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/51/db, method: GET, req_id: 86d92040-b160-4e5a-b356-2019f151fa2d, remote_addr: 127.0.0.1:45475, local_addr: 127.0.0.1:46213, task: repair
48161 Sep 22 23:15:15.110 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/033.copy" to "/tmp/downstairs-vrx8aK6L/00/000/033.replace"
48162 Sep 22 23:15:15.110 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48163 Sep 22 23:15:15.111 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/033.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
48164 Sep 22 23:15:15.111 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/033"
48165 Sep 22 23:15:15.111 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/033.db"
48166 Sep 22 23:15:15.111 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48167 Sep 22 23:15:15.111 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/033.replace" to "/tmp/downstairs-vrx8aK6L/00/000/033.completed"
48168 Sep 22 23:15:15.111 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48169 Sep 22 23:15:15.111 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48170 Sep 22 23:15:15.111 DEBG [0] It's time to notify for 586
48171 Sep 22 23:15:15.111 INFO Completion from [0] id:586 status:true
48172 Sep 22 23:15:15.112 INFO [587/752] Repair commands completed
48173 Sep 22 23:15:15.112 INFO Pop front: ReconcileIO { id: ReconciliationId(587), op: ExtentReopen { repair_id: ReconciliationId(587), extent_id: 51 }, state: ClientData([New, New, New]) }
48174 Sep 22 23:15:15.112 INFO Sent repair work, now wait for resp
48175 Sep 22 23:15:15.112 INFO [0] received reconcile message
48176 Sep 22 23:15:15.112 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(587), op: ExtentReopen { repair_id: ReconciliationId(587), extent_id: 51 }, state: ClientData([InProgress, New, New]) }, : downstairs
48177 Sep 22 23:15:15.112 INFO [0] client ExtentReopen { repair_id: ReconciliationId(587), extent_id: 51 }
48178 Sep 22 23:15:15.112 INFO [1] received reconcile message
48179 Sep 22 23:15:15.112 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(587), op: ExtentReopen { repair_id: ReconciliationId(587), extent_id: 51 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48180 Sep 22 23:15:15.112 INFO [1] client ExtentReopen { repair_id: ReconciliationId(587), extent_id: 51 }
48181 Sep 22 23:15:15.112 INFO [2] received reconcile message
48182 Sep 22 23:15:15.112 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(587), op: ExtentReopen { repair_id: ReconciliationId(587), extent_id: 51 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48183 Sep 22 23:15:15.112 INFO [2] client ExtentReopen { repair_id: ReconciliationId(587), extent_id: 51 }
48184 Sep 22 23:15:15.112 DEBG 587 Reopen extent 51
48185 Sep 22 23:15:15.112 DEBG 587 Reopen extent 51
48186 Sep 22 23:15:15.113 DEBG 587 Reopen extent 51
48187 Sep 22 23:15:15.114 DEBG [2] It's time to notify for 587
48188 Sep 22 23:15:15.114 INFO Completion from [2] id:587 status:true
48189 Sep 22 23:15:15.114 INFO [588/752] Repair commands completed
48190 Sep 22 23:15:15.114 INFO Pop front: ReconcileIO { id: ReconciliationId(588), op: ExtentFlush { repair_id: ReconciliationId(588), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48191 Sep 22 23:15:15.114 INFO Sent repair work, now wait for resp
48192 Sep 22 23:15:15.114 INFO [0] received reconcile message
48193 Sep 22 23:15:15.114 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(588), op: ExtentFlush { repair_id: ReconciliationId(588), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48194 Sep 22 23:15:15.114 INFO [0] client ExtentFlush { repair_id: ReconciliationId(588), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48195 Sep 22 23:15:15.114 INFO [1] received reconcile message
48196 Sep 22 23:15:15.114 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(588), op: ExtentFlush { repair_id: ReconciliationId(588), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48197 Sep 22 23:15:15.114 INFO [1] client ExtentFlush { repair_id: ReconciliationId(588), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48198 Sep 22 23:15:15.114 INFO [2] received reconcile message
48199 Sep 22 23:15:15.114 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(588), op: ExtentFlush { repair_id: ReconciliationId(588), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48200 Sep 22 23:15:15.114 INFO [2] client ExtentFlush { repair_id: ReconciliationId(588), extent_id: 39, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48201 Sep 22 23:15:15.114 DEBG 588 Flush extent 39 with f:2 g:2
48202 Sep 22 23:15:15.114 DEBG Flush just extent 39 with f:2 and g:2
48203 Sep 22 23:15:15.114 DEBG [1] It's time to notify for 588
48204 Sep 22 23:15:15.114 INFO Completion from [1] id:588 status:true
48205 Sep 22 23:15:15.114 INFO [589/752] Repair commands completed
48206 Sep 22 23:15:15.114 INFO Pop front: ReconcileIO { id: ReconciliationId(589), op: ExtentClose { repair_id: ReconciliationId(589), extent_id: 39 }, state: ClientData([New, New, New]) }
48207 Sep 22 23:15:15.114 INFO Sent repair work, now wait for resp
48208 Sep 22 23:15:15.114 INFO [0] received reconcile message
48209 Sep 22 23:15:15.114 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(589), op: ExtentClose { repair_id: ReconciliationId(589), extent_id: 39 }, state: ClientData([InProgress, New, New]) }, : downstairs
48210 Sep 22 23:15:15.114 INFO [0] client ExtentClose { repair_id: ReconciliationId(589), extent_id: 39 }
48211 Sep 22 23:15:15.114 INFO [1] received reconcile message
48212 Sep 22 23:15:15.114 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(589), op: ExtentClose { repair_id: ReconciliationId(589), extent_id: 39 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48213 Sep 22 23:15:15.114 INFO [1] client ExtentClose { repair_id: ReconciliationId(589), extent_id: 39 }
48214 Sep 22 23:15:15.114 INFO [2] received reconcile message
48215 Sep 22 23:15:15.114 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(589), op: ExtentClose { repair_id: ReconciliationId(589), extent_id: 39 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48216 Sep 22 23:15:15.115 INFO [2] client ExtentClose { repair_id: ReconciliationId(589), extent_id: 39 }
48217 Sep 22 23:15:15.115 DEBG 589 Close extent 39
48218 Sep 22 23:15:15.115 DEBG 589 Close extent 39
48219 Sep 22 23:15:15.115 DEBG 589 Close extent 39
48220 Sep 22 23:15:15.116 DEBG [2] It's time to notify for 589
48221 Sep 22 23:15:15.116 INFO Completion from [2] id:589 status:true
48222 Sep 22 23:15:15.116 INFO [590/752] Repair commands completed
48223 Sep 22 23:15:15.116 INFO Pop front: ReconcileIO { id: ReconciliationId(590), op: ExtentRepair { repair_id: ReconciliationId(590), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48224 Sep 22 23:15:15.116 INFO Sent repair work, now wait for resp
48225 Sep 22 23:15:15.116 INFO [0] received reconcile message
48226 Sep 22 23:15:15.116 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(590), op: ExtentRepair { repair_id: ReconciliationId(590), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48227 Sep 22 23:15:15.116 INFO [0] client ExtentRepair { repair_id: ReconciliationId(590), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48228 Sep 22 23:15:15.116 INFO [0] Sending repair request ReconciliationId(590)
48229 Sep 22 23:15:15.116 INFO [1] received reconcile message
48230 Sep 22 23:15:15.116 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(590), op: ExtentRepair { repair_id: ReconciliationId(590), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48231 Sep 22 23:15:15.116 INFO [1] client ExtentRepair { repair_id: ReconciliationId(590), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48232 Sep 22 23:15:15.116 INFO [1] No action required ReconciliationId(590)
48233 Sep 22 23:15:15.116 INFO [2] received reconcile message
48234 Sep 22 23:15:15.116 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(590), op: ExtentRepair { repair_id: ReconciliationId(590), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48235 Sep 22 23:15:15.116 INFO [2] client ExtentRepair { repair_id: ReconciliationId(590), extent_id: 39, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48236 Sep 22 23:15:15.116 INFO [2] No action required ReconciliationId(590)
48237 Sep 22 23:15:15.116 DEBG 590 Repair extent 39 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
48238 Sep 22 23:15:15.116 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/027.copy"
48239 Sep 22 23:15:15.180 INFO accepted connection, remote_addr: 127.0.0.1:57350, local_addr: 127.0.0.1:46213, task: repair
48240 Sep 22 23:15:15.180 TRCE incoming request, uri: /extent/39/files, method: GET, req_id: b68b4c1a-ae9d-4bd9-aa41-5f91bb4c240e, remote_addr: 127.0.0.1:57350, local_addr: 127.0.0.1:46213, task: repair
48241 Sep 22 23:15:15.180 INFO request completed, latency_us: 190, response_code: 200, uri: /extent/39/files, method: GET, req_id: b68b4c1a-ae9d-4bd9-aa41-5f91bb4c240e, remote_addr: 127.0.0.1:57350, local_addr: 127.0.0.1:46213, task: repair
48242 Sep 22 23:15:15.180 INFO eid:39 Found repair files: ["027", "027.db"]
48243 Sep 22 23:15:15.181 TRCE incoming request, uri: /newextent/39/data, method: GET, req_id: efa14085-a44b-4164-87d4-45db2581c6a9, remote_addr: 127.0.0.1:57350, local_addr: 127.0.0.1:46213, task: repair
48244 Sep 22 23:15:15.181 INFO request completed, latency_us: 251, response_code: 200, uri: /newextent/39/data, method: GET, req_id: efa14085-a44b-4164-87d4-45db2581c6a9, remote_addr: 127.0.0.1:57350, local_addr: 127.0.0.1:46213, task: repair
48245 Sep 22 23:15:15.186 TRCE incoming request, uri: /newextent/39/db, method: GET, req_id: a284ae0c-a07f-41ac-beab-f0497e27a379, remote_addr: 127.0.0.1:57350, local_addr: 127.0.0.1:46213, task: repair
48246 Sep 22 23:15:15.186 INFO request completed, latency_us: 302, response_code: 200, uri: /newextent/39/db, method: GET, req_id: a284ae0c-a07f-41ac-beab-f0497e27a379, remote_addr: 127.0.0.1:57350, local_addr: 127.0.0.1:46213, task: repair
48247 Sep 22 23:15:15.187 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/027.copy" to "/tmp/downstairs-vrx8aK6L/00/000/027.replace"
48248 Sep 22 23:15:15.187 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48249 Sep 22 23:15:15.188 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/027.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
48250 Sep 22 23:15:15.188 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/027"
48251 Sep 22 23:15:15.188 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/027.db"
48252 Sep 22 23:15:15.188 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48253 Sep 22 23:15:15.188 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/027.replace" to "/tmp/downstairs-vrx8aK6L/00/000/027.completed"
48254 Sep 22 23:15:15.188 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48255 Sep 22 23:15:15.188 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48256 Sep 22 23:15:15.188 DEBG [0] It's time to notify for 590
48257 Sep 22 23:15:15.189 INFO Completion from [0] id:590 status:true
48258 Sep 22 23:15:15.189 INFO [591/752] Repair commands completed
48259 Sep 22 23:15:15.189 INFO Pop front: ReconcileIO { id: ReconciliationId(591), op: ExtentReopen { repair_id: ReconciliationId(591), extent_id: 39 }, state: ClientData([New, New, New]) }
48260 Sep 22 23:15:15.189 INFO Sent repair work, now wait for resp
48261 Sep 22 23:15:15.189 INFO [0] received reconcile message
48262 Sep 22 23:15:15.189 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(591), op: ExtentReopen { repair_id: ReconciliationId(591), extent_id: 39 }, state: ClientData([InProgress, New, New]) }, : downstairs
48263 Sep 22 23:15:15.189 INFO [0] client ExtentReopen { repair_id: ReconciliationId(591), extent_id: 39 }
48264 Sep 22 23:15:15.189 INFO [1] received reconcile message
48265 Sep 22 23:15:15.189 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(591), op: ExtentReopen { repair_id: ReconciliationId(591), extent_id: 39 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48266 Sep 22 23:15:15.189 INFO [1] client ExtentReopen { repair_id: ReconciliationId(591), extent_id: 39 }
48267 Sep 22 23:15:15.189 INFO [2] received reconcile message
48268 Sep 22 23:15:15.189 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(591), op: ExtentReopen { repair_id: ReconciliationId(591), extent_id: 39 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48269 Sep 22 23:15:15.189 INFO [2] client ExtentReopen { repair_id: ReconciliationId(591), extent_id: 39 }
48270 Sep 22 23:15:15.189 DEBG 591 Reopen extent 39
48271 Sep 22 23:15:15.189 DEBG 591 Reopen extent 39
48272 Sep 22 23:15:15.190 DEBG 591 Reopen extent 39
48273 Sep 22 23:15:15.191 DEBG [2] It's time to notify for 591
48274 Sep 22 23:15:15.191 INFO Completion from [2] id:591 status:true
48275 Sep 22 23:15:15.191 INFO [592/752] Repair commands completed
48276 Sep 22 23:15:15.191 INFO Pop front: ReconcileIO { id: ReconciliationId(592), op: ExtentFlush { repair_id: ReconciliationId(592), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48277 Sep 22 23:15:15.191 INFO Sent repair work, now wait for resp
48278 Sep 22 23:15:15.191 INFO [0] received reconcile message
48279 Sep 22 23:15:15.191 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(592), op: ExtentFlush { repair_id: ReconciliationId(592), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48280 Sep 22 23:15:15.191 INFO [0] client ExtentFlush { repair_id: ReconciliationId(592), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48281 Sep 22 23:15:15.191 INFO [1] received reconcile message
48282 Sep 22 23:15:15.191 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(592), op: ExtentFlush { repair_id: ReconciliationId(592), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48283 Sep 22 23:15:15.191 INFO [1] client ExtentFlush { repair_id: ReconciliationId(592), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48284 Sep 22 23:15:15.191 INFO [2] received reconcile message
48285 Sep 22 23:15:15.191 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(592), op: ExtentFlush { repair_id: ReconciliationId(592), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48286 Sep 22 23:15:15.191 INFO [2] client ExtentFlush { repair_id: ReconciliationId(592), extent_id: 155, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48287 Sep 22 23:15:15.191 DEBG 592 Flush extent 155 with f:2 g:2
48288 Sep 22 23:15:15.191 DEBG Flush just extent 155 with f:2 and g:2
48289 Sep 22 23:15:15.191 DEBG [1] It's time to notify for 592
48290 Sep 22 23:15:15.191 INFO Completion from [1] id:592 status:true
48291 Sep 22 23:15:15.191 INFO [593/752] Repair commands completed
48292 Sep 22 23:15:15.191 INFO Pop front: ReconcileIO { id: ReconciliationId(593), op: ExtentClose { repair_id: ReconciliationId(593), extent_id: 155 }, state: ClientData([New, New, New]) }
48293 Sep 22 23:15:15.191 INFO Sent repair work, now wait for resp
48294 Sep 22 23:15:15.191 INFO [0] received reconcile message
48295 Sep 22 23:15:15.191 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(593), op: ExtentClose { repair_id: ReconciliationId(593), extent_id: 155 }, state: ClientData([InProgress, New, New]) }, : downstairs
48296 Sep 22 23:15:15.191 INFO [0] client ExtentClose { repair_id: ReconciliationId(593), extent_id: 155 }
48297 Sep 22 23:15:15.191 INFO [1] received reconcile message
48298 Sep 22 23:15:15.191 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(593), op: ExtentClose { repair_id: ReconciliationId(593), extent_id: 155 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48299 Sep 22 23:15:15.191 INFO [1] client ExtentClose { repair_id: ReconciliationId(593), extent_id: 155 }
48300 Sep 22 23:15:15.191 INFO [2] received reconcile message
48301 Sep 22 23:15:15.191 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(593), op: ExtentClose { repair_id: ReconciliationId(593), extent_id: 155 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48302 Sep 22 23:15:15.191 INFO [2] client ExtentClose { repair_id: ReconciliationId(593), extent_id: 155 }
48303 Sep 22 23:15:15.192 DEBG 593 Close extent 155
48304 Sep 22 23:15:15.192 DEBG 593 Close extent 155
48305 Sep 22 23:15:15.192 DEBG 593 Close extent 155
48306 Sep 22 23:15:15.193 DEBG [2] It's time to notify for 593
48307 Sep 22 23:15:15.193 INFO Completion from [2] id:593 status:true
48308 Sep 22 23:15:15.193 INFO [594/752] Repair commands completed
48309 Sep 22 23:15:15.193 INFO Pop front: ReconcileIO { id: ReconciliationId(594), op: ExtentRepair { repair_id: ReconciliationId(594), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48310 Sep 22 23:15:15.193 INFO Sent repair work, now wait for resp
48311 Sep 22 23:15:15.193 INFO [0] received reconcile message
48312 Sep 22 23:15:15.193 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(594), op: ExtentRepair { repair_id: ReconciliationId(594), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48313 Sep 22 23:15:15.193 INFO [0] client ExtentRepair { repair_id: ReconciliationId(594), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48314 Sep 22 23:15:15.193 INFO [0] Sending repair request ReconciliationId(594)
48315 Sep 22 23:15:15.193 INFO [1] received reconcile message
48316 Sep 22 23:15:15.193 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(594), op: ExtentRepair { repair_id: ReconciliationId(594), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48317 Sep 22 23:15:15.193 INFO [1] client ExtentRepair { repair_id: ReconciliationId(594), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48318 Sep 22 23:15:15.193 INFO [1] No action required ReconciliationId(594)
48319 Sep 22 23:15:15.193 INFO [2] received reconcile message
48320 Sep 22 23:15:15.193 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(594), op: ExtentRepair { repair_id: ReconciliationId(594), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48321 Sep 22 23:15:15.193 INFO [2] client ExtentRepair { repair_id: ReconciliationId(594), extent_id: 155, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48322 Sep 22 23:15:15.193 INFO [2] No action required ReconciliationId(594)
48323 Sep 22 23:15:15.193 DEBG 594 Repair extent 155 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
48324 Sep 22 23:15:15.193 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/09B.copy"
48325 Sep 22 23:15:15.257 INFO accepted connection, remote_addr: 127.0.0.1:37066, local_addr: 127.0.0.1:46213, task: repair
48326 Sep 22 23:15:15.257 TRCE incoming request, uri: /extent/155/files, method: GET, req_id: 21739cb4-3a30-4450-8a8b-eb38ff565a76, remote_addr: 127.0.0.1:37066, local_addr: 127.0.0.1:46213, task: repair
48327 Sep 22 23:15:15.257 INFO request completed, latency_us: 202, response_code: 200, uri: /extent/155/files, method: GET, req_id: 21739cb4-3a30-4450-8a8b-eb38ff565a76, remote_addr: 127.0.0.1:37066, local_addr: 127.0.0.1:46213, task: repair
48328 Sep 22 23:15:15.257 INFO eid:155 Found repair files: ["09B", "09B.db"]
48329 Sep 22 23:15:15.258 TRCE incoming request, uri: /newextent/155/data, method: GET, req_id: 89485d93-3a87-4c1b-be39-1e2eec364824, remote_addr: 127.0.0.1:37066, local_addr: 127.0.0.1:46213, task: repair
48330 Sep 22 23:15:15.258 INFO request completed, latency_us: 250, response_code: 200, uri: /newextent/155/data, method: GET, req_id: 89485d93-3a87-4c1b-be39-1e2eec364824, remote_addr: 127.0.0.1:37066, local_addr: 127.0.0.1:46213, task: repair
48331 Sep 22 23:15:15.263 TRCE incoming request, uri: /newextent/155/db, method: GET, req_id: d00832d5-c0ea-44a1-b07b-933af4e4889d, remote_addr: 127.0.0.1:37066, local_addr: 127.0.0.1:46213, task: repair
48332 Sep 22 23:15:15.263 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/155/db, method: GET, req_id: d00832d5-c0ea-44a1-b07b-933af4e4889d, remote_addr: 127.0.0.1:37066, local_addr: 127.0.0.1:46213, task: repair
48333 Sep 22 23:15:15.264 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/09B.copy" to "/tmp/downstairs-vrx8aK6L/00/000/09B.replace"
48334 Sep 22 23:15:15.264 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48335 Sep 22 23:15:15.265 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/09B.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
48336 Sep 22 23:15:15.265 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/09B"
48337 Sep 22 23:15:15.265 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/09B.db"
48338 Sep 22 23:15:15.265 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48339 Sep 22 23:15:15.265 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/09B.replace" to "/tmp/downstairs-vrx8aK6L/00/000/09B.completed"
48340 Sep 22 23:15:15.265 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48341 Sep 22 23:15:15.265 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48342 Sep 22 23:15:15.266 DEBG [0] It's time to notify for 594
48343 Sep 22 23:15:15.266 INFO Completion from [0] id:594 status:true
48344 Sep 22 23:15:15.266 INFO [595/752] Repair commands completed
48345 Sep 22 23:15:15.266 INFO Pop front: ReconcileIO { id: ReconciliationId(595), op: ExtentReopen { repair_id: ReconciliationId(595), extent_id: 155 }, state: ClientData([New, New, New]) }
48346 Sep 22 23:15:15.266 INFO Sent repair work, now wait for resp
48347 Sep 22 23:15:15.266 INFO [0] received reconcile message
48348 Sep 22 23:15:15.266 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(595), op: ExtentReopen { repair_id: ReconciliationId(595), extent_id: 155 }, state: ClientData([InProgress, New, New]) }, : downstairs
48349 Sep 22 23:15:15.266 INFO [0] client ExtentReopen { repair_id: ReconciliationId(595), extent_id: 155 }
48350 Sep 22 23:15:15.266 INFO [1] received reconcile message
48351 Sep 22 23:15:15.266 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(595), op: ExtentReopen { repair_id: ReconciliationId(595), extent_id: 155 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48352 Sep 22 23:15:15.266 INFO [1] client ExtentReopen { repair_id: ReconciliationId(595), extent_id: 155 }
48353 Sep 22 23:15:15.266 INFO [2] received reconcile message
48354 Sep 22 23:15:15.266 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(595), op: ExtentReopen { repair_id: ReconciliationId(595), extent_id: 155 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48355 Sep 22 23:15:15.266 INFO [2] client ExtentReopen { repair_id: ReconciliationId(595), extent_id: 155 }
48356 Sep 22 23:15:15.266 DEBG 595 Reopen extent 155
48357 Sep 22 23:15:15.267 DEBG 595 Reopen extent 155
48358 Sep 22 23:15:15.267 DEBG 595 Reopen extent 155
48359 Sep 22 23:15:15.268 DEBG [2] It's time to notify for 595
48360 Sep 22 23:15:15.268 INFO Completion from [2] id:595 status:true
48361 Sep 22 23:15:15.268 INFO [596/752] Repair commands completed
48362 Sep 22 23:15:15.268 INFO Pop front: ReconcileIO { id: ReconciliationId(596), op: ExtentFlush { repair_id: ReconciliationId(596), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48363 Sep 22 23:15:15.268 INFO Sent repair work, now wait for resp
48364 Sep 22 23:15:15.268 INFO [0] received reconcile message
48365 Sep 22 23:15:15.268 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(596), op: ExtentFlush { repair_id: ReconciliationId(596), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48366 Sep 22 23:15:15.268 INFO [0] client ExtentFlush { repair_id: ReconciliationId(596), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48367 Sep 22 23:15:15.268 INFO [1] received reconcile message
48368 Sep 22 23:15:15.268 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(596), op: ExtentFlush { repair_id: ReconciliationId(596), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48369 Sep 22 23:15:15.268 INFO [1] client ExtentFlush { repair_id: ReconciliationId(596), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48370 Sep 22 23:15:15.268 INFO [2] received reconcile message
48371 Sep 22 23:15:15.268 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(596), op: ExtentFlush { repair_id: ReconciliationId(596), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48372 Sep 22 23:15:15.268 INFO [2] client ExtentFlush { repair_id: ReconciliationId(596), extent_id: 29, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48373 Sep 22 23:15:15.268 DEBG 596 Flush extent 29 with f:2 g:2
48374 Sep 22 23:15:15.268 DEBG Flush just extent 29 with f:2 and g:2
48375 Sep 22 23:15:15.268 DEBG [1] It's time to notify for 596
48376 Sep 22 23:15:15.268 INFO Completion from [1] id:596 status:true
48377 Sep 22 23:15:15.268 INFO [597/752] Repair commands completed
48378 Sep 22 23:15:15.268 INFO Pop front: ReconcileIO { id: ReconciliationId(597), op: ExtentClose { repair_id: ReconciliationId(597), extent_id: 29 }, state: ClientData([New, New, New]) }
48379 Sep 22 23:15:15.268 INFO Sent repair work, now wait for resp
48380 Sep 22 23:15:15.268 INFO [0] received reconcile message
48381 Sep 22 23:15:15.268 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(597), op: ExtentClose { repair_id: ReconciliationId(597), extent_id: 29 }, state: ClientData([InProgress, New, New]) }, : downstairs
48382 Sep 22 23:15:15.268 INFO [0] client ExtentClose { repair_id: ReconciliationId(597), extent_id: 29 }
48383 Sep 22 23:15:15.269 INFO [1] received reconcile message
48384 Sep 22 23:15:15.269 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(597), op: ExtentClose { repair_id: ReconciliationId(597), extent_id: 29 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48385 Sep 22 23:15:15.269 INFO [1] client ExtentClose { repair_id: ReconciliationId(597), extent_id: 29 }
48386 Sep 22 23:15:15.269 INFO [2] received reconcile message
48387 Sep 22 23:15:15.269 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(597), op: ExtentClose { repair_id: ReconciliationId(597), extent_id: 29 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48388 Sep 22 23:15:15.269 INFO [2] client ExtentClose { repair_id: ReconciliationId(597), extent_id: 29 }
48389 Sep 22 23:15:15.269 DEBG 597 Close extent 29
48390 Sep 22 23:15:15.269 DEBG 597 Close extent 29
48391 Sep 22 23:15:15.269 DEBG 597 Close extent 29
48392 Sep 22 23:15:15.270 DEBG [2] It's time to notify for 597
48393 Sep 22 23:15:15.270 INFO Completion from [2] id:597 status:true
48394 Sep 22 23:15:15.270 INFO [598/752] Repair commands completed
48395 Sep 22 23:15:15.270 INFO Pop front: ReconcileIO { id: ReconciliationId(598), op: ExtentRepair { repair_id: ReconciliationId(598), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48396 Sep 22 23:15:15.270 INFO Sent repair work, now wait for resp
48397 Sep 22 23:15:15.270 INFO [0] received reconcile message
48398 Sep 22 23:15:15.270 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(598), op: ExtentRepair { repair_id: ReconciliationId(598), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48399 Sep 22 23:15:15.270 INFO [0] client ExtentRepair { repair_id: ReconciliationId(598), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48400 Sep 22 23:15:15.270 INFO [0] Sending repair request ReconciliationId(598)
48401 Sep 22 23:15:15.270 INFO [1] received reconcile message
48402 Sep 22 23:15:15.270 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(598), op: ExtentRepair { repair_id: ReconciliationId(598), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48403 Sep 22 23:15:15.270 INFO [1] client ExtentRepair { repair_id: ReconciliationId(598), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48404 Sep 22 23:15:15.270 INFO [1] No action required ReconciliationId(598)
48405 Sep 22 23:15:15.270 INFO [2] received reconcile message
48406 Sep 22 23:15:15.270 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(598), op: ExtentRepair { repair_id: ReconciliationId(598), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48407 Sep 22 23:15:15.270 INFO [2] client ExtentRepair { repair_id: ReconciliationId(598), extent_id: 29, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48408 Sep 22 23:15:15.270 INFO [2] No action required ReconciliationId(598)
48409 Sep 22 23:15:15.270 DEBG 598 Repair extent 29 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
48410 Sep 22 23:15:15.270 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/01D.copy"
48411 Sep 22 23:15:15.334 INFO accepted connection, remote_addr: 127.0.0.1:35146, local_addr: 127.0.0.1:46213, task: repair
48412 Sep 22 23:15:15.334 TRCE incoming request, uri: /extent/29/files, method: GET, req_id: cde1244e-813d-4523-8907-5c7413d8771a, remote_addr: 127.0.0.1:35146, local_addr: 127.0.0.1:46213, task: repair
48413 Sep 22 23:15:15.334 INFO request completed, latency_us: 191, response_code: 200, uri: /extent/29/files, method: GET, req_id: cde1244e-813d-4523-8907-5c7413d8771a, remote_addr: 127.0.0.1:35146, local_addr: 127.0.0.1:46213, task: repair
48414 Sep 22 23:15:15.335 INFO eid:29 Found repair files: ["01D", "01D.db"]
48415 Sep 22 23:15:15.335 TRCE incoming request, uri: /newextent/29/data, method: GET, req_id: 58004004-69cf-4a23-a165-eef694b6444f, remote_addr: 127.0.0.1:35146, local_addr: 127.0.0.1:46213, task: repair
48416 Sep 22 23:15:15.335 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/29/data, method: GET, req_id: 58004004-69cf-4a23-a165-eef694b6444f, remote_addr: 127.0.0.1:35146, local_addr: 127.0.0.1:46213, task: repair
48417 Sep 22 23:15:15.340 TRCE incoming request, uri: /newextent/29/db, method: GET, req_id: ede2c646-e7d3-4351-8585-5d8ff91f1cde, remote_addr: 127.0.0.1:35146, local_addr: 127.0.0.1:46213, task: repair
48418 Sep 22 23:15:15.341 INFO request completed, latency_us: 312, response_code: 200, uri: /newextent/29/db, method: GET, req_id: ede2c646-e7d3-4351-8585-5d8ff91f1cde, remote_addr: 127.0.0.1:35146, local_addr: 127.0.0.1:46213, task: repair
48419 Sep 22 23:15:15.342 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/01D.copy" to "/tmp/downstairs-vrx8aK6L/00/000/01D.replace"
48420 Sep 22 23:15:15.342 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48421 Sep 22 23:15:15.343 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/01D.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
48422 Sep 22 23:15:15.343 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/01D"
48423 Sep 22 23:15:15.343 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/01D.db"
48424 Sep 22 23:15:15.343 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48425 Sep 22 23:15:15.343 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/01D.replace" to "/tmp/downstairs-vrx8aK6L/00/000/01D.completed"
48426 Sep 22 23:15:15.343 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48427 Sep 22 23:15:15.343 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48428 Sep 22 23:15:15.344 DEBG [0] It's time to notify for 598
48429 Sep 22 23:15:15.344 INFO Completion from [0] id:598 status:true
48430 Sep 22 23:15:15.344 INFO [599/752] Repair commands completed
48431 Sep 22 23:15:15.344 INFO Pop front: ReconcileIO { id: ReconciliationId(599), op: ExtentReopen { repair_id: ReconciliationId(599), extent_id: 29 }, state: ClientData([New, New, New]) }
48432 Sep 22 23:15:15.344 INFO Sent repair work, now wait for resp
48433 Sep 22 23:15:15.344 INFO [0] received reconcile message
48434 Sep 22 23:15:15.344 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(599), op: ExtentReopen { repair_id: ReconciliationId(599), extent_id: 29 }, state: ClientData([InProgress, New, New]) }, : downstairs
48435 Sep 22 23:15:15.344 INFO [0] client ExtentReopen { repair_id: ReconciliationId(599), extent_id: 29 }
48436 Sep 22 23:15:15.344 INFO [1] received reconcile message
48437 Sep 22 23:15:15.344 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(599), op: ExtentReopen { repair_id: ReconciliationId(599), extent_id: 29 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48438 Sep 22 23:15:15.344 INFO [1] client ExtentReopen { repair_id: ReconciliationId(599), extent_id: 29 }
48439 Sep 22 23:15:15.344 INFO [2] received reconcile message
48440 Sep 22 23:15:15.344 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(599), op: ExtentReopen { repair_id: ReconciliationId(599), extent_id: 29 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48441 Sep 22 23:15:15.344 INFO [2] client ExtentReopen { repair_id: ReconciliationId(599), extent_id: 29 }
48442 Sep 22 23:15:15.344 DEBG 599 Reopen extent 29
48443 Sep 22 23:15:15.345 DEBG 599 Reopen extent 29
48444 Sep 22 23:15:15.345 DEBG 599 Reopen extent 29
48445 Sep 22 23:15:15.346 DEBG [2] It's time to notify for 599
48446 Sep 22 23:15:15.346 INFO Completion from [2] id:599 status:true
48447 Sep 22 23:15:15.346 INFO [600/752] Repair commands completed
48448 Sep 22 23:15:15.346 INFO Pop front: ReconcileIO { id: ReconciliationId(600), op: ExtentFlush { repair_id: ReconciliationId(600), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48449 Sep 22 23:15:15.346 INFO Sent repair work, now wait for resp
48450 Sep 22 23:15:15.346 INFO [0] received reconcile message
48451 Sep 22 23:15:15.346 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(600), op: ExtentFlush { repair_id: ReconciliationId(600), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48452 Sep 22 23:15:15.346 INFO [0] client ExtentFlush { repair_id: ReconciliationId(600), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48453 Sep 22 23:15:15.346 INFO [1] received reconcile message
48454 Sep 22 23:15:15.346 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(600), op: ExtentFlush { repair_id: ReconciliationId(600), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48455 Sep 22 23:15:15.346 INFO [1] client ExtentFlush { repair_id: ReconciliationId(600), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48456 Sep 22 23:15:15.346 INFO [2] received reconcile message
48457 Sep 22 23:15:15.346 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(600), op: ExtentFlush { repair_id: ReconciliationId(600), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48458 Sep 22 23:15:15.346 INFO [2] client ExtentFlush { repair_id: ReconciliationId(600), extent_id: 135, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48459 Sep 22 23:15:15.346 DEBG 600 Flush extent 135 with f:2 g:2
48460 Sep 22 23:15:15.346 DEBG Flush just extent 135 with f:2 and g:2
48461 Sep 22 23:15:15.347 DEBG [1] It's time to notify for 600
48462 Sep 22 23:15:15.347 INFO Completion from [1] id:600 status:true
48463 Sep 22 23:15:15.347 INFO [601/752] Repair commands completed
48464 Sep 22 23:15:15.347 INFO Pop front: ReconcileIO { id: ReconciliationId(601), op: ExtentClose { repair_id: ReconciliationId(601), extent_id: 135 }, state: ClientData([New, New, New]) }
48465 Sep 22 23:15:15.347 INFO Sent repair work, now wait for resp
48466 Sep 22 23:15:15.347 INFO [0] received reconcile message
48467 Sep 22 23:15:15.347 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(601), op: ExtentClose { repair_id: ReconciliationId(601), extent_id: 135 }, state: ClientData([InProgress, New, New]) }, : downstairs
48468 Sep 22 23:15:15.347 INFO [0] client ExtentClose { repair_id: ReconciliationId(601), extent_id: 135 }
48469 Sep 22 23:15:15.347 INFO [1] received reconcile message
48470 Sep 22 23:15:15.347 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(601), op: ExtentClose { repair_id: ReconciliationId(601), extent_id: 135 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48471 Sep 22 23:15:15.347 INFO [1] client ExtentClose { repair_id: ReconciliationId(601), extent_id: 135 }
48472 Sep 22 23:15:15.347 INFO [2] received reconcile message
48473 Sep 22 23:15:15.347 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(601), op: ExtentClose { repair_id: ReconciliationId(601), extent_id: 135 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48474 Sep 22 23:15:15.347 INFO [2] client ExtentClose { repair_id: ReconciliationId(601), extent_id: 135 }
48475 Sep 22 23:15:15.347 DEBG 601 Close extent 135
48476 Sep 22 23:15:15.347 DEBG 601 Close extent 135
48477 Sep 22 23:15:15.348 DEBG 601 Close extent 135
48478 Sep 22 23:15:15.348 DEBG [2] It's time to notify for 601
48479 Sep 22 23:15:15.348 INFO Completion from [2] id:601 status:true
48480 Sep 22 23:15:15.348 INFO [602/752] Repair commands completed
48481 Sep 22 23:15:15.348 INFO Pop front: ReconcileIO { id: ReconciliationId(602), op: ExtentRepair { repair_id: ReconciliationId(602), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48482 Sep 22 23:15:15.348 INFO Sent repair work, now wait for resp
48483 Sep 22 23:15:15.348 INFO [0] received reconcile message
48484 Sep 22 23:15:15.348 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(602), op: ExtentRepair { repair_id: ReconciliationId(602), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48485 Sep 22 23:15:15.348 INFO [0] client ExtentRepair { repair_id: ReconciliationId(602), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48486 Sep 22 23:15:15.348 INFO [0] Sending repair request ReconciliationId(602)
48487 Sep 22 23:15:15.348 INFO [1] received reconcile message
48488 Sep 22 23:15:15.348 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(602), op: ExtentRepair { repair_id: ReconciliationId(602), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48489 Sep 22 23:15:15.348 INFO [1] client ExtentRepair { repair_id: ReconciliationId(602), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48490 Sep 22 23:15:15.348 INFO [1] No action required ReconciliationId(602)
48491 Sep 22 23:15:15.348 INFO [2] received reconcile message
48492 Sep 22 23:15:15.348 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(602), op: ExtentRepair { repair_id: ReconciliationId(602), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48493 Sep 22 23:15:15.348 INFO [2] client ExtentRepair { repair_id: ReconciliationId(602), extent_id: 135, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48494 Sep 22 23:15:15.348 INFO [2] No action required ReconciliationId(602)
48495 Sep 22 23:15:15.348 DEBG 602 Repair extent 135 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
48496 Sep 22 23:15:15.349 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/087.copy"
48497 Sep 22 23:15:15.358 DEBG [2] Read AckReady 1088, : downstairs
48498 Sep 22 23:15:15.359 DEBG up_ds_listen was notified
48499 Sep 22 23:15:15.359 DEBG up_ds_listen process 1088
48500 Sep 22 23:15:15.359 DEBG [A] ack job 1088:89, : downstairs
48501 Sep 22 23:15:15.413 INFO accepted connection, remote_addr: 127.0.0.1:63153, local_addr: 127.0.0.1:46213, task: repair
48502 Sep 22 23:15:15.413 TRCE incoming request, uri: /extent/135/files, method: GET, req_id: fef095b1-80c5-4f78-b8e6-71fa6fbcf3ca, remote_addr: 127.0.0.1:63153, local_addr: 127.0.0.1:46213, task: repair
48503 Sep 22 23:15:15.413 DEBG up_ds_listen checked 1 jobs, back to waiting
48504 Sep 22 23:15:15.413 INFO request completed, latency_us: 202, response_code: 200, uri: /extent/135/files, method: GET, req_id: fef095b1-80c5-4f78-b8e6-71fa6fbcf3ca, remote_addr: 127.0.0.1:63153, local_addr: 127.0.0.1:46213, task: repair
48505 Sep 22 23:15:15.414 INFO eid:135 Found repair files: ["087", "087.db"]
48506 Sep 22 23:15:15.414 TRCE incoming request, uri: /newextent/135/data, method: GET, req_id: dffa5365-7e2b-4ec5-931d-79d7f2a8d759, remote_addr: 127.0.0.1:63153, local_addr: 127.0.0.1:46213, task: repair
48507 Sep 22 23:15:15.414 INFO request completed, latency_us: 333, response_code: 200, uri: /newextent/135/data, method: GET, req_id: dffa5365-7e2b-4ec5-931d-79d7f2a8d759, remote_addr: 127.0.0.1:63153, local_addr: 127.0.0.1:46213, task: repair
48508 Sep 22 23:15:15.416 DEBG Flush :1089 extent_limit None deps:[JobId(1088), JobId(1087)] res:true f:34 g:1
48509 Sep 22 23:15:15.416 INFO [lossy] sleeping 1 second
48510 Sep 22 23:15:15.416 INFO [lossy] skipping 1087
48511 Sep 22 23:15:15.416 INFO [lossy] skipping 1087
48512 Sep 22 23:15:15.416 INFO [lossy] skipping 1087
48513 Sep 22 23:15:15.416 DEBG Flush :1087 extent_limit None deps:[JobId(1086), JobId(1085)] res:true f:33 g:1
48514 Sep 22 23:15:15.419 TRCE incoming request, uri: /newextent/135/db, method: GET, req_id: 035f5b90-205f-4cf9-9124-20a8c81e3a47, remote_addr: 127.0.0.1:63153, local_addr: 127.0.0.1:46213, task: repair
48515 Sep 22 23:15:15.419 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/135/db, method: GET, req_id: 035f5b90-205f-4cf9-9124-20a8c81e3a47, remote_addr: 127.0.0.1:63153, local_addr: 127.0.0.1:46213, task: repair
48516 Sep 22 23:15:15.420 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/087.copy" to "/tmp/downstairs-vrx8aK6L/00/000/087.replace"
48517 Sep 22 23:15:15.420 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48518 Sep 22 23:15:15.421 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/087.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
48519 Sep 22 23:15:15.422 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/087"
48520 Sep 22 23:15:15.422 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/087.db"
48521 Sep 22 23:15:15.422 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48522 Sep 22 23:15:15.422 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/087.replace" to "/tmp/downstairs-vrx8aK6L/00/000/087.completed"
48523 Sep 22 23:15:15.422 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48524 Sep 22 23:15:15.422 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48525 Sep 22 23:15:15.422 DEBG [0] It's time to notify for 602
48526 Sep 22 23:15:15.422 INFO Completion from [0] id:602 status:true
48527 Sep 22 23:15:15.422 INFO [603/752] Repair commands completed
48528 Sep 22 23:15:15.422 DEBG Read :1088 deps:[JobId(1087)] res:true
48529 Sep 22 23:15:15.422 INFO Pop front: ReconcileIO { id: ReconciliationId(603), op: ExtentReopen { repair_id: ReconciliationId(603), extent_id: 135 }, state: ClientData([New, New, New]) }
48530 Sep 22 23:15:15.422 INFO Sent repair work, now wait for resp
48531 Sep 22 23:15:15.422 INFO [0] received reconcile message
48532 Sep 22 23:15:15.422 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(603), op: ExtentReopen { repair_id: ReconciliationId(603), extent_id: 135 }, state: ClientData([InProgress, New, New]) }, : downstairs
48533 Sep 22 23:15:15.422 INFO [0] client ExtentReopen { repair_id: ReconciliationId(603), extent_id: 135 }
48534 Sep 22 23:15:15.422 INFO [1] received reconcile message
48535 Sep 22 23:15:15.422 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(603), op: ExtentReopen { repair_id: ReconciliationId(603), extent_id: 135 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48536 Sep 22 23:15:15.422 INFO [1] client ExtentReopen { repair_id: ReconciliationId(603), extent_id: 135 }
48537 Sep 22 23:15:15.422 INFO [2] received reconcile message
48538 Sep 22 23:15:15.422 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(603), op: ExtentReopen { repair_id: ReconciliationId(603), extent_id: 135 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48539 Sep 22 23:15:15.422 INFO [2] client ExtentReopen { repair_id: ReconciliationId(603), extent_id: 135 }
48540 Sep 22 23:15:15.423 DEBG 603 Reopen extent 135
48541 Sep 22 23:15:15.423 DEBG 603 Reopen extent 135
48542 Sep 22 23:15:15.424 DEBG 603 Reopen extent 135
48543 Sep 22 23:15:15.424 DEBG [2] It's time to notify for 603
48544 Sep 22 23:15:15.424 INFO Completion from [2] id:603 status:true
48545 Sep 22 23:15:15.424 INFO [604/752] Repair commands completed
48546 Sep 22 23:15:15.424 INFO Pop front: ReconcileIO { id: ReconciliationId(604), op: ExtentFlush { repair_id: ReconciliationId(604), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48547 Sep 22 23:15:15.424 INFO Sent repair work, now wait for resp
48548 Sep 22 23:15:15.424 INFO [0] received reconcile message
48549 Sep 22 23:15:15.424 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(604), op: ExtentFlush { repair_id: ReconciliationId(604), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48550 Sep 22 23:15:15.425 INFO [0] client ExtentFlush { repair_id: ReconciliationId(604), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48551 Sep 22 23:15:15.425 INFO [1] received reconcile message
48552 Sep 22 23:15:15.425 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(604), op: ExtentFlush { repair_id: ReconciliationId(604), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48553 Sep 22 23:15:15.425 INFO [1] client ExtentFlush { repair_id: ReconciliationId(604), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48554 Sep 22 23:15:15.425 INFO [2] received reconcile message
48555 Sep 22 23:15:15.425 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(604), op: ExtentFlush { repair_id: ReconciliationId(604), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48556 Sep 22 23:15:15.425 INFO [2] client ExtentFlush { repair_id: ReconciliationId(604), extent_id: 161, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48557 Sep 22 23:15:15.425 DEBG 604 Flush extent 161 with f:2 g:2
48558 Sep 22 23:15:15.425 DEBG Flush just extent 161 with f:2 and g:2
48559 Sep 22 23:15:15.425 DEBG [1] It's time to notify for 604
48560 Sep 22 23:15:15.425 INFO Completion from [1] id:604 status:true
48561 Sep 22 23:15:15.425 INFO [605/752] Repair commands completed
48562 Sep 22 23:15:15.425 INFO Pop front: ReconcileIO { id: ReconciliationId(605), op: ExtentClose { repair_id: ReconciliationId(605), extent_id: 161 }, state: ClientData([New, New, New]) }
48563 Sep 22 23:15:15.425 INFO Sent repair work, now wait for resp
48564 Sep 22 23:15:15.425 INFO [0] received reconcile message
48565 Sep 22 23:15:15.425 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(605), op: ExtentClose { repair_id: ReconciliationId(605), extent_id: 161 }, state: ClientData([InProgress, New, New]) }, : downstairs
48566 Sep 22 23:15:15.425 INFO [0] client ExtentClose { repair_id: ReconciliationId(605), extent_id: 161 }
48567 Sep 22 23:15:15.425 INFO [1] received reconcile message
48568 Sep 22 23:15:15.425 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(605), op: ExtentClose { repair_id: ReconciliationId(605), extent_id: 161 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48569 Sep 22 23:15:15.425 INFO [1] client ExtentClose { repair_id: ReconciliationId(605), extent_id: 161 }
48570 Sep 22 23:15:15.425 INFO [2] received reconcile message
48571 Sep 22 23:15:15.425 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(605), op: ExtentClose { repair_id: ReconciliationId(605), extent_id: 161 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48572 Sep 22 23:15:15.425 INFO [2] client ExtentClose { repair_id: ReconciliationId(605), extent_id: 161 }
48573 Sep 22 23:15:15.425 DEBG 605 Close extent 161
48574 Sep 22 23:15:15.426 DEBG 605 Close extent 161
48575 Sep 22 23:15:15.426 DEBG 605 Close extent 161
48576 Sep 22 23:15:15.426 DEBG [2] It's time to notify for 605
48577 Sep 22 23:15:15.426 INFO Completion from [2] id:605 status:true
48578 Sep 22 23:15:15.426 INFO [606/752] Repair commands completed
48579 Sep 22 23:15:15.426 INFO Pop front: ReconcileIO { id: ReconciliationId(606), op: ExtentRepair { repair_id: ReconciliationId(606), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48580 Sep 22 23:15:15.426 INFO Sent repair work, now wait for resp
48581 Sep 22 23:15:15.427 INFO [0] received reconcile message
48582 Sep 22 23:15:15.427 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(606), op: ExtentRepair { repair_id: ReconciliationId(606), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48583 Sep 22 23:15:15.427 INFO [0] client ExtentRepair { repair_id: ReconciliationId(606), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48584 Sep 22 23:15:15.427 INFO [0] Sending repair request ReconciliationId(606)
48585 Sep 22 23:15:15.427 INFO [1] received reconcile message
48586 Sep 22 23:15:15.427 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(606), op: ExtentRepair { repair_id: ReconciliationId(606), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48587 Sep 22 23:15:15.427 INFO [1] client ExtentRepair { repair_id: ReconciliationId(606), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48588 Sep 22 23:15:15.427 INFO [1] No action required ReconciliationId(606)
48589 Sep 22 23:15:15.427 INFO [2] received reconcile message
48590 Sep 22 23:15:15.427 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(606), op: ExtentRepair { repair_id: ReconciliationId(606), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48591 Sep 22 23:15:15.427 INFO [2] client ExtentRepair { repair_id: ReconciliationId(606), extent_id: 161, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48592 Sep 22 23:15:15.427 INFO [2] No action required ReconciliationId(606)
48593 Sep 22 23:15:15.427 DEBG 606 Repair extent 161 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
48594 Sep 22 23:15:15.427 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0A1.copy"
48595 Sep 22 23:15:15.489 INFO accepted connection, remote_addr: 127.0.0.1:60071, local_addr: 127.0.0.1:46213, task: repair
48596 Sep 22 23:15:15.490 TRCE incoming request, uri: /extent/161/files, method: GET, req_id: 30a633ed-b022-4155-aa83-1be9a30a7a81, remote_addr: 127.0.0.1:60071, local_addr: 127.0.0.1:46213, task: repair
48597 Sep 22 23:15:15.490 INFO request completed, latency_us: 218, response_code: 200, uri: /extent/161/files, method: GET, req_id: 30a633ed-b022-4155-aa83-1be9a30a7a81, remote_addr: 127.0.0.1:60071, local_addr: 127.0.0.1:46213, task: repair
48598 Sep 22 23:15:15.490 INFO eid:161 Found repair files: ["0A1", "0A1.db"]
48599 Sep 22 23:15:15.491 TRCE incoming request, uri: /newextent/161/data, method: GET, req_id: 0c23dab9-0102-476b-a2dd-3de7732747f2, remote_addr: 127.0.0.1:60071, local_addr: 127.0.0.1:46213, task: repair
48600 Sep 22 23:15:15.491 INFO request completed, latency_us: 268, response_code: 200, uri: /newextent/161/data, method: GET, req_id: 0c23dab9-0102-476b-a2dd-3de7732747f2, remote_addr: 127.0.0.1:60071, local_addr: 127.0.0.1:46213, task: repair
48601 Sep 22 23:15:15.495 DEBG IO Read 1090 has deps [JobId(1089)]
48602 Sep 22 23:15:15.495 DEBG [rc] retire 1087 clears [JobId(1086), JobId(1087)], : downstairs
48603 Sep 22 23:15:15.496 TRCE incoming request, uri: /newextent/161/db, method: GET, req_id: 21a0b6f5-827d-40dd-9f43-57d455f47741, remote_addr: 127.0.0.1:60071, local_addr: 127.0.0.1:46213, task: repair
48604 Sep 22 23:15:15.496 INFO request completed, latency_us: 351, response_code: 200, uri: /newextent/161/db, method: GET, req_id: 21a0b6f5-827d-40dd-9f43-57d455f47741, remote_addr: 127.0.0.1:60071, local_addr: 127.0.0.1:46213, task: repair
48605 Sep 22 23:15:15.498 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0A1.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0A1.replace"
48606 Sep 22 23:15:15.498 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48607 Sep 22 23:15:15.499 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0A1.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
48608 Sep 22 23:15:15.499 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A1"
48609 Sep 22 23:15:15.499 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A1.db"
48610 Sep 22 23:15:15.499 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48611 Sep 22 23:15:15.499 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0A1.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0A1.completed"
48612 Sep 22 23:15:15.500 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48613 Sep 22 23:15:15.500 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48614 Sep 22 23:15:15.500 DEBG [0] It's time to notify for 606
48615 Sep 22 23:15:15.500 INFO Completion from [0] id:606 status:true
48616 Sep 22 23:15:15.500 INFO [607/752] Repair commands completed
48617 Sep 22 23:15:15.500 INFO Pop front: ReconcileIO { id: ReconciliationId(607), op: ExtentReopen { repair_id: ReconciliationId(607), extent_id: 161 }, state: ClientData([New, New, New]) }
48618 Sep 22 23:15:15.500 INFO Sent repair work, now wait for resp
48619 Sep 22 23:15:15.500 INFO [0] received reconcile message
48620 Sep 22 23:15:15.500 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(607), op: ExtentReopen { repair_id: ReconciliationId(607), extent_id: 161 }, state: ClientData([InProgress, New, New]) }, : downstairs
48621 Sep 22 23:15:15.500 INFO [0] client ExtentReopen { repair_id: ReconciliationId(607), extent_id: 161 }
48622 Sep 22 23:15:15.500 INFO [1] received reconcile message
48623 Sep 22 23:15:15.500 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(607), op: ExtentReopen { repair_id: ReconciliationId(607), extent_id: 161 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48624 Sep 22 23:15:15.500 INFO [1] client ExtentReopen { repair_id: ReconciliationId(607), extent_id: 161 }
48625 Sep 22 23:15:15.500 INFO [2] received reconcile message
48626 Sep 22 23:15:15.500 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(607), op: ExtentReopen { repair_id: ReconciliationId(607), extent_id: 161 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48627 Sep 22 23:15:15.500 INFO [2] client ExtentReopen { repair_id: ReconciliationId(607), extent_id: 161 }
48628 Sep 22 23:15:15.501 DEBG 607 Reopen extent 161
48629 Sep 22 23:15:15.502 DEBG 607 Reopen extent 161
48630 Sep 22 23:15:15.502 DEBG 607 Reopen extent 161
48631 Sep 22 23:15:15.503 DEBG [2] It's time to notify for 607
48632 Sep 22 23:15:15.503 INFO Completion from [2] id:607 status:true
48633 Sep 22 23:15:15.503 INFO [608/752] Repair commands completed
48634 Sep 22 23:15:15.503 INFO Pop front: ReconcileIO { id: ReconciliationId(608), op: ExtentFlush { repair_id: ReconciliationId(608), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48635 Sep 22 23:15:15.503 INFO Sent repair work, now wait for resp
48636 Sep 22 23:15:15.503 INFO [0] received reconcile message
48637 Sep 22 23:15:15.503 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(608), op: ExtentFlush { repair_id: ReconciliationId(608), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48638 Sep 22 23:15:15.503 INFO [0] client ExtentFlush { repair_id: ReconciliationId(608), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48639 Sep 22 23:15:15.503 INFO [1] received reconcile message
48640 Sep 22 23:15:15.503 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(608), op: ExtentFlush { repair_id: ReconciliationId(608), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48641 Sep 22 23:15:15.503 INFO [1] client ExtentFlush { repair_id: ReconciliationId(608), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48642 Sep 22 23:15:15.503 INFO [2] received reconcile message
48643 Sep 22 23:15:15.503 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(608), op: ExtentFlush { repair_id: ReconciliationId(608), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48644 Sep 22 23:15:15.503 INFO [2] client ExtentFlush { repair_id: ReconciliationId(608), extent_id: 15, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48645 Sep 22 23:15:15.504 DEBG 608 Flush extent 15 with f:2 g:2
48646 Sep 22 23:15:15.504 DEBG Flush just extent 15 with f:2 and g:2
48647 Sep 22 23:15:15.504 DEBG [1] It's time to notify for 608
48648 Sep 22 23:15:15.504 INFO Completion from [1] id:608 status:true
48649 Sep 22 23:15:15.504 INFO [609/752] Repair commands completed
48650 Sep 22 23:15:15.504 INFO Pop front: ReconcileIO { id: ReconciliationId(609), op: ExtentClose { repair_id: ReconciliationId(609), extent_id: 15 }, state: ClientData([New, New, New]) }
48651 Sep 22 23:15:15.504 INFO Sent repair work, now wait for resp
48652 Sep 22 23:15:15.504 INFO [0] received reconcile message
48653 Sep 22 23:15:15.504 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(609), op: ExtentClose { repair_id: ReconciliationId(609), extent_id: 15 }, state: ClientData([InProgress, New, New]) }, : downstairs
48654 Sep 22 23:15:15.504 INFO [0] client ExtentClose { repair_id: ReconciliationId(609), extent_id: 15 }
48655 Sep 22 23:15:15.504 INFO [1] received reconcile message
48656 Sep 22 23:15:15.504 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(609), op: ExtentClose { repair_id: ReconciliationId(609), extent_id: 15 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48657 Sep 22 23:15:15.504 INFO [1] client ExtentClose { repair_id: ReconciliationId(609), extent_id: 15 }
48658 Sep 22 23:15:15.504 INFO [2] received reconcile message
48659 Sep 22 23:15:15.504 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(609), op: ExtentClose { repair_id: ReconciliationId(609), extent_id: 15 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48660 Sep 22 23:15:15.504 INFO [2] client ExtentClose { repair_id: ReconciliationId(609), extent_id: 15 }
48661 Sep 22 23:15:15.504 DEBG 609 Close extent 15
48662 Sep 22 23:15:15.505 DEBG 609 Close extent 15
48663 Sep 22 23:15:15.505 DEBG 609 Close extent 15
48664 Sep 22 23:15:15.506 DEBG [2] It's time to notify for 609
48665 Sep 22 23:15:15.506 INFO Completion from [2] id:609 status:true
48666 Sep 22 23:15:15.506 INFO [610/752] Repair commands completed
48667 Sep 22 23:15:15.506 INFO Pop front: ReconcileIO { id: ReconciliationId(610), op: ExtentRepair { repair_id: ReconciliationId(610), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48668 Sep 22 23:15:15.506 INFO Sent repair work, now wait for resp
48669 Sep 22 23:15:15.506 INFO [0] received reconcile message
48670 Sep 22 23:15:15.506 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(610), op: ExtentRepair { repair_id: ReconciliationId(610), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48671 Sep 22 23:15:15.506 INFO [0] client ExtentRepair { repair_id: ReconciliationId(610), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48672 Sep 22 23:15:15.506 INFO [0] Sending repair request ReconciliationId(610)
48673 Sep 22 23:15:15.506 INFO [1] received reconcile message
48674 Sep 22 23:15:15.506 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(610), op: ExtentRepair { repair_id: ReconciliationId(610), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48675 Sep 22 23:15:15.506 INFO [1] client ExtentRepair { repair_id: ReconciliationId(610), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48676 Sep 22 23:15:15.506 INFO [1] No action required ReconciliationId(610)
48677 Sep 22 23:15:15.506 INFO [2] received reconcile message
48678 Sep 22 23:15:15.506 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(610), op: ExtentRepair { repair_id: ReconciliationId(610), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48679 Sep 22 23:15:15.506 INFO [2] client ExtentRepair { repair_id: ReconciliationId(610), extent_id: 15, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48680 Sep 22 23:15:15.506 INFO [2] No action required ReconciliationId(610)
48681 Sep 22 23:15:15.506 DEBG 610 Repair extent 15 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
48682 Sep 22 23:15:15.506 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/00F.copy"
48683 Sep 22 23:15:15.571 INFO accepted connection, remote_addr: 127.0.0.1:61878, local_addr: 127.0.0.1:46213, task: repair
48684 Sep 22 23:15:15.571 TRCE incoming request, uri: /extent/15/files, method: GET, req_id: 259d5bcf-dd3a-4d7e-9b39-d67a93ef0430, remote_addr: 127.0.0.1:61878, local_addr: 127.0.0.1:46213, task: repair
48685 Sep 22 23:15:15.571 INFO request completed, latency_us: 196, response_code: 200, uri: /extent/15/files, method: GET, req_id: 259d5bcf-dd3a-4d7e-9b39-d67a93ef0430, remote_addr: 127.0.0.1:61878, local_addr: 127.0.0.1:46213, task: repair
48686 Sep 22 23:15:15.572 INFO eid:15 Found repair files: ["00F", "00F.db"]
48687 Sep 22 23:15:15.572 TRCE incoming request, uri: /newextent/15/data, method: GET, req_id: c9192a78-4dd4-43a5-a0db-ee45628e0d94, remote_addr: 127.0.0.1:61878, local_addr: 127.0.0.1:46213, task: repair
48688 Sep 22 23:15:15.572 INFO request completed, latency_us: 329, response_code: 200, uri: /newextent/15/data, method: GET, req_id: c9192a78-4dd4-43a5-a0db-ee45628e0d94, remote_addr: 127.0.0.1:61878, local_addr: 127.0.0.1:46213, task: repair
48689 Sep 22 23:15:15.578 TRCE incoming request, uri: /newextent/15/db, method: GET, req_id: 59e3543d-a99b-4d8f-b64c-8643df5d758f, remote_addr: 127.0.0.1:61878, local_addr: 127.0.0.1:46213, task: repair
48690 Sep 22 23:15:15.578 INFO request completed, latency_us: 300, response_code: 200, uri: /newextent/15/db, method: GET, req_id: 59e3543d-a99b-4d8f-b64c-8643df5d758f, remote_addr: 127.0.0.1:61878, local_addr: 127.0.0.1:46213, task: repair
48691 Sep 22 23:15:15.579 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/00F.copy" to "/tmp/downstairs-vrx8aK6L/00/000/00F.replace"
48692 Sep 22 23:15:15.579 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48693 Sep 22 23:15:15.580 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/00F.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
48694 Sep 22 23:15:15.580 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/00F"
48695 Sep 22 23:15:15.580 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/00F.db"
48696 Sep 22 23:15:15.580 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48697 Sep 22 23:15:15.580 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/00F.replace" to "/tmp/downstairs-vrx8aK6L/00/000/00F.completed"
48698 Sep 22 23:15:15.580 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48699 Sep 22 23:15:15.580 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48700 Sep 22 23:15:15.580 DEBG [0] It's time to notify for 610
48701 Sep 22 23:15:15.581 INFO Completion from [0] id:610 status:true
48702 Sep 22 23:15:15.581 INFO [611/752] Repair commands completed
48703 Sep 22 23:15:15.581 INFO Pop front: ReconcileIO { id: ReconciliationId(611), op: ExtentReopen { repair_id: ReconciliationId(611), extent_id: 15 }, state: ClientData([New, New, New]) }
48704 Sep 22 23:15:15.581 INFO Sent repair work, now wait for resp
48705 Sep 22 23:15:15.581 INFO [0] received reconcile message
48706 Sep 22 23:15:15.581 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(611), op: ExtentReopen { repair_id: ReconciliationId(611), extent_id: 15 }, state: ClientData([InProgress, New, New]) }, : downstairs
48707 Sep 22 23:15:15.581 INFO [0] client ExtentReopen { repair_id: ReconciliationId(611), extent_id: 15 }
48708 Sep 22 23:15:15.581 INFO [1] received reconcile message
48709 Sep 22 23:15:15.581 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(611), op: ExtentReopen { repair_id: ReconciliationId(611), extent_id: 15 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48710 Sep 22 23:15:15.581 INFO [1] client ExtentReopen { repair_id: ReconciliationId(611), extent_id: 15 }
48711 Sep 22 23:15:15.581 INFO [2] received reconcile message
48712 Sep 22 23:15:15.581 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(611), op: ExtentReopen { repair_id: ReconciliationId(611), extent_id: 15 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48713 Sep 22 23:15:15.581 INFO [2] client ExtentReopen { repair_id: ReconciliationId(611), extent_id: 15 }
48714 Sep 22 23:15:15.581 DEBG 611 Reopen extent 15
48715 Sep 22 23:15:15.582 DEBG 611 Reopen extent 15
48716 Sep 22 23:15:15.582 DEBG 611 Reopen extent 15
48717 Sep 22 23:15:15.583 DEBG [2] It's time to notify for 611
48718 Sep 22 23:15:15.583 INFO Completion from [2] id:611 status:true
48719 Sep 22 23:15:15.583 INFO [612/752] Repair commands completed
48720 Sep 22 23:15:15.583 INFO Pop front: ReconcileIO { id: ReconciliationId(612), op: ExtentFlush { repair_id: ReconciliationId(612), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48721 Sep 22 23:15:15.583 INFO Sent repair work, now wait for resp
48722 Sep 22 23:15:15.583 INFO [0] received reconcile message
48723 Sep 22 23:15:15.583 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(612), op: ExtentFlush { repair_id: ReconciliationId(612), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48724 Sep 22 23:15:15.583 INFO [0] client ExtentFlush { repair_id: ReconciliationId(612), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48725 Sep 22 23:15:15.583 INFO [1] received reconcile message
48726 Sep 22 23:15:15.583 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(612), op: ExtentFlush { repair_id: ReconciliationId(612), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48727 Sep 22 23:15:15.583 INFO [1] client ExtentFlush { repair_id: ReconciliationId(612), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48728 Sep 22 23:15:15.583 INFO [2] received reconcile message
48729 Sep 22 23:15:15.583 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(612), op: ExtentFlush { repair_id: ReconciliationId(612), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48730 Sep 22 23:15:15.583 INFO [2] client ExtentFlush { repair_id: ReconciliationId(612), extent_id: 64, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48731 Sep 22 23:15:15.583 DEBG 612 Flush extent 64 with f:2 g:2
48732 Sep 22 23:15:15.583 DEBG Flush just extent 64 with f:2 and g:2
48733 Sep 22 23:15:15.583 DEBG [1] It's time to notify for 612
48734 Sep 22 23:15:15.583 INFO Completion from [1] id:612 status:true
48735 Sep 22 23:15:15.583 INFO [613/752] Repair commands completed
48736 Sep 22 23:15:15.583 INFO Pop front: ReconcileIO { id: ReconciliationId(613), op: ExtentClose { repair_id: ReconciliationId(613), extent_id: 64 }, state: ClientData([New, New, New]) }
48737 Sep 22 23:15:15.583 INFO Sent repair work, now wait for resp
48738 Sep 22 23:15:15.583 INFO [0] received reconcile message
48739 Sep 22 23:15:15.583 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(613), op: ExtentClose { repair_id: ReconciliationId(613), extent_id: 64 }, state: ClientData([InProgress, New, New]) }, : downstairs
48740 Sep 22 23:15:15.584 INFO [0] client ExtentClose { repair_id: ReconciliationId(613), extent_id: 64 }
48741 Sep 22 23:15:15.584 INFO [1] received reconcile message
48742 Sep 22 23:15:15.584 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(613), op: ExtentClose { repair_id: ReconciliationId(613), extent_id: 64 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48743 Sep 22 23:15:15.584 INFO [1] client ExtentClose { repair_id: ReconciliationId(613), extent_id: 64 }
48744 Sep 22 23:15:15.584 INFO [2] received reconcile message
48745 Sep 22 23:15:15.584 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(613), op: ExtentClose { repair_id: ReconciliationId(613), extent_id: 64 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48746 Sep 22 23:15:15.584 INFO [2] client ExtentClose { repair_id: ReconciliationId(613), extent_id: 64 }
48747 Sep 22 23:15:15.584 DEBG 613 Close extent 64
48748 Sep 22 23:15:15.584 DEBG 613 Close extent 64
48749 Sep 22 23:15:15.584 DEBG 613 Close extent 64
48750 Sep 22 23:15:15.585 DEBG [2] It's time to notify for 613
48751 Sep 22 23:15:15.585 INFO Completion from [2] id:613 status:true
48752 Sep 22 23:15:15.585 INFO [614/752] Repair commands completed
48753 Sep 22 23:15:15.585 INFO Pop front: ReconcileIO { id: ReconciliationId(614), op: ExtentRepair { repair_id: ReconciliationId(614), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48754 Sep 22 23:15:15.585 INFO Sent repair work, now wait for resp
48755 Sep 22 23:15:15.585 INFO [0] received reconcile message
48756 Sep 22 23:15:15.585 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(614), op: ExtentRepair { repair_id: ReconciliationId(614), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48757 Sep 22 23:15:15.585 INFO [0] client ExtentRepair { repair_id: ReconciliationId(614), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48758 Sep 22 23:15:15.585 INFO [0] Sending repair request ReconciliationId(614)
48759 Sep 22 23:15:15.585 INFO [1] received reconcile message
48760 Sep 22 23:15:15.585 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(614), op: ExtentRepair { repair_id: ReconciliationId(614), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48761 Sep 22 23:15:15.585 INFO [1] client ExtentRepair { repair_id: ReconciliationId(614), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48762 Sep 22 23:15:15.585 INFO [1] No action required ReconciliationId(614)
48763 Sep 22 23:15:15.585 INFO [2] received reconcile message
48764 Sep 22 23:15:15.585 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(614), op: ExtentRepair { repair_id: ReconciliationId(614), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48765 Sep 22 23:15:15.585 INFO [2] client ExtentRepair { repair_id: ReconciliationId(614), extent_id: 64, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48766 Sep 22 23:15:15.585 INFO [2] No action required ReconciliationId(614)
48767 Sep 22 23:15:15.585 DEBG 614 Repair extent 64 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
48768 Sep 22 23:15:15.585 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/040.copy"
48769 Sep 22 23:15:15.647 INFO accepted connection, remote_addr: 127.0.0.1:42578, local_addr: 127.0.0.1:46213, task: repair
48770 Sep 22 23:15:15.647 TRCE incoming request, uri: /extent/64/files, method: GET, req_id: d1e32d03-3423-4eb9-ae75-0d3e7dc09d33, remote_addr: 127.0.0.1:42578, local_addr: 127.0.0.1:46213, task: repair
48771 Sep 22 23:15:15.647 INFO request completed, latency_us: 209, response_code: 200, uri: /extent/64/files, method: GET, req_id: d1e32d03-3423-4eb9-ae75-0d3e7dc09d33, remote_addr: 127.0.0.1:42578, local_addr: 127.0.0.1:46213, task: repair
48772 Sep 22 23:15:15.647 INFO eid:64 Found repair files: ["040", "040.db"]
48773 Sep 22 23:15:15.648 TRCE incoming request, uri: /newextent/64/data, method: GET, req_id: 1f28e159-cf2a-4977-9450-44b1074010ac, remote_addr: 127.0.0.1:42578, local_addr: 127.0.0.1:46213, task: repair
48774 Sep 22 23:15:15.648 INFO request completed, latency_us: 249, response_code: 200, uri: /newextent/64/data, method: GET, req_id: 1f28e159-cf2a-4977-9450-44b1074010ac, remote_addr: 127.0.0.1:42578, local_addr: 127.0.0.1:46213, task: repair
48775 Sep 22 23:15:15.653 TRCE incoming request, uri: /newextent/64/db, method: GET, req_id: 3881377d-e802-46fa-b4c5-0ac67d4095c2, remote_addr: 127.0.0.1:42578, local_addr: 127.0.0.1:46213, task: repair
48776 Sep 22 23:15:15.653 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/64/db, method: GET, req_id: 3881377d-e802-46fa-b4c5-0ac67d4095c2, remote_addr: 127.0.0.1:42578, local_addr: 127.0.0.1:46213, task: repair
48777 Sep 22 23:15:15.654 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/040.copy" to "/tmp/downstairs-vrx8aK6L/00/000/040.replace"
48778 Sep 22 23:15:15.654 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48779 Sep 22 23:15:15.655 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/040.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
48780 Sep 22 23:15:15.655 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/040"
48781 Sep 22 23:15:15.656 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/040.db"
48782 Sep 22 23:15:15.656 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48783 Sep 22 23:15:15.656 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/040.replace" to "/tmp/downstairs-vrx8aK6L/00/000/040.completed"
48784 Sep 22 23:15:15.656 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48785 Sep 22 23:15:15.656 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48786 Sep 22 23:15:15.656 DEBG [0] It's time to notify for 614
48787 Sep 22 23:15:15.656 INFO Completion from [0] id:614 status:true
48788 Sep 22 23:15:15.656 INFO [615/752] Repair commands completed
48789 Sep 22 23:15:15.656 INFO Pop front: ReconcileIO { id: ReconciliationId(615), op: ExtentReopen { repair_id: ReconciliationId(615), extent_id: 64 }, state: ClientData([New, New, New]) }
48790 Sep 22 23:15:15.656 INFO Sent repair work, now wait for resp
48791 Sep 22 23:15:15.656 INFO [0] received reconcile message
48792 Sep 22 23:15:15.656 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(615), op: ExtentReopen { repair_id: ReconciliationId(615), extent_id: 64 }, state: ClientData([InProgress, New, New]) }, : downstairs
48793 Sep 22 23:15:15.656 INFO [0] client ExtentReopen { repair_id: ReconciliationId(615), extent_id: 64 }
48794 Sep 22 23:15:15.656 INFO [1] received reconcile message
48795 Sep 22 23:15:15.656 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(615), op: ExtentReopen { repair_id: ReconciliationId(615), extent_id: 64 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48796 Sep 22 23:15:15.656 INFO [1] client ExtentReopen { repair_id: ReconciliationId(615), extent_id: 64 }
48797 Sep 22 23:15:15.656 INFO [2] received reconcile message
48798 Sep 22 23:15:15.656 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(615), op: ExtentReopen { repair_id: ReconciliationId(615), extent_id: 64 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48799 Sep 22 23:15:15.656 INFO [2] client ExtentReopen { repair_id: ReconciliationId(615), extent_id: 64 }
48800 Sep 22 23:15:15.656 DEBG 615 Reopen extent 64
48801 Sep 22 23:15:15.657 DEBG 615 Reopen extent 64
48802 Sep 22 23:15:15.658 DEBG 615 Reopen extent 64
48803 Sep 22 23:15:15.658 DEBG [2] It's time to notify for 615
48804 Sep 22 23:15:15.658 INFO Completion from [2] id:615 status:true
48805 Sep 22 23:15:15.658 INFO [616/752] Repair commands completed
48806 Sep 22 23:15:15.658 INFO Pop front: ReconcileIO { id: ReconciliationId(616), op: ExtentFlush { repair_id: ReconciliationId(616), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48807 Sep 22 23:15:15.658 INFO Sent repair work, now wait for resp
48808 Sep 22 23:15:15.658 INFO [0] received reconcile message
48809 Sep 22 23:15:15.658 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(616), op: ExtentFlush { repair_id: ReconciliationId(616), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48810 Sep 22 23:15:15.658 INFO [0] client ExtentFlush { repair_id: ReconciliationId(616), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48811 Sep 22 23:15:15.658 INFO [1] received reconcile message
48812 Sep 22 23:15:15.658 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(616), op: ExtentFlush { repair_id: ReconciliationId(616), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48813 Sep 22 23:15:15.658 INFO [1] client ExtentFlush { repair_id: ReconciliationId(616), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48814 Sep 22 23:15:15.658 INFO [2] received reconcile message
48815 Sep 22 23:15:15.658 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(616), op: ExtentFlush { repair_id: ReconciliationId(616), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48816 Sep 22 23:15:15.658 INFO [2] client ExtentFlush { repair_id: ReconciliationId(616), extent_id: 93, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48817 Sep 22 23:15:15.659 DEBG 616 Flush extent 93 with f:2 g:2
48818 Sep 22 23:15:15.659 DEBG Flush just extent 93 with f:2 and g:2
48819 Sep 22 23:15:15.659 DEBG [1] It's time to notify for 616
48820 Sep 22 23:15:15.659 INFO Completion from [1] id:616 status:true
48821 Sep 22 23:15:15.659 INFO [617/752] Repair commands completed
48822 Sep 22 23:15:15.659 INFO Pop front: ReconcileIO { id: ReconciliationId(617), op: ExtentClose { repair_id: ReconciliationId(617), extent_id: 93 }, state: ClientData([New, New, New]) }
48823 Sep 22 23:15:15.659 INFO Sent repair work, now wait for resp
48824 Sep 22 23:15:15.659 INFO [0] received reconcile message
48825 Sep 22 23:15:15.659 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(617), op: ExtentClose { repair_id: ReconciliationId(617), extent_id: 93 }, state: ClientData([InProgress, New, New]) }, : downstairs
48826 Sep 22 23:15:15.659 INFO [0] client ExtentClose { repair_id: ReconciliationId(617), extent_id: 93 }
48827 Sep 22 23:15:15.659 INFO [1] received reconcile message
48828 Sep 22 23:15:15.659 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(617), op: ExtentClose { repair_id: ReconciliationId(617), extent_id: 93 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48829 Sep 22 23:15:15.659 INFO [1] client ExtentClose { repair_id: ReconciliationId(617), extent_id: 93 }
48830 Sep 22 23:15:15.659 INFO [2] received reconcile message
48831 Sep 22 23:15:15.659 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(617), op: ExtentClose { repair_id: ReconciliationId(617), extent_id: 93 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48832 Sep 22 23:15:15.659 INFO [2] client ExtentClose { repair_id: ReconciliationId(617), extent_id: 93 }
48833 Sep 22 23:15:15.659 DEBG 617 Close extent 93
48834 Sep 22 23:15:15.659 DEBG 617 Close extent 93
48835 Sep 22 23:15:15.660 DEBG 617 Close extent 93
48836 Sep 22 23:15:15.660 DEBG [2] It's time to notify for 617
48837 Sep 22 23:15:15.660 INFO Completion from [2] id:617 status:true
48838 Sep 22 23:15:15.660 INFO [618/752] Repair commands completed
48839 Sep 22 23:15:15.660 INFO Pop front: ReconcileIO { id: ReconciliationId(618), op: ExtentRepair { repair_id: ReconciliationId(618), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48840 Sep 22 23:15:15.660 INFO Sent repair work, now wait for resp
48841 Sep 22 23:15:15.660 INFO [0] received reconcile message
48842 Sep 22 23:15:15.660 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(618), op: ExtentRepair { repair_id: ReconciliationId(618), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48843 Sep 22 23:15:15.660 INFO [0] client ExtentRepair { repair_id: ReconciliationId(618), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48844 Sep 22 23:15:15.660 INFO [0] Sending repair request ReconciliationId(618)
48845 Sep 22 23:15:15.660 INFO [1] received reconcile message
48846 Sep 22 23:15:15.660 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(618), op: ExtentRepair { repair_id: ReconciliationId(618), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48847 Sep 22 23:15:15.660 INFO [1] client ExtentRepair { repair_id: ReconciliationId(618), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48848 Sep 22 23:15:15.660 INFO [1] No action required ReconciliationId(618)
48849 Sep 22 23:15:15.660 INFO [2] received reconcile message
48850 Sep 22 23:15:15.660 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(618), op: ExtentRepair { repair_id: ReconciliationId(618), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48851 Sep 22 23:15:15.660 INFO [2] client ExtentRepair { repair_id: ReconciliationId(618), extent_id: 93, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48852 Sep 22 23:15:15.661 INFO [2] No action required ReconciliationId(618)
48853 Sep 22 23:15:15.661 DEBG 618 Repair extent 93 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
48854 Sep 22 23:15:15.661 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/05D.copy"
48855 Sep 22 23:15:15.722 INFO accepted connection, remote_addr: 127.0.0.1:65206, local_addr: 127.0.0.1:46213, task: repair
48856 Sep 22 23:15:15.722 TRCE incoming request, uri: /extent/93/files, method: GET, req_id: 54a4a2dd-1cc0-4e02-93b4-56566c1bf8b8, remote_addr: 127.0.0.1:65206, local_addr: 127.0.0.1:46213, task: repair
48857 Sep 22 23:15:15.722 INFO request completed, latency_us: 190, response_code: 200, uri: /extent/93/files, method: GET, req_id: 54a4a2dd-1cc0-4e02-93b4-56566c1bf8b8, remote_addr: 127.0.0.1:65206, local_addr: 127.0.0.1:46213, task: repair
48858 Sep 22 23:15:15.723 INFO eid:93 Found repair files: ["05D", "05D.db"]
48859 Sep 22 23:15:15.723 TRCE incoming request, uri: /newextent/93/data, method: GET, req_id: 3943d24b-41ef-46a1-a230-f5e75478792c, remote_addr: 127.0.0.1:65206, local_addr: 127.0.0.1:46213, task: repair
48860 Sep 22 23:15:15.723 INFO request completed, latency_us: 314, response_code: 200, uri: /newextent/93/data, method: GET, req_id: 3943d24b-41ef-46a1-a230-f5e75478792c, remote_addr: 127.0.0.1:65206, local_addr: 127.0.0.1:46213, task: repair
48861 Sep 22 23:15:15.728 TRCE incoming request, uri: /newextent/93/db, method: GET, req_id: 424de2ab-7145-4ec1-adb1-c353320a7b56, remote_addr: 127.0.0.1:65206, local_addr: 127.0.0.1:46213, task: repair
48862 Sep 22 23:15:15.729 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/93/db, method: GET, req_id: 424de2ab-7145-4ec1-adb1-c353320a7b56, remote_addr: 127.0.0.1:65206, local_addr: 127.0.0.1:46213, task: repair
48863 Sep 22 23:15:15.730 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/05D.copy" to "/tmp/downstairs-vrx8aK6L/00/000/05D.replace"
48864 Sep 22 23:15:15.730 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48865 Sep 22 23:15:15.731 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/05D.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
48866 Sep 22 23:15:15.731 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/05D"
48867 Sep 22 23:15:15.731 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/05D.db"
48868 Sep 22 23:15:15.731 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48869 Sep 22 23:15:15.731 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/05D.replace" to "/tmp/downstairs-vrx8aK6L/00/000/05D.completed"
48870 Sep 22 23:15:15.731 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48871 Sep 22 23:15:15.731 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48872 Sep 22 23:15:15.731 DEBG [0] It's time to notify for 618
48873 Sep 22 23:15:15.731 INFO Completion from [0] id:618 status:true
48874 Sep 22 23:15:15.731 INFO [619/752] Repair commands completed
48875 Sep 22 23:15:15.731 INFO Pop front: ReconcileIO { id: ReconciliationId(619), op: ExtentReopen { repair_id: ReconciliationId(619), extent_id: 93 }, state: ClientData([New, New, New]) }
48876 Sep 22 23:15:15.731 INFO Sent repair work, now wait for resp
48877 Sep 22 23:15:15.731 INFO [0] received reconcile message
48878 Sep 22 23:15:15.731 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(619), op: ExtentReopen { repair_id: ReconciliationId(619), extent_id: 93 }, state: ClientData([InProgress, New, New]) }, : downstairs
48879 Sep 22 23:15:15.731 INFO [0] client ExtentReopen { repair_id: ReconciliationId(619), extent_id: 93 }
48880 Sep 22 23:15:15.732 INFO [1] received reconcile message
48881 Sep 22 23:15:15.732 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(619), op: ExtentReopen { repair_id: ReconciliationId(619), extent_id: 93 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48882 Sep 22 23:15:15.732 INFO [1] client ExtentReopen { repair_id: ReconciliationId(619), extent_id: 93 }
48883 Sep 22 23:15:15.732 INFO [2] received reconcile message
48884 Sep 22 23:15:15.732 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(619), op: ExtentReopen { repair_id: ReconciliationId(619), extent_id: 93 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48885 Sep 22 23:15:15.732 INFO [2] client ExtentReopen { repair_id: ReconciliationId(619), extent_id: 93 }
48886 Sep 22 23:15:15.732 DEBG 619 Reopen extent 93
48887 Sep 22 23:15:15.732 DEBG 619 Reopen extent 93
48888 Sep 22 23:15:15.733 DEBG 619 Reopen extent 93
48889 Sep 22 23:15:15.733 DEBG [2] It's time to notify for 619
48890 Sep 22 23:15:15.734 INFO Completion from [2] id:619 status:true
48891 Sep 22 23:15:15.734 INFO [620/752] Repair commands completed
48892 Sep 22 23:15:15.734 INFO Pop front: ReconcileIO { id: ReconciliationId(620), op: ExtentFlush { repair_id: ReconciliationId(620), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48893 Sep 22 23:15:15.734 INFO Sent repair work, now wait for resp
48894 Sep 22 23:15:15.734 INFO [0] received reconcile message
48895 Sep 22 23:15:15.734 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(620), op: ExtentFlush { repair_id: ReconciliationId(620), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48896 Sep 22 23:15:15.734 INFO [0] client ExtentFlush { repair_id: ReconciliationId(620), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48897 Sep 22 23:15:15.734 INFO [1] received reconcile message
48898 Sep 22 23:15:15.734 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(620), op: ExtentFlush { repair_id: ReconciliationId(620), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48899 Sep 22 23:15:15.734 INFO [1] client ExtentFlush { repair_id: ReconciliationId(620), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48900 Sep 22 23:15:15.734 INFO [2] received reconcile message
48901 Sep 22 23:15:15.734 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(620), op: ExtentFlush { repair_id: ReconciliationId(620), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48902 Sep 22 23:15:15.734 INFO [2] client ExtentFlush { repair_id: ReconciliationId(620), extent_id: 40, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48903 Sep 22 23:15:15.734 DEBG 620 Flush extent 40 with f:2 g:2
48904 Sep 22 23:15:15.734 DEBG Flush just extent 40 with f:2 and g:2
48905 Sep 22 23:15:15.734 DEBG [1] It's time to notify for 620
48906 Sep 22 23:15:15.734 INFO Completion from [1] id:620 status:true
48907 Sep 22 23:15:15.734 INFO [621/752] Repair commands completed
48908 Sep 22 23:15:15.734 INFO Pop front: ReconcileIO { id: ReconciliationId(621), op: ExtentClose { repair_id: ReconciliationId(621), extent_id: 40 }, state: ClientData([New, New, New]) }
48909 Sep 22 23:15:15.734 INFO Sent repair work, now wait for resp
48910 Sep 22 23:15:15.734 INFO [0] received reconcile message
48911 Sep 22 23:15:15.734 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(621), op: ExtentClose { repair_id: ReconciliationId(621), extent_id: 40 }, state: ClientData([InProgress, New, New]) }, : downstairs
48912 Sep 22 23:15:15.734 INFO [0] client ExtentClose { repair_id: ReconciliationId(621), extent_id: 40 }
48913 Sep 22 23:15:15.734 INFO [1] received reconcile message
48914 Sep 22 23:15:15.734 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(621), op: ExtentClose { repair_id: ReconciliationId(621), extent_id: 40 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48915 Sep 22 23:15:15.734 INFO [1] client ExtentClose { repair_id: ReconciliationId(621), extent_id: 40 }
48916 Sep 22 23:15:15.734 INFO [2] received reconcile message
48917 Sep 22 23:15:15.734 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(621), op: ExtentClose { repair_id: ReconciliationId(621), extent_id: 40 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48918 Sep 22 23:15:15.734 INFO [2] client ExtentClose { repair_id: ReconciliationId(621), extent_id: 40 }
48919 Sep 22 23:15:15.735 DEBG 621 Close extent 40
48920 Sep 22 23:15:15.735 DEBG 621 Close extent 40
48921 Sep 22 23:15:15.735 DEBG 621 Close extent 40
48922 Sep 22 23:15:15.735 DEBG [2] It's time to notify for 621
48923 Sep 22 23:15:15.735 INFO Completion from [2] id:621 status:true
48924 Sep 22 23:15:15.736 INFO [622/752] Repair commands completed
48925 Sep 22 23:15:15.736 INFO Pop front: ReconcileIO { id: ReconciliationId(622), op: ExtentRepair { repair_id: ReconciliationId(622), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
48926 Sep 22 23:15:15.736 INFO Sent repair work, now wait for resp
48927 Sep 22 23:15:15.736 INFO [0] received reconcile message
48928 Sep 22 23:15:15.736 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(622), op: ExtentRepair { repair_id: ReconciliationId(622), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
48929 Sep 22 23:15:15.736 INFO [0] client ExtentRepair { repair_id: ReconciliationId(622), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48930 Sep 22 23:15:15.736 INFO [0] Sending repair request ReconciliationId(622)
48931 Sep 22 23:15:15.736 INFO [1] received reconcile message
48932 Sep 22 23:15:15.736 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(622), op: ExtentRepair { repair_id: ReconciliationId(622), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48933 Sep 22 23:15:15.736 INFO [1] client ExtentRepair { repair_id: ReconciliationId(622), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48934 Sep 22 23:15:15.736 INFO [1] No action required ReconciliationId(622)
48935 Sep 22 23:15:15.736 INFO [2] received reconcile message
48936 Sep 22 23:15:15.736 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(622), op: ExtentRepair { repair_id: ReconciliationId(622), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
48937 Sep 22 23:15:15.736 INFO [2] client ExtentRepair { repair_id: ReconciliationId(622), extent_id: 40, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
48938 Sep 22 23:15:15.736 INFO [2] No action required ReconciliationId(622)
48939 Sep 22 23:15:15.736 DEBG 622 Repair extent 40 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
48940 Sep 22 23:15:15.736 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/028.copy"
48941 Sep 22 23:15:15.800 INFO accepted connection, remote_addr: 127.0.0.1:49461, local_addr: 127.0.0.1:46213, task: repair
48942 Sep 22 23:15:15.800 TRCE incoming request, uri: /extent/40/files, method: GET, req_id: 1a4c69bb-3433-477a-8a4b-16a696305190, remote_addr: 127.0.0.1:49461, local_addr: 127.0.0.1:46213, task: repair
48943 Sep 22 23:15:15.800 INFO request completed, latency_us: 195, response_code: 200, uri: /extent/40/files, method: GET, req_id: 1a4c69bb-3433-477a-8a4b-16a696305190, remote_addr: 127.0.0.1:49461, local_addr: 127.0.0.1:46213, task: repair
48944 Sep 22 23:15:15.800 INFO eid:40 Found repair files: ["028", "028.db"]
48945 Sep 22 23:15:15.801 TRCE incoming request, uri: /newextent/40/data, method: GET, req_id: 1748a9a3-a49b-4012-99a4-68ee072fa7eb, remote_addr: 127.0.0.1:49461, local_addr: 127.0.0.1:46213, task: repair
48946 Sep 22 23:15:15.801 INFO request completed, latency_us: 326, response_code: 200, uri: /newextent/40/data, method: GET, req_id: 1748a9a3-a49b-4012-99a4-68ee072fa7eb, remote_addr: 127.0.0.1:49461, local_addr: 127.0.0.1:46213, task: repair
48947 Sep 22 23:15:15.806 TRCE incoming request, uri: /newextent/40/db, method: GET, req_id: 964fce30-9850-4e3e-b2c4-7f5d119b95bf, remote_addr: 127.0.0.1:49461, local_addr: 127.0.0.1:46213, task: repair
48948 Sep 22 23:15:15.806 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/40/db, method: GET, req_id: 964fce30-9850-4e3e-b2c4-7f5d119b95bf, remote_addr: 127.0.0.1:49461, local_addr: 127.0.0.1:46213, task: repair
48949 Sep 22 23:15:15.808 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/028.copy" to "/tmp/downstairs-vrx8aK6L/00/000/028.replace"
48950 Sep 22 23:15:15.808 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48951 Sep 22 23:15:15.808 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/028.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
48952 Sep 22 23:15:15.809 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/028"
48953 Sep 22 23:15:15.809 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/028.db"
48954 Sep 22 23:15:15.809 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48955 Sep 22 23:15:15.809 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/028.replace" to "/tmp/downstairs-vrx8aK6L/00/000/028.completed"
48956 Sep 22 23:15:15.809 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48957 Sep 22 23:15:15.809 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
48958 Sep 22 23:15:15.809 DEBG [0] It's time to notify for 622
48959 Sep 22 23:15:15.809 INFO Completion from [0] id:622 status:true
48960 Sep 22 23:15:15.809 INFO [623/752] Repair commands completed
48961 Sep 22 23:15:15.809 INFO Pop front: ReconcileIO { id: ReconciliationId(623), op: ExtentReopen { repair_id: ReconciliationId(623), extent_id: 40 }, state: ClientData([New, New, New]) }
48962 Sep 22 23:15:15.809 INFO Sent repair work, now wait for resp
48963 Sep 22 23:15:15.809 INFO [0] received reconcile message
48964 Sep 22 23:15:15.809 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(623), op: ExtentReopen { repair_id: ReconciliationId(623), extent_id: 40 }, state: ClientData([InProgress, New, New]) }, : downstairs
48965 Sep 22 23:15:15.809 INFO [0] client ExtentReopen { repair_id: ReconciliationId(623), extent_id: 40 }
48966 Sep 22 23:15:15.809 INFO [1] received reconcile message
48967 Sep 22 23:15:15.809 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(623), op: ExtentReopen { repair_id: ReconciliationId(623), extent_id: 40 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
48968 Sep 22 23:15:15.809 INFO [1] client ExtentReopen { repair_id: ReconciliationId(623), extent_id: 40 }
48969 Sep 22 23:15:15.809 INFO [2] received reconcile message
48970 Sep 22 23:15:15.809 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(623), op: ExtentReopen { repair_id: ReconciliationId(623), extent_id: 40 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
48971 Sep 22 23:15:15.809 INFO [2] client ExtentReopen { repair_id: ReconciliationId(623), extent_id: 40 }
48972 Sep 22 23:15:15.810 DEBG 623 Reopen extent 40
48973 Sep 22 23:15:15.810 DEBG 623 Reopen extent 40
48974 Sep 22 23:15:15.811 DEBG 623 Reopen extent 40
48975 Sep 22 23:15:15.811 DEBG [2] It's time to notify for 623
48976 Sep 22 23:15:15.811 INFO Completion from [2] id:623 status:true
48977 Sep 22 23:15:15.811 INFO [624/752] Repair commands completed
48978 Sep 22 23:15:15.811 INFO Pop front: ReconcileIO { id: ReconciliationId(624), op: ExtentFlush { repair_id: ReconciliationId(624), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
48979 Sep 22 23:15:15.811 INFO Sent repair work, now wait for resp
48980 Sep 22 23:15:15.811 INFO [0] received reconcile message
48981 Sep 22 23:15:15.811 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(624), op: ExtentFlush { repair_id: ReconciliationId(624), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
48982 Sep 22 23:15:15.811 INFO [0] client ExtentFlush { repair_id: ReconciliationId(624), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48983 Sep 22 23:15:15.812 INFO [1] received reconcile message
48984 Sep 22 23:15:15.812 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(624), op: ExtentFlush { repair_id: ReconciliationId(624), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
48985 Sep 22 23:15:15.812 INFO [1] client ExtentFlush { repair_id: ReconciliationId(624), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48986 Sep 22 23:15:15.812 INFO [2] received reconcile message
48987 Sep 22 23:15:15.812 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(624), op: ExtentFlush { repair_id: ReconciliationId(624), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
48988 Sep 22 23:15:15.812 INFO [2] client ExtentFlush { repair_id: ReconciliationId(624), extent_id: 49, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
48989 Sep 22 23:15:15.812 DEBG 624 Flush extent 49 with f:2 g:2
48990 Sep 22 23:15:15.812 DEBG Flush just extent 49 with f:2 and g:2
48991 Sep 22 23:15:15.812 DEBG [1] It's time to notify for 624
48992 Sep 22 23:15:15.812 INFO Completion from [1] id:624 status:true
48993 Sep 22 23:15:15.812 INFO [625/752] Repair commands completed
48994 Sep 22 23:15:15.812 INFO Pop front: ReconcileIO { id: ReconciliationId(625), op: ExtentClose { repair_id: ReconciliationId(625), extent_id: 49 }, state: ClientData([New, New, New]) }
48995 Sep 22 23:15:15.812 INFO Sent repair work, now wait for resp
48996 Sep 22 23:15:15.812 INFO [0] received reconcile message
48997 Sep 22 23:15:15.812 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(625), op: ExtentClose { repair_id: ReconciliationId(625), extent_id: 49 }, state: ClientData([InProgress, New, New]) }, : downstairs
48998 Sep 22 23:15:15.812 INFO [0] client ExtentClose { repair_id: ReconciliationId(625), extent_id: 49 }
48999 Sep 22 23:15:15.812 INFO [1] received reconcile message
49000 Sep 22 23:15:15.812 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(625), op: ExtentClose { repair_id: ReconciliationId(625), extent_id: 49 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49001 Sep 22 23:15:15.812 INFO [1] client ExtentClose { repair_id: ReconciliationId(625), extent_id: 49 }
49002 Sep 22 23:15:15.812 INFO [2] received reconcile message
49003 Sep 22 23:15:15.812 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(625), op: ExtentClose { repair_id: ReconciliationId(625), extent_id: 49 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49004 Sep 22 23:15:15.812 INFO [2] client ExtentClose { repair_id: ReconciliationId(625), extent_id: 49 }
49005 Sep 22 23:15:15.812 DEBG 625 Close extent 49
49006 Sep 22 23:15:15.813 DEBG 625 Close extent 49
49007 Sep 22 23:15:15.813 DEBG 625 Close extent 49
49008 Sep 22 23:15:15.813 DEBG [2] It's time to notify for 625
49009 Sep 22 23:15:15.813 INFO Completion from [2] id:625 status:true
49010 Sep 22 23:15:15.813 INFO [626/752] Repair commands completed
49011 Sep 22 23:15:15.813 INFO Pop front: ReconcileIO { id: ReconciliationId(626), op: ExtentRepair { repair_id: ReconciliationId(626), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49012 Sep 22 23:15:15.813 INFO Sent repair work, now wait for resp
49013 Sep 22 23:15:15.813 INFO [0] received reconcile message
49014 Sep 22 23:15:15.813 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(626), op: ExtentRepair { repair_id: ReconciliationId(626), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49015 Sep 22 23:15:15.813 INFO [0] client ExtentRepair { repair_id: ReconciliationId(626), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49016 Sep 22 23:15:15.813 INFO [0] Sending repair request ReconciliationId(626)
49017 Sep 22 23:15:15.814 INFO [1] received reconcile message
49018 Sep 22 23:15:15.814 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(626), op: ExtentRepair { repair_id: ReconciliationId(626), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49019 Sep 22 23:15:15.814 INFO [1] client ExtentRepair { repair_id: ReconciliationId(626), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49020 Sep 22 23:15:15.814 INFO [1] No action required ReconciliationId(626)
49021 Sep 22 23:15:15.814 INFO [2] received reconcile message
49022 Sep 22 23:15:15.814 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(626), op: ExtentRepair { repair_id: ReconciliationId(626), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49023 Sep 22 23:15:15.814 INFO [2] client ExtentRepair { repair_id: ReconciliationId(626), extent_id: 49, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49024 Sep 22 23:15:15.814 INFO [2] No action required ReconciliationId(626)
49025 Sep 22 23:15:15.814 DEBG 626 Repair extent 49 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
49026 Sep 22 23:15:15.814 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/031.copy"
49027 Sep 22 23:15:15.848 DEBG IO Flush 1091 has deps [JobId(1090), JobId(1089)]
49028 Sep 22 23:15:15.851 DEBG Flush :1089 extent_limit None deps:[JobId(1088), JobId(1087)] res:true f:34 g:1
49029 Sep 22 23:15:15.857 DEBG Read :1090 deps:[JobId(1089)] res:true
49030 Sep 22 23:15:15.877 INFO accepted connection, remote_addr: 127.0.0.1:47650, local_addr: 127.0.0.1:46213, task: repair
49031 Sep 22 23:15:15.878 TRCE incoming request, uri: /extent/49/files, method: GET, req_id: 69056ed3-f253-4644-8347-812ca20dfb5f, remote_addr: 127.0.0.1:47650, local_addr: 127.0.0.1:46213, task: repair
49032 Sep 22 23:15:15.878 INFO request completed, latency_us: 200, response_code: 200, uri: /extent/49/files, method: GET, req_id: 69056ed3-f253-4644-8347-812ca20dfb5f, remote_addr: 127.0.0.1:47650, local_addr: 127.0.0.1:46213, task: repair
49033 Sep 22 23:15:15.878 INFO eid:49 Found repair files: ["031", "031.db"]
49034 Sep 22 23:15:15.878 TRCE incoming request, uri: /newextent/49/data, method: GET, req_id: b04e0650-2f99-4855-991d-1cae2668e7c6, remote_addr: 127.0.0.1:47650, local_addr: 127.0.0.1:46213, task: repair
49035 Sep 22 23:15:15.879 INFO request completed, latency_us: 255, response_code: 200, uri: /newextent/49/data, method: GET, req_id: b04e0650-2f99-4855-991d-1cae2668e7c6, remote_addr: 127.0.0.1:47650, local_addr: 127.0.0.1:46213, task: repair
49036 Sep 22 23:15:15.884 TRCE incoming request, uri: /newextent/49/db, method: GET, req_id: 99a56e2c-0a8b-406c-ae8e-61e11e3b7f7d, remote_addr: 127.0.0.1:47650, local_addr: 127.0.0.1:46213, task: repair
49037 Sep 22 23:15:15.884 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/49/db, method: GET, req_id: 99a56e2c-0a8b-406c-ae8e-61e11e3b7f7d, remote_addr: 127.0.0.1:47650, local_addr: 127.0.0.1:46213, task: repair
49038 Sep 22 23:15:15.885 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/031.copy" to "/tmp/downstairs-vrx8aK6L/00/000/031.replace"
49039 Sep 22 23:15:15.885 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49040 Sep 22 23:15:15.886 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/031.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
49041 Sep 22 23:15:15.886 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/031"
49042 Sep 22 23:15:15.887 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/031.db"
49043 Sep 22 23:15:15.887 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49044 Sep 22 23:15:15.887 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/031.replace" to "/tmp/downstairs-vrx8aK6L/00/000/031.completed"
49045 Sep 22 23:15:15.887 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49046 Sep 22 23:15:15.887 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49047 Sep 22 23:15:15.887 DEBG [0] It's time to notify for 626
49048 Sep 22 23:15:15.887 INFO Completion from [0] id:626 status:true
49049 Sep 22 23:15:15.887 INFO [627/752] Repair commands completed
49050 Sep 22 23:15:15.887 INFO Pop front: ReconcileIO { id: ReconciliationId(627), op: ExtentReopen { repair_id: ReconciliationId(627), extent_id: 49 }, state: ClientData([New, New, New]) }
49051 Sep 22 23:15:15.887 INFO Sent repair work, now wait for resp
49052 Sep 22 23:15:15.887 INFO [0] received reconcile message
49053 Sep 22 23:15:15.887 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(627), op: ExtentReopen { repair_id: ReconciliationId(627), extent_id: 49 }, state: ClientData([InProgress, New, New]) }, : downstairs
49054 Sep 22 23:15:15.887 INFO [0] client ExtentReopen { repair_id: ReconciliationId(627), extent_id: 49 }
49055 Sep 22 23:15:15.887 INFO [1] received reconcile message
49056 Sep 22 23:15:15.887 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(627), op: ExtentReopen { repair_id: ReconciliationId(627), extent_id: 49 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49057 Sep 22 23:15:15.887 INFO [1] client ExtentReopen { repair_id: ReconciliationId(627), extent_id: 49 }
49058 Sep 22 23:15:15.887 INFO [2] received reconcile message
49059 Sep 22 23:15:15.887 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(627), op: ExtentReopen { repair_id: ReconciliationId(627), extent_id: 49 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49060 Sep 22 23:15:15.887 INFO [2] client ExtentReopen { repair_id: ReconciliationId(627), extent_id: 49 }
49061 Sep 22 23:15:15.887 DEBG 627 Reopen extent 49
49062 Sep 22 23:15:15.888 DEBG 627 Reopen extent 49
49063 Sep 22 23:15:15.889 DEBG 627 Reopen extent 49
49064 Sep 22 23:15:15.889 DEBG [2] It's time to notify for 627
49065 Sep 22 23:15:15.889 INFO Completion from [2] id:627 status:true
49066 Sep 22 23:15:15.889 INFO [628/752] Repair commands completed
49067 Sep 22 23:15:15.889 INFO Pop front: ReconcileIO { id: ReconciliationId(628), op: ExtentFlush { repair_id: ReconciliationId(628), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49068 Sep 22 23:15:15.889 INFO Sent repair work, now wait for resp
49069 Sep 22 23:15:15.889 INFO [0] received reconcile message
49070 Sep 22 23:15:15.889 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(628), op: ExtentFlush { repair_id: ReconciliationId(628), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49071 Sep 22 23:15:15.889 INFO [0] client ExtentFlush { repair_id: ReconciliationId(628), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49072 Sep 22 23:15:15.889 INFO [1] received reconcile message
49073 Sep 22 23:15:15.889 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(628), op: ExtentFlush { repair_id: ReconciliationId(628), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49074 Sep 22 23:15:15.889 INFO [1] client ExtentFlush { repair_id: ReconciliationId(628), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49075 Sep 22 23:15:15.889 INFO [2] received reconcile message
49076 Sep 22 23:15:15.889 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(628), op: ExtentFlush { repair_id: ReconciliationId(628), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49077 Sep 22 23:15:15.889 INFO [2] client ExtentFlush { repair_id: ReconciliationId(628), extent_id: 81, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49078 Sep 22 23:15:15.890 DEBG 628 Flush extent 81 with f:2 g:2
49079 Sep 22 23:15:15.890 DEBG Flush just extent 81 with f:2 and g:2
49080 Sep 22 23:15:15.890 DEBG [1] It's time to notify for 628
49081 Sep 22 23:15:15.890 INFO Completion from [1] id:628 status:true
49082 Sep 22 23:15:15.890 INFO [629/752] Repair commands completed
49083 Sep 22 23:15:15.890 INFO Pop front: ReconcileIO { id: ReconciliationId(629), op: ExtentClose { repair_id: ReconciliationId(629), extent_id: 81 }, state: ClientData([New, New, New]) }
49084 Sep 22 23:15:15.890 INFO Sent repair work, now wait for resp
49085 Sep 22 23:15:15.890 INFO [0] received reconcile message
49086 Sep 22 23:15:15.890 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(629), op: ExtentClose { repair_id: ReconciliationId(629), extent_id: 81 }, state: ClientData([InProgress, New, New]) }, : downstairs
49087 Sep 22 23:15:15.890 INFO [0] client ExtentClose { repair_id: ReconciliationId(629), extent_id: 81 }
49088 Sep 22 23:15:15.890 INFO [1] received reconcile message
49089 Sep 22 23:15:15.890 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(629), op: ExtentClose { repair_id: ReconciliationId(629), extent_id: 81 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49090 Sep 22 23:15:15.890 INFO [1] client ExtentClose { repair_id: ReconciliationId(629), extent_id: 81 }
49091 Sep 22 23:15:15.890 INFO [2] received reconcile message
49092 Sep 22 23:15:15.890 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(629), op: ExtentClose { repair_id: ReconciliationId(629), extent_id: 81 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49093 Sep 22 23:15:15.890 INFO [2] client ExtentClose { repair_id: ReconciliationId(629), extent_id: 81 }
49094 Sep 22 23:15:15.890 DEBG 629 Close extent 81
49095 Sep 22 23:15:15.890 DEBG 629 Close extent 81
49096 Sep 22 23:15:15.891 DEBG 629 Close extent 81
49097 Sep 22 23:15:15.891 DEBG [2] It's time to notify for 629
49098 Sep 22 23:15:15.891 INFO Completion from [2] id:629 status:true
49099 Sep 22 23:15:15.891 INFO [630/752] Repair commands completed
49100 Sep 22 23:15:15.891 INFO Pop front: ReconcileIO { id: ReconciliationId(630), op: ExtentRepair { repair_id: ReconciliationId(630), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49101 Sep 22 23:15:15.891 INFO Sent repair work, now wait for resp
49102 Sep 22 23:15:15.891 INFO [0] received reconcile message
49103 Sep 22 23:15:15.891 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(630), op: ExtentRepair { repair_id: ReconciliationId(630), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49104 Sep 22 23:15:15.891 INFO [0] client ExtentRepair { repair_id: ReconciliationId(630), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49105 Sep 22 23:15:15.891 INFO [0] Sending repair request ReconciliationId(630)
49106 Sep 22 23:15:15.891 INFO [1] received reconcile message
49107 Sep 22 23:15:15.891 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(630), op: ExtentRepair { repair_id: ReconciliationId(630), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49108 Sep 22 23:15:15.891 INFO [1] client ExtentRepair { repair_id: ReconciliationId(630), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49109 Sep 22 23:15:15.891 INFO [1] No action required ReconciliationId(630)
49110 Sep 22 23:15:15.891 INFO [2] received reconcile message
49111 Sep 22 23:15:15.891 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(630), op: ExtentRepair { repair_id: ReconciliationId(630), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49112 Sep 22 23:15:15.891 INFO [2] client ExtentRepair { repair_id: ReconciliationId(630), extent_id: 81, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49113 Sep 22 23:15:15.891 INFO [2] No action required ReconciliationId(630)
49114 Sep 22 23:15:15.892 DEBG 630 Repair extent 81 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
49115 Sep 22 23:15:15.892 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/051.copy"
49116 Sep 22 23:15:15.956 INFO accepted connection, remote_addr: 127.0.0.1:43364, local_addr: 127.0.0.1:46213, task: repair
49117 Sep 22 23:15:15.956 TRCE incoming request, uri: /extent/81/files, method: GET, req_id: fec89fa8-a7a6-4d4f-9343-2d14afc6ce75, remote_addr: 127.0.0.1:43364, local_addr: 127.0.0.1:46213, task: repair
49118 Sep 22 23:15:15.957 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/81/files, method: GET, req_id: fec89fa8-a7a6-4d4f-9343-2d14afc6ce75, remote_addr: 127.0.0.1:43364, local_addr: 127.0.0.1:46213, task: repair
49119 Sep 22 23:15:15.957 INFO eid:81 Found repair files: ["051", "051.db"]
49120 Sep 22 23:15:15.957 TRCE incoming request, uri: /newextent/81/data, method: GET, req_id: ed6ea4bb-430a-4fb8-8e09-417758209545, remote_addr: 127.0.0.1:43364, local_addr: 127.0.0.1:46213, task: repair
49121 Sep 22 23:15:15.957 INFO request completed, latency_us: 306, response_code: 200, uri: /newextent/81/data, method: GET, req_id: ed6ea4bb-430a-4fb8-8e09-417758209545, remote_addr: 127.0.0.1:43364, local_addr: 127.0.0.1:46213, task: repair
49122 Sep 22 23:15:15.963 TRCE incoming request, uri: /newextent/81/db, method: GET, req_id: 9940fb4a-7356-4e37-9955-070dd08f83c5, remote_addr: 127.0.0.1:43364, local_addr: 127.0.0.1:46213, task: repair
49123 Sep 22 23:15:15.963 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/81/db, method: GET, req_id: 9940fb4a-7356-4e37-9955-070dd08f83c5, remote_addr: 127.0.0.1:43364, local_addr: 127.0.0.1:46213, task: repair
49124 Sep 22 23:15:15.964 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/051.copy" to "/tmp/downstairs-vrx8aK6L/00/000/051.replace"
49125 Sep 22 23:15:15.964 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49126 Sep 22 23:15:15.965 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/051.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
49127 Sep 22 23:15:15.965 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/051"
49128 Sep 22 23:15:15.965 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/051.db"
49129 Sep 22 23:15:15.965 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49130 Sep 22 23:15:15.965 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/051.replace" to "/tmp/downstairs-vrx8aK6L/00/000/051.completed"
49131 Sep 22 23:15:15.965 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49132 Sep 22 23:15:15.965 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49133 Sep 22 23:15:15.966 DEBG [0] It's time to notify for 630
49134 Sep 22 23:15:15.966 INFO Completion from [0] id:630 status:true
49135 Sep 22 23:15:15.966 INFO [631/752] Repair commands completed
49136 Sep 22 23:15:15.966 INFO Pop front: ReconcileIO { id: ReconciliationId(631), op: ExtentReopen { repair_id: ReconciliationId(631), extent_id: 81 }, state: ClientData([New, New, New]) }
49137 Sep 22 23:15:15.966 INFO Sent repair work, now wait for resp
49138 Sep 22 23:15:15.966 INFO [0] received reconcile message
49139 Sep 22 23:15:15.966 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(631), op: ExtentReopen { repair_id: ReconciliationId(631), extent_id: 81 }, state: ClientData([InProgress, New, New]) }, : downstairs
49140 Sep 22 23:15:15.966 INFO [0] client ExtentReopen { repair_id: ReconciliationId(631), extent_id: 81 }
49141 Sep 22 23:15:15.966 INFO [1] received reconcile message
49142 Sep 22 23:15:15.966 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(631), op: ExtentReopen { repair_id: ReconciliationId(631), extent_id: 81 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49143 Sep 22 23:15:15.966 INFO [1] client ExtentReopen { repair_id: ReconciliationId(631), extent_id: 81 }
49144 Sep 22 23:15:15.966 INFO [2] received reconcile message
49145 Sep 22 23:15:15.966 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(631), op: ExtentReopen { repair_id: ReconciliationId(631), extent_id: 81 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49146 Sep 22 23:15:15.966 INFO [2] client ExtentReopen { repair_id: ReconciliationId(631), extent_id: 81 }
49147 Sep 22 23:15:15.966 DEBG 631 Reopen extent 81
49148 Sep 22 23:15:15.967 DEBG 631 Reopen extent 81
49149 Sep 22 23:15:15.967 DEBG 631 Reopen extent 81
49150 Sep 22 23:15:15.968 DEBG [2] It's time to notify for 631
49151 Sep 22 23:15:15.968 INFO Completion from [2] id:631 status:true
49152 Sep 22 23:15:15.968 INFO [632/752] Repair commands completed
49153 Sep 22 23:15:15.968 INFO Pop front: ReconcileIO { id: ReconciliationId(632), op: ExtentFlush { repair_id: ReconciliationId(632), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49154 Sep 22 23:15:15.968 INFO Sent repair work, now wait for resp
49155 Sep 22 23:15:15.968 INFO [0] received reconcile message
49156 Sep 22 23:15:15.968 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(632), op: ExtentFlush { repair_id: ReconciliationId(632), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49157 Sep 22 23:15:15.968 INFO [0] client ExtentFlush { repair_id: ReconciliationId(632), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49158 Sep 22 23:15:15.968 INFO [1] received reconcile message
49159 Sep 22 23:15:15.968 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(632), op: ExtentFlush { repair_id: ReconciliationId(632), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49160 Sep 22 23:15:15.968 INFO [1] client ExtentFlush { repair_id: ReconciliationId(632), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49161 Sep 22 23:15:15.968 INFO [2] received reconcile message
49162 Sep 22 23:15:15.968 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(632), op: ExtentFlush { repair_id: ReconciliationId(632), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49163 Sep 22 23:15:15.968 INFO [2] client ExtentFlush { repair_id: ReconciliationId(632), extent_id: 170, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49164 Sep 22 23:15:15.968 DEBG 632 Flush extent 170 with f:2 g:2
49165 Sep 22 23:15:15.968 DEBG Flush just extent 170 with f:2 and g:2
49166 Sep 22 23:15:15.968 DEBG [1] It's time to notify for 632
49167 Sep 22 23:15:15.968 INFO Completion from [1] id:632 status:true
49168 Sep 22 23:15:15.968 INFO [633/752] Repair commands completed
49169 Sep 22 23:15:15.968 INFO Pop front: ReconcileIO { id: ReconciliationId(633), op: ExtentClose { repair_id: ReconciliationId(633), extent_id: 170 }, state: ClientData([New, New, New]) }
49170 Sep 22 23:15:15.968 INFO Sent repair work, now wait for resp
49171 Sep 22 23:15:15.969 INFO [0] received reconcile message
49172 Sep 22 23:15:15.969 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(633), op: ExtentClose { repair_id: ReconciliationId(633), extent_id: 170 }, state: ClientData([InProgress, New, New]) }, : downstairs
49173 Sep 22 23:15:15.969 INFO [0] client ExtentClose { repair_id: ReconciliationId(633), extent_id: 170 }
49174 Sep 22 23:15:15.969 INFO [1] received reconcile message
49175 Sep 22 23:15:15.969 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(633), op: ExtentClose { repair_id: ReconciliationId(633), extent_id: 170 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49176 Sep 22 23:15:15.969 INFO [1] client ExtentClose { repair_id: ReconciliationId(633), extent_id: 170 }
49177 Sep 22 23:15:15.969 INFO [2] received reconcile message
49178 Sep 22 23:15:15.969 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(633), op: ExtentClose { repair_id: ReconciliationId(633), extent_id: 170 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49179 Sep 22 23:15:15.969 INFO [2] client ExtentClose { repair_id: ReconciliationId(633), extent_id: 170 }
49180 Sep 22 23:15:15.969 DEBG 633 Close extent 170
49181 Sep 22 23:15:15.969 DEBG 633 Close extent 170
49182 Sep 22 23:15:15.969 DEBG 633 Close extent 170
49183 Sep 22 23:15:15.970 DEBG [2] It's time to notify for 633
49184 Sep 22 23:15:15.970 INFO Completion from [2] id:633 status:true
49185 Sep 22 23:15:15.970 INFO [634/752] Repair commands completed
49186 Sep 22 23:15:15.970 INFO Pop front: ReconcileIO { id: ReconciliationId(634), op: ExtentRepair { repair_id: ReconciliationId(634), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49187 Sep 22 23:15:15.970 INFO Sent repair work, now wait for resp
49188 Sep 22 23:15:15.970 INFO [0] received reconcile message
49189 Sep 22 23:15:15.970 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(634), op: ExtentRepair { repair_id: ReconciliationId(634), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49190 Sep 22 23:15:15.970 INFO [0] client ExtentRepair { repair_id: ReconciliationId(634), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49191 Sep 22 23:15:15.970 INFO [0] Sending repair request ReconciliationId(634)
49192 Sep 22 23:15:15.970 INFO [1] received reconcile message
49193 Sep 22 23:15:15.970 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(634), op: ExtentRepair { repair_id: ReconciliationId(634), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49194 Sep 22 23:15:15.970 INFO [1] client ExtentRepair { repair_id: ReconciliationId(634), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49195 Sep 22 23:15:15.970 INFO [1] No action required ReconciliationId(634)
49196 Sep 22 23:15:15.970 INFO [2] received reconcile message
49197 Sep 22 23:15:15.970 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(634), op: ExtentRepair { repair_id: ReconciliationId(634), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49198 Sep 22 23:15:15.970 INFO [2] client ExtentRepair { repair_id: ReconciliationId(634), extent_id: 170, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49199 Sep 22 23:15:15.970 INFO [2] No action required ReconciliationId(634)
49200 Sep 22 23:15:15.970 DEBG 634 Repair extent 170 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
49201 Sep 22 23:15:15.970 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0AA.copy"
49202 Sep 22 23:15:16.034 INFO accepted connection, remote_addr: 127.0.0.1:63247, local_addr: 127.0.0.1:46213, task: repair
49203 Sep 22 23:15:16.034 TRCE incoming request, uri: /extent/170/files, method: GET, req_id: e4617a76-c241-4332-af4f-2abc369abaf9, remote_addr: 127.0.0.1:63247, local_addr: 127.0.0.1:46213, task: repair
49204 Sep 22 23:15:16.034 INFO request completed, latency_us: 251, response_code: 200, uri: /extent/170/files, method: GET, req_id: e4617a76-c241-4332-af4f-2abc369abaf9, remote_addr: 127.0.0.1:63247, local_addr: 127.0.0.1:46213, task: repair
49205 Sep 22 23:15:16.035 INFO eid:170 Found repair files: ["0AA", "0AA.db"]
49206 Sep 22 23:15:16.035 TRCE incoming request, uri: /newextent/170/data, method: GET, req_id: b38bde04-3774-4f40-bfc4-392d4c077a25, remote_addr: 127.0.0.1:63247, local_addr: 127.0.0.1:46213, task: repair
49207 Sep 22 23:15:16.035 INFO request completed, latency_us: 344, response_code: 200, uri: /newextent/170/data, method: GET, req_id: b38bde04-3774-4f40-bfc4-392d4c077a25, remote_addr: 127.0.0.1:63247, local_addr: 127.0.0.1:46213, task: repair
49208 Sep 22 23:15:16.040 TRCE incoming request, uri: /newextent/170/db, method: GET, req_id: a75aaa63-2b62-4bab-8f14-7481abc6a039, remote_addr: 127.0.0.1:63247, local_addr: 127.0.0.1:46213, task: repair
49209 Sep 22 23:15:16.041 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/170/db, method: GET, req_id: a75aaa63-2b62-4bab-8f14-7481abc6a039, remote_addr: 127.0.0.1:63247, local_addr: 127.0.0.1:46213, task: repair
49210 Sep 22 23:15:16.042 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0AA.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0AA.replace"
49211 Sep 22 23:15:16.042 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49212 Sep 22 23:15:16.043 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0AA.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
49213 Sep 22 23:15:16.043 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0AA"
49214 Sep 22 23:15:16.043 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0AA.db"
49215 Sep 22 23:15:16.043 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49216 Sep 22 23:15:16.043 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0AA.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0AA.completed"
49217 Sep 22 23:15:16.044 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49218 Sep 22 23:15:16.044 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49219 Sep 22 23:15:16.044 DEBG [0] It's time to notify for 634
49220 Sep 22 23:15:16.044 INFO Completion from [0] id:634 status:true
49221 Sep 22 23:15:16.044 INFO [635/752] Repair commands completed
49222 Sep 22 23:15:16.044 INFO Pop front: ReconcileIO { id: ReconciliationId(635), op: ExtentReopen { repair_id: ReconciliationId(635), extent_id: 170 }, state: ClientData([New, New, New]) }
49223 Sep 22 23:15:16.044 INFO Sent repair work, now wait for resp
49224 Sep 22 23:15:16.044 INFO [0] received reconcile message
49225 Sep 22 23:15:16.044 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(635), op: ExtentReopen { repair_id: ReconciliationId(635), extent_id: 170 }, state: ClientData([InProgress, New, New]) }, : downstairs
49226 Sep 22 23:15:16.044 INFO [0] client ExtentReopen { repair_id: ReconciliationId(635), extent_id: 170 }
49227 Sep 22 23:15:16.044 INFO [1] received reconcile message
49228 Sep 22 23:15:16.044 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(635), op: ExtentReopen { repair_id: ReconciliationId(635), extent_id: 170 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49229 Sep 22 23:15:16.044 INFO [1] client ExtentReopen { repair_id: ReconciliationId(635), extent_id: 170 }
49230 Sep 22 23:15:16.044 INFO [2] received reconcile message
49231 Sep 22 23:15:16.044 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(635), op: ExtentReopen { repair_id: ReconciliationId(635), extent_id: 170 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49232 Sep 22 23:15:16.044 INFO [2] client ExtentReopen { repair_id: ReconciliationId(635), extent_id: 170 }
49233 Sep 22 23:15:16.044 DEBG 635 Reopen extent 170
49234 Sep 22 23:15:16.045 DEBG 635 Reopen extent 170
49235 Sep 22 23:15:16.046 DEBG 635 Reopen extent 170
49236 Sep 22 23:15:16.046 DEBG [2] It's time to notify for 635
49237 Sep 22 23:15:16.046 INFO Completion from [2] id:635 status:true
49238 Sep 22 23:15:16.046 INFO [636/752] Repair commands completed
49239 Sep 22 23:15:16.046 INFO Pop front: ReconcileIO { id: ReconciliationId(636), op: ExtentFlush { repair_id: ReconciliationId(636), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49240 Sep 22 23:15:16.046 INFO Sent repair work, now wait for resp
49241 Sep 22 23:15:16.046 INFO [0] received reconcile message
49242 Sep 22 23:15:16.046 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(636), op: ExtentFlush { repair_id: ReconciliationId(636), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49243 Sep 22 23:15:16.046 INFO [0] client ExtentFlush { repair_id: ReconciliationId(636), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49244 Sep 22 23:15:16.047 INFO [1] received reconcile message
49245 Sep 22 23:15:16.047 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(636), op: ExtentFlush { repair_id: ReconciliationId(636), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49246 Sep 22 23:15:16.047 INFO [1] client ExtentFlush { repair_id: ReconciliationId(636), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49247 Sep 22 23:15:16.047 INFO [2] received reconcile message
49248 Sep 22 23:15:16.047 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(636), op: ExtentFlush { repair_id: ReconciliationId(636), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49249 Sep 22 23:15:16.047 INFO [2] client ExtentFlush { repair_id: ReconciliationId(636), extent_id: 13, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49250 Sep 22 23:15:16.047 DEBG 636 Flush extent 13 with f:2 g:2
49251 Sep 22 23:15:16.047 DEBG Flush just extent 13 with f:2 and g:2
49252 Sep 22 23:15:16.047 DEBG [1] It's time to notify for 636
49253 Sep 22 23:15:16.047 INFO Completion from [1] id:636 status:true
49254 Sep 22 23:15:16.047 INFO [637/752] Repair commands completed
49255 Sep 22 23:15:16.047 INFO Pop front: ReconcileIO { id: ReconciliationId(637), op: ExtentClose { repair_id: ReconciliationId(637), extent_id: 13 }, state: ClientData([New, New, New]) }
49256 Sep 22 23:15:16.047 INFO Sent repair work, now wait for resp
49257 Sep 22 23:15:16.047 INFO [0] received reconcile message
49258 Sep 22 23:15:16.047 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(637), op: ExtentClose { repair_id: ReconciliationId(637), extent_id: 13 }, state: ClientData([InProgress, New, New]) }, : downstairs
49259 Sep 22 23:15:16.047 INFO [0] client ExtentClose { repair_id: ReconciliationId(637), extent_id: 13 }
49260 Sep 22 23:15:16.047 INFO [1] received reconcile message
49261 Sep 22 23:15:16.047 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(637), op: ExtentClose { repair_id: ReconciliationId(637), extent_id: 13 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49262 Sep 22 23:15:16.047 INFO [1] client ExtentClose { repair_id: ReconciliationId(637), extent_id: 13 }
49263 Sep 22 23:15:16.047 INFO [2] received reconcile message
49264 Sep 22 23:15:16.047 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(637), op: ExtentClose { repair_id: ReconciliationId(637), extent_id: 13 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49265 Sep 22 23:15:16.047 INFO [2] client ExtentClose { repair_id: ReconciliationId(637), extent_id: 13 }
49266 Sep 22 23:15:16.047 DEBG 637 Close extent 13
49267 Sep 22 23:15:16.048 DEBG 637 Close extent 13
49268 Sep 22 23:15:16.048 DEBG 637 Close extent 13
49269 Sep 22 23:15:16.048 DEBG [2] It's time to notify for 637
49270 Sep 22 23:15:16.048 INFO Completion from [2] id:637 status:true
49271 Sep 22 23:15:16.048 INFO [638/752] Repair commands completed
49272 Sep 22 23:15:16.048 INFO Pop front: ReconcileIO { id: ReconciliationId(638), op: ExtentRepair { repair_id: ReconciliationId(638), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49273 Sep 22 23:15:16.048 INFO Sent repair work, now wait for resp
49274 Sep 22 23:15:16.048 INFO [0] received reconcile message
49275 Sep 22 23:15:16.048 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(638), op: ExtentRepair { repair_id: ReconciliationId(638), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49276 Sep 22 23:15:16.048 INFO [0] client ExtentRepair { repair_id: ReconciliationId(638), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49277 Sep 22 23:15:16.048 INFO [0] Sending repair request ReconciliationId(638)
49278 Sep 22 23:15:16.049 INFO [1] received reconcile message
49279 Sep 22 23:15:16.049 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(638), op: ExtentRepair { repair_id: ReconciliationId(638), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49280 Sep 22 23:15:16.049 INFO [1] client ExtentRepair { repair_id: ReconciliationId(638), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49281 Sep 22 23:15:16.049 INFO [1] No action required ReconciliationId(638)
49282 Sep 22 23:15:16.049 INFO [2] received reconcile message
49283 Sep 22 23:15:16.049 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(638), op: ExtentRepair { repair_id: ReconciliationId(638), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49284 Sep 22 23:15:16.049 INFO [2] client ExtentRepair { repair_id: ReconciliationId(638), extent_id: 13, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49285 Sep 22 23:15:16.049 INFO [2] No action required ReconciliationId(638)
49286 Sep 22 23:15:16.049 DEBG 638 Repair extent 13 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
49287 Sep 22 23:15:16.049 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/00D.copy"
49288 Sep 22 23:15:16.112 INFO accepted connection, remote_addr: 127.0.0.1:42474, local_addr: 127.0.0.1:46213, task: repair
49289 Sep 22 23:15:16.112 TRCE incoming request, uri: /extent/13/files, method: GET, req_id: d8906cf3-83f7-487f-8e73-b93bf4f67bcc, remote_addr: 127.0.0.1:42474, local_addr: 127.0.0.1:46213, task: repair
49290 Sep 22 23:15:16.112 INFO request completed, latency_us: 193, response_code: 200, uri: /extent/13/files, method: GET, req_id: d8906cf3-83f7-487f-8e73-b93bf4f67bcc, remote_addr: 127.0.0.1:42474, local_addr: 127.0.0.1:46213, task: repair
49291 Sep 22 23:15:16.113 INFO eid:13 Found repair files: ["00D", "00D.db"]
49292 Sep 22 23:15:16.113 TRCE incoming request, uri: /newextent/13/data, method: GET, req_id: ffe2050c-b3e4-4268-a937-7fdca04098b2, remote_addr: 127.0.0.1:42474, local_addr: 127.0.0.1:46213, task: repair
49293 Sep 22 23:15:16.113 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/13/data, method: GET, req_id: ffe2050c-b3e4-4268-a937-7fdca04098b2, remote_addr: 127.0.0.1:42474, local_addr: 127.0.0.1:46213, task: repair
49294 Sep 22 23:15:16.118 TRCE incoming request, uri: /newextent/13/db, method: GET, req_id: 87953b4e-4d7e-497b-8c24-564593dd29fe, remote_addr: 127.0.0.1:42474, local_addr: 127.0.0.1:46213, task: repair
49295 Sep 22 23:15:16.119 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/13/db, method: GET, req_id: 87953b4e-4d7e-497b-8c24-564593dd29fe, remote_addr: 127.0.0.1:42474, local_addr: 127.0.0.1:46213, task: repair
49296 Sep 22 23:15:16.120 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/00D.copy" to "/tmp/downstairs-vrx8aK6L/00/000/00D.replace"
49297 Sep 22 23:15:16.120 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49298 Sep 22 23:15:16.121 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/00D.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
49299 Sep 22 23:15:16.121 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/00D"
49300 Sep 22 23:15:16.121 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/00D.db"
49301 Sep 22 23:15:16.121 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49302 Sep 22 23:15:16.121 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/00D.replace" to "/tmp/downstairs-vrx8aK6L/00/000/00D.completed"
49303 Sep 22 23:15:16.121 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49304 Sep 22 23:15:16.121 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49305 Sep 22 23:15:16.121 DEBG [0] It's time to notify for 638
49306 Sep 22 23:15:16.121 INFO Completion from [0] id:638 status:true
49307 Sep 22 23:15:16.121 INFO [639/752] Repair commands completed
49308 Sep 22 23:15:16.121 INFO Pop front: ReconcileIO { id: ReconciliationId(639), op: ExtentReopen { repair_id: ReconciliationId(639), extent_id: 13 }, state: ClientData([New, New, New]) }
49309 Sep 22 23:15:16.122 INFO Sent repair work, now wait for resp
49310 Sep 22 23:15:16.122 INFO [0] received reconcile message
49311 Sep 22 23:15:16.122 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(639), op: ExtentReopen { repair_id: ReconciliationId(639), extent_id: 13 }, state: ClientData([InProgress, New, New]) }, : downstairs
49312 Sep 22 23:15:16.122 INFO [0] client ExtentReopen { repair_id: ReconciliationId(639), extent_id: 13 }
49313 Sep 22 23:15:16.122 INFO [1] received reconcile message
49314 Sep 22 23:15:16.122 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(639), op: ExtentReopen { repair_id: ReconciliationId(639), extent_id: 13 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49315 Sep 22 23:15:16.122 INFO [1] client ExtentReopen { repair_id: ReconciliationId(639), extent_id: 13 }
49316 Sep 22 23:15:16.122 INFO [2] received reconcile message
49317 Sep 22 23:15:16.122 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(639), op: ExtentReopen { repair_id: ReconciliationId(639), extent_id: 13 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49318 Sep 22 23:15:16.122 INFO [2] client ExtentReopen { repair_id: ReconciliationId(639), extent_id: 13 }
49319 Sep 22 23:15:16.122 DEBG 639 Reopen extent 13
49320 Sep 22 23:15:16.122 DEBG 639 Reopen extent 13
49321 Sep 22 23:15:16.123 DEBG 639 Reopen extent 13
49322 Sep 22 23:15:16.124 DEBG [2] It's time to notify for 639
49323 Sep 22 23:15:16.124 INFO Completion from [2] id:639 status:true
49324 Sep 22 23:15:16.124 INFO [640/752] Repair commands completed
49325 Sep 22 23:15:16.124 INFO Pop front: ReconcileIO { id: ReconciliationId(640), op: ExtentFlush { repair_id: ReconciliationId(640), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49326 Sep 22 23:15:16.124 INFO Sent repair work, now wait for resp
49327 Sep 22 23:15:16.124 INFO [0] received reconcile message
49328 Sep 22 23:15:16.124 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(640), op: ExtentFlush { repair_id: ReconciliationId(640), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49329 Sep 22 23:15:16.124 INFO [0] client ExtentFlush { repair_id: ReconciliationId(640), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49330 Sep 22 23:15:16.124 INFO [1] received reconcile message
49331 Sep 22 23:15:16.124 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(640), op: ExtentFlush { repair_id: ReconciliationId(640), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49332 Sep 22 23:15:16.124 INFO [1] client ExtentFlush { repair_id: ReconciliationId(640), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49333 Sep 22 23:15:16.124 INFO [2] received reconcile message
49334 Sep 22 23:15:16.124 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(640), op: ExtentFlush { repair_id: ReconciliationId(640), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49335 Sep 22 23:15:16.124 INFO [2] client ExtentFlush { repair_id: ReconciliationId(640), extent_id: 27, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49336 Sep 22 23:15:16.124 DEBG 640 Flush extent 27 with f:2 g:2
49337 Sep 22 23:15:16.124 DEBG Flush just extent 27 with f:2 and g:2
49338 Sep 22 23:15:16.124 DEBG [1] It's time to notify for 640
49339 Sep 22 23:15:16.124 INFO Completion from [1] id:640 status:true
49340 Sep 22 23:15:16.124 INFO [641/752] Repair commands completed
49341 Sep 22 23:15:16.124 INFO Pop front: ReconcileIO { id: ReconciliationId(641), op: ExtentClose { repair_id: ReconciliationId(641), extent_id: 27 }, state: ClientData([New, New, New]) }
49342 Sep 22 23:15:16.124 INFO Sent repair work, now wait for resp
49343 Sep 22 23:15:16.124 INFO [0] received reconcile message
49344 Sep 22 23:15:16.124 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(641), op: ExtentClose { repair_id: ReconciliationId(641), extent_id: 27 }, state: ClientData([InProgress, New, New]) }, : downstairs
49345 Sep 22 23:15:16.124 INFO [0] client ExtentClose { repair_id: ReconciliationId(641), extent_id: 27 }
49346 Sep 22 23:15:16.124 INFO [1] received reconcile message
49347 Sep 22 23:15:16.124 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(641), op: ExtentClose { repair_id: ReconciliationId(641), extent_id: 27 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49348 Sep 22 23:15:16.124 INFO [1] client ExtentClose { repair_id: ReconciliationId(641), extent_id: 27 }
49349 Sep 22 23:15:16.124 INFO [2] received reconcile message
49350 Sep 22 23:15:16.124 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(641), op: ExtentClose { repair_id: ReconciliationId(641), extent_id: 27 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49351 Sep 22 23:15:16.124 INFO [2] client ExtentClose { repair_id: ReconciliationId(641), extent_id: 27 }
49352 Sep 22 23:15:16.125 DEBG 641 Close extent 27
49353 Sep 22 23:15:16.125 DEBG 641 Close extent 27
49354 Sep 22 23:15:16.125 DEBG 641 Close extent 27
49355 Sep 22 23:15:16.126 DEBG [2] It's time to notify for 641
49356 Sep 22 23:15:16.126 INFO Completion from [2] id:641 status:true
49357 Sep 22 23:15:16.126 INFO [642/752] Repair commands completed
49358 Sep 22 23:15:16.126 INFO Pop front: ReconcileIO { id: ReconciliationId(642), op: ExtentRepair { repair_id: ReconciliationId(642), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49359 Sep 22 23:15:16.126 INFO Sent repair work, now wait for resp
49360 Sep 22 23:15:16.126 INFO [0] received reconcile message
49361 Sep 22 23:15:16.126 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(642), op: ExtentRepair { repair_id: ReconciliationId(642), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49362 Sep 22 23:15:16.126 INFO [0] client ExtentRepair { repair_id: ReconciliationId(642), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49363 Sep 22 23:15:16.126 INFO [0] Sending repair request ReconciliationId(642)
49364 Sep 22 23:15:16.126 INFO [1] received reconcile message
49365 Sep 22 23:15:16.126 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(642), op: ExtentRepair { repair_id: ReconciliationId(642), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49366 Sep 22 23:15:16.126 INFO [1] client ExtentRepair { repair_id: ReconciliationId(642), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49367 Sep 22 23:15:16.126 INFO [1] No action required ReconciliationId(642)
49368 Sep 22 23:15:16.126 INFO [2] received reconcile message
49369 Sep 22 23:15:16.126 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(642), op: ExtentRepair { repair_id: ReconciliationId(642), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49370 Sep 22 23:15:16.126 INFO [2] client ExtentRepair { repair_id: ReconciliationId(642), extent_id: 27, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49371 Sep 22 23:15:16.126 INFO [2] No action required ReconciliationId(642)
49372 Sep 22 23:15:16.126 DEBG 642 Repair extent 27 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
49373 Sep 22 23:15:16.126 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/01B.copy"
49374 Sep 22 23:15:16.189 INFO accepted connection, remote_addr: 127.0.0.1:39512, local_addr: 127.0.0.1:46213, task: repair
49375 Sep 22 23:15:16.189 TRCE incoming request, uri: /extent/27/files, method: GET, req_id: dd6c63a9-84bd-4916-bff6-6c6e90ec385a, remote_addr: 127.0.0.1:39512, local_addr: 127.0.0.1:46213, task: repair
49376 Sep 22 23:15:16.190 INFO request completed, latency_us: 200, response_code: 200, uri: /extent/27/files, method: GET, req_id: dd6c63a9-84bd-4916-bff6-6c6e90ec385a, remote_addr: 127.0.0.1:39512, local_addr: 127.0.0.1:46213, task: repair
49377 Sep 22 23:15:16.190 INFO eid:27 Found repair files: ["01B", "01B.db"]
49378 Sep 22 23:15:16.190 TRCE incoming request, uri: /newextent/27/data, method: GET, req_id: 1e8f8524-a24b-40b3-82ae-aa1de4e0af17, remote_addr: 127.0.0.1:39512, local_addr: 127.0.0.1:46213, task: repair
49379 Sep 22 23:15:16.190 INFO request completed, latency_us: 307, response_code: 200, uri: /newextent/27/data, method: GET, req_id: 1e8f8524-a24b-40b3-82ae-aa1de4e0af17, remote_addr: 127.0.0.1:39512, local_addr: 127.0.0.1:46213, task: repair
49380 Sep 22 23:15:16.196 TRCE incoming request, uri: /newextent/27/db, method: GET, req_id: 75a84721-1acb-4dcf-9237-e95890348a2d, remote_addr: 127.0.0.1:39512, local_addr: 127.0.0.1:46213, task: repair
49381 Sep 22 23:15:16.196 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/27/db, method: GET, req_id: 75a84721-1acb-4dcf-9237-e95890348a2d, remote_addr: 127.0.0.1:39512, local_addr: 127.0.0.1:46213, task: repair
49382 Sep 22 23:15:16.197 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/01B.copy" to "/tmp/downstairs-vrx8aK6L/00/000/01B.replace"
49383 Sep 22 23:15:16.197 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49384 Sep 22 23:15:16.198 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/01B.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
49385 Sep 22 23:15:16.198 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/01B"
49386 Sep 22 23:15:16.198 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/01B.db"
49387 Sep 22 23:15:16.198 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49388 Sep 22 23:15:16.198 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/01B.replace" to "/tmp/downstairs-vrx8aK6L/00/000/01B.completed"
49389 Sep 22 23:15:16.198 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49390 Sep 22 23:15:16.199 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49391 Sep 22 23:15:16.199 DEBG [0] It's time to notify for 642
49392 Sep 22 23:15:16.199 INFO Completion from [0] id:642 status:true
49393 Sep 22 23:15:16.199 INFO [643/752] Repair commands completed
49394 Sep 22 23:15:16.199 INFO Pop front: ReconcileIO { id: ReconciliationId(643), op: ExtentReopen { repair_id: ReconciliationId(643), extent_id: 27 }, state: ClientData([New, New, New]) }
49395 Sep 22 23:15:16.199 INFO Sent repair work, now wait for resp
49396 Sep 22 23:15:16.199 INFO [0] received reconcile message
49397 Sep 22 23:15:16.199 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(643), op: ExtentReopen { repair_id: ReconciliationId(643), extent_id: 27 }, state: ClientData([InProgress, New, New]) }, : downstairs
49398 Sep 22 23:15:16.199 INFO [0] client ExtentReopen { repair_id: ReconciliationId(643), extent_id: 27 }
49399 Sep 22 23:15:16.199 INFO [1] received reconcile message
49400 Sep 22 23:15:16.199 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(643), op: ExtentReopen { repair_id: ReconciliationId(643), extent_id: 27 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49401 Sep 22 23:15:16.199 INFO [1] client ExtentReopen { repair_id: ReconciliationId(643), extent_id: 27 }
49402 Sep 22 23:15:16.199 INFO [2] received reconcile message
49403 Sep 22 23:15:16.199 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(643), op: ExtentReopen { repair_id: ReconciliationId(643), extent_id: 27 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49404 Sep 22 23:15:16.199 INFO [2] client ExtentReopen { repair_id: ReconciliationId(643), extent_id: 27 }
49405 Sep 22 23:15:16.199 DEBG 643 Reopen extent 27
49406 Sep 22 23:15:16.200 DEBG 643 Reopen extent 27
49407 Sep 22 23:15:16.200 DEBG 643 Reopen extent 27
49408 Sep 22 23:15:16.201 DEBG [2] It's time to notify for 643
49409 Sep 22 23:15:16.201 INFO Completion from [2] id:643 status:true
49410 Sep 22 23:15:16.201 INFO [644/752] Repair commands completed
49411 Sep 22 23:15:16.201 INFO Pop front: ReconcileIO { id: ReconciliationId(644), op: ExtentFlush { repair_id: ReconciliationId(644), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49412 Sep 22 23:15:16.201 INFO Sent repair work, now wait for resp
49413 Sep 22 23:15:16.201 INFO [0] received reconcile message
49414 Sep 22 23:15:16.201 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(644), op: ExtentFlush { repair_id: ReconciliationId(644), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49415 Sep 22 23:15:16.201 INFO [0] client ExtentFlush { repair_id: ReconciliationId(644), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49416 Sep 22 23:15:16.201 INFO [1] received reconcile message
49417 Sep 22 23:15:16.201 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(644), op: ExtentFlush { repair_id: ReconciliationId(644), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49418 Sep 22 23:15:16.201 INFO [1] client ExtentFlush { repair_id: ReconciliationId(644), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49419 Sep 22 23:15:16.201 INFO [2] received reconcile message
49420 Sep 22 23:15:16.201 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(644), op: ExtentFlush { repair_id: ReconciliationId(644), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49421 Sep 22 23:15:16.201 INFO [2] client ExtentFlush { repair_id: ReconciliationId(644), extent_id: 8, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49422 Sep 22 23:15:16.201 DEBG 644 Flush extent 8 with f:2 g:2
49423 Sep 22 23:15:16.201 DEBG Flush just extent 8 with f:2 and g:2
49424 Sep 22 23:15:16.202 DEBG [1] It's time to notify for 644
49425 Sep 22 23:15:16.202 INFO Completion from [1] id:644 status:true
49426 Sep 22 23:15:16.202 INFO [645/752] Repair commands completed
49427 Sep 22 23:15:16.202 INFO Pop front: ReconcileIO { id: ReconciliationId(645), op: ExtentClose { repair_id: ReconciliationId(645), extent_id: 8 }, state: ClientData([New, New, New]) }
49428 Sep 22 23:15:16.202 INFO Sent repair work, now wait for resp
49429 Sep 22 23:15:16.202 INFO [0] received reconcile message
49430 Sep 22 23:15:16.202 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(645), op: ExtentClose { repair_id: ReconciliationId(645), extent_id: 8 }, state: ClientData([InProgress, New, New]) }, : downstairs
49431 Sep 22 23:15:16.202 INFO [0] client ExtentClose { repair_id: ReconciliationId(645), extent_id: 8 }
49432 Sep 22 23:15:16.202 INFO [1] received reconcile message
49433 Sep 22 23:15:16.202 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(645), op: ExtentClose { repair_id: ReconciliationId(645), extent_id: 8 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49434 Sep 22 23:15:16.202 INFO [1] client ExtentClose { repair_id: ReconciliationId(645), extent_id: 8 }
49435 Sep 22 23:15:16.202 INFO [2] received reconcile message
49436 Sep 22 23:15:16.202 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(645), op: ExtentClose { repair_id: ReconciliationId(645), extent_id: 8 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49437 Sep 22 23:15:16.202 INFO [2] client ExtentClose { repair_id: ReconciliationId(645), extent_id: 8 }
49438 Sep 22 23:15:16.202 DEBG 645 Close extent 8
49439 Sep 22 23:15:16.202 DEBG 645 Close extent 8
49440 Sep 22 23:15:16.203 DEBG 645 Close extent 8
49441 Sep 22 23:15:16.203 DEBG [2] It's time to notify for 645
49442 Sep 22 23:15:16.203 INFO Completion from [2] id:645 status:true
49443 Sep 22 23:15:16.203 INFO [646/752] Repair commands completed
49444 Sep 22 23:15:16.203 INFO Pop front: ReconcileIO { id: ReconciliationId(646), op: ExtentRepair { repair_id: ReconciliationId(646), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49445 Sep 22 23:15:16.203 INFO Sent repair work, now wait for resp
49446 Sep 22 23:15:16.203 INFO [0] received reconcile message
49447 Sep 22 23:15:16.203 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(646), op: ExtentRepair { repair_id: ReconciliationId(646), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49448 Sep 22 23:15:16.203 INFO [0] client ExtentRepair { repair_id: ReconciliationId(646), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49449 Sep 22 23:15:16.203 INFO [0] Sending repair request ReconciliationId(646)
49450 Sep 22 23:15:16.203 INFO [1] received reconcile message
49451 Sep 22 23:15:16.203 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(646), op: ExtentRepair { repair_id: ReconciliationId(646), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49452 Sep 22 23:15:16.203 INFO [1] client ExtentRepair { repair_id: ReconciliationId(646), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49453 Sep 22 23:15:16.203 INFO [1] No action required ReconciliationId(646)
49454 Sep 22 23:15:16.203 INFO [2] received reconcile message
49455 Sep 22 23:15:16.203 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(646), op: ExtentRepair { repair_id: ReconciliationId(646), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49456 Sep 22 23:15:16.203 INFO [2] client ExtentRepair { repair_id: ReconciliationId(646), extent_id: 8, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49457 Sep 22 23:15:16.203 INFO [2] No action required ReconciliationId(646)
49458 Sep 22 23:15:16.203 DEBG 646 Repair extent 8 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
49459 Sep 22 23:15:16.204 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/008.copy"
49460 Sep 22 23:15:16.258 DEBG up_ds_listen was notified
49461 Sep 22 23:15:16.258 DEBG up_ds_listen process 1089
49462 Sep 22 23:15:16.258 DEBG [A] ack job 1089:90, : downstairs
49463 Sep 22 23:15:16.258 DEBG up_ds_listen checked 1 jobs, back to waiting
49464 Sep 22 23:15:16.258 DEBG Flush :1089 extent_limit None deps:[JobId(1088), JobId(1087)] res:true f:34 g:1
49465 Sep 22 23:15:16.259 WARN returning error on read!
49466 Sep 22 23:15:16.259 DEBG Read :1090 deps:[JobId(1089)] res:false
49467 Sep 22 23:15:16.265 DEBG Read :1090 deps:[JobId(1089)] res:true
49468 Sep 22 23:15:16.266 INFO accepted connection, remote_addr: 127.0.0.1:49874, local_addr: 127.0.0.1:46213, task: repair
49469 Sep 22 23:15:16.266 TRCE incoming request, uri: /extent/8/files, method: GET, req_id: 2e9ab566-b7c7-483d-9db8-babb4b21cd32, remote_addr: 127.0.0.1:49874, local_addr: 127.0.0.1:46213, task: repair
49470 Sep 22 23:15:16.266 INFO request completed, latency_us: 197, response_code: 200, uri: /extent/8/files, method: GET, req_id: 2e9ab566-b7c7-483d-9db8-babb4b21cd32, remote_addr: 127.0.0.1:49874, local_addr: 127.0.0.1:46213, task: repair
49471 Sep 22 23:15:16.267 INFO eid:8 Found repair files: ["008", "008.db"]
49472 Sep 22 23:15:16.267 TRCE incoming request, uri: /newextent/8/data, method: GET, req_id: 08bdc375-501e-4b39-9a1d-3acfa8028999, remote_addr: 127.0.0.1:49874, local_addr: 127.0.0.1:46213, task: repair
49473 Sep 22 23:15:16.267 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/8/data, method: GET, req_id: 08bdc375-501e-4b39-9a1d-3acfa8028999, remote_addr: 127.0.0.1:49874, local_addr: 127.0.0.1:46213, task: repair
49474 Sep 22 23:15:16.272 TRCE incoming request, uri: /newextent/8/db, method: GET, req_id: 02cb42d2-67eb-466e-b9b5-a8e0cee4e3fa, remote_addr: 127.0.0.1:49874, local_addr: 127.0.0.1:46213, task: repair
49475 Sep 22 23:15:16.273 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/8/db, method: GET, req_id: 02cb42d2-67eb-466e-b9b5-a8e0cee4e3fa, remote_addr: 127.0.0.1:49874, local_addr: 127.0.0.1:46213, task: repair
49476 Sep 22 23:15:16.274 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/008.copy" to "/tmp/downstairs-vrx8aK6L/00/000/008.replace"
49477 Sep 22 23:15:16.274 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49478 Sep 22 23:15:16.275 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/008.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
49479 Sep 22 23:15:16.275 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/008"
49480 Sep 22 23:15:16.275 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/008.db"
49481 Sep 22 23:15:16.275 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49482 Sep 22 23:15:16.275 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/008.replace" to "/tmp/downstairs-vrx8aK6L/00/000/008.completed"
49483 Sep 22 23:15:16.275 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49484 Sep 22 23:15:16.275 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49485 Sep 22 23:15:16.275 DEBG [0] It's time to notify for 646
49486 Sep 22 23:15:16.275 INFO Completion from [0] id:646 status:true
49487 Sep 22 23:15:16.276 INFO [647/752] Repair commands completed
49488 Sep 22 23:15:16.276 INFO Pop front: ReconcileIO { id: ReconciliationId(647), op: ExtentReopen { repair_id: ReconciliationId(647), extent_id: 8 }, state: ClientData([New, New, New]) }
49489 Sep 22 23:15:16.276 INFO Sent repair work, now wait for resp
49490 Sep 22 23:15:16.276 INFO [0] received reconcile message
49491 Sep 22 23:15:16.276 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(647), op: ExtentReopen { repair_id: ReconciliationId(647), extent_id: 8 }, state: ClientData([InProgress, New, New]) }, : downstairs
49492 Sep 22 23:15:16.276 INFO [0] client ExtentReopen { repair_id: ReconciliationId(647), extent_id: 8 }
49493 Sep 22 23:15:16.276 INFO [1] received reconcile message
49494 Sep 22 23:15:16.276 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(647), op: ExtentReopen { repair_id: ReconciliationId(647), extent_id: 8 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49495 Sep 22 23:15:16.276 INFO [1] client ExtentReopen { repair_id: ReconciliationId(647), extent_id: 8 }
49496 Sep 22 23:15:16.276 INFO [2] received reconcile message
49497 Sep 22 23:15:16.276 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(647), op: ExtentReopen { repair_id: ReconciliationId(647), extent_id: 8 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49498 Sep 22 23:15:16.276 INFO [2] client ExtentReopen { repair_id: ReconciliationId(647), extent_id: 8 }
49499 Sep 22 23:15:16.276 DEBG 647 Reopen extent 8
49500 Sep 22 23:15:16.277 DEBG 647 Reopen extent 8
49501 Sep 22 23:15:16.277 DEBG 647 Reopen extent 8
49502 Sep 22 23:15:16.278 DEBG [2] It's time to notify for 647
49503 Sep 22 23:15:16.278 INFO Completion from [2] id:647 status:true
49504 Sep 22 23:15:16.278 INFO [648/752] Repair commands completed
49505 Sep 22 23:15:16.278 INFO Pop front: ReconcileIO { id: ReconciliationId(648), op: ExtentFlush { repair_id: ReconciliationId(648), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49506 Sep 22 23:15:16.278 INFO Sent repair work, now wait for resp
49507 Sep 22 23:15:16.278 INFO [0] received reconcile message
49508 Sep 22 23:15:16.278 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(648), op: ExtentFlush { repair_id: ReconciliationId(648), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49509 Sep 22 23:15:16.278 INFO [0] client ExtentFlush { repair_id: ReconciliationId(648), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49510 Sep 22 23:15:16.278 INFO [1] received reconcile message
49511 Sep 22 23:15:16.278 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(648), op: ExtentFlush { repair_id: ReconciliationId(648), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49512 Sep 22 23:15:16.278 INFO [1] client ExtentFlush { repair_id: ReconciliationId(648), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49513 Sep 22 23:15:16.278 INFO [2] received reconcile message
49514 Sep 22 23:15:16.278 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(648), op: ExtentFlush { repair_id: ReconciliationId(648), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49515 Sep 22 23:15:16.278 INFO [2] client ExtentFlush { repair_id: ReconciliationId(648), extent_id: 1, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49516 Sep 22 23:15:16.278 DEBG 648 Flush extent 1 with f:2 g:2
49517 Sep 22 23:15:16.278 DEBG Flush just extent 1 with f:2 and g:2
49518 Sep 22 23:15:16.278 DEBG [1] It's time to notify for 648
49519 Sep 22 23:15:16.278 INFO Completion from [1] id:648 status:true
49520 Sep 22 23:15:16.278 INFO [649/752] Repair commands completed
49521 Sep 22 23:15:16.278 INFO Pop front: ReconcileIO { id: ReconciliationId(649), op: ExtentClose { repair_id: ReconciliationId(649), extent_id: 1 }, state: ClientData([New, New, New]) }
49522 Sep 22 23:15:16.278 INFO Sent repair work, now wait for resp
49523 Sep 22 23:15:16.278 INFO [0] received reconcile message
49524 Sep 22 23:15:16.278 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(649), op: ExtentClose { repair_id: ReconciliationId(649), extent_id: 1 }, state: ClientData([InProgress, New, New]) }, : downstairs
49525 Sep 22 23:15:16.278 INFO [0] client ExtentClose { repair_id: ReconciliationId(649), extent_id: 1 }
49526 Sep 22 23:15:16.278 INFO [1] received reconcile message
49527 Sep 22 23:15:16.278 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(649), op: ExtentClose { repair_id: ReconciliationId(649), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49528 Sep 22 23:15:16.278 INFO [1] client ExtentClose { repair_id: ReconciliationId(649), extent_id: 1 }
49529 Sep 22 23:15:16.279 INFO [2] received reconcile message
49530 Sep 22 23:15:16.279 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(649), op: ExtentClose { repair_id: ReconciliationId(649), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49531 Sep 22 23:15:16.279 INFO [2] client ExtentClose { repair_id: ReconciliationId(649), extent_id: 1 }
49532 Sep 22 23:15:16.279 DEBG 649 Close extent 1
49533 Sep 22 23:15:16.279 DEBG 649 Close extent 1
49534 Sep 22 23:15:16.279 DEBG 649 Close extent 1
49535 Sep 22 23:15:16.280 DEBG [2] It's time to notify for 649
49536 Sep 22 23:15:16.280 INFO Completion from [2] id:649 status:true
49537 Sep 22 23:15:16.280 INFO [650/752] Repair commands completed
49538 Sep 22 23:15:16.280 INFO Pop front: ReconcileIO { id: ReconciliationId(650), op: ExtentRepair { repair_id: ReconciliationId(650), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49539 Sep 22 23:15:16.280 INFO Sent repair work, now wait for resp
49540 Sep 22 23:15:16.280 INFO [0] received reconcile message
49541 Sep 22 23:15:16.280 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(650), op: ExtentRepair { repair_id: ReconciliationId(650), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49542 Sep 22 23:15:16.280 INFO [0] client ExtentRepair { repair_id: ReconciliationId(650), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49543 Sep 22 23:15:16.280 INFO [0] Sending repair request ReconciliationId(650)
49544 Sep 22 23:15:16.280 INFO [1] received reconcile message
49545 Sep 22 23:15:16.280 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(650), op: ExtentRepair { repair_id: ReconciliationId(650), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49546 Sep 22 23:15:16.280 INFO [1] client ExtentRepair { repair_id: ReconciliationId(650), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49547 Sep 22 23:15:16.280 INFO [1] No action required ReconciliationId(650)
49548 Sep 22 23:15:16.280 INFO [2] received reconcile message
49549 Sep 22 23:15:16.280 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(650), op: ExtentRepair { repair_id: ReconciliationId(650), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49550 Sep 22 23:15:16.280 INFO [2] client ExtentRepair { repair_id: ReconciliationId(650), extent_id: 1, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49551 Sep 22 23:15:16.280 INFO [2] No action required ReconciliationId(650)
49552 Sep 22 23:15:16.280 DEBG 650 Repair extent 1 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
49553 Sep 22 23:15:16.280 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/001.copy"
49554 Sep 22 23:15:16.286 DEBG [rc] retire 1089 clears [JobId(1088), JobId(1089)], : downstairs
49555 Sep 22 23:15:16.286 ERRO [2] job id 1090 saw error GenericError("test error")
49556 Sep 22 23:15:16.289 DEBG Flush :1091 extent_limit None deps:[JobId(1090), JobId(1089)] res:true f:35 g:1
49557 Sep 22 23:15:16.289 INFO [lossy] sleeping 1 second
49558 Sep 22 23:15:16.345 INFO accepted connection, remote_addr: 127.0.0.1:47943, local_addr: 127.0.0.1:46213, task: repair
49559 Sep 22 23:15:16.345 TRCE incoming request, uri: /extent/1/files, method: GET, req_id: 21b0536f-f9f9-48fd-85f5-4d3a62dfb386, remote_addr: 127.0.0.1:47943, local_addr: 127.0.0.1:46213, task: repair
49560 Sep 22 23:15:16.345 INFO request completed, latency_us: 257, response_code: 200, uri: /extent/1/files, method: GET, req_id: 21b0536f-f9f9-48fd-85f5-4d3a62dfb386, remote_addr: 127.0.0.1:47943, local_addr: 127.0.0.1:46213, task: repair
49561 Sep 22 23:15:16.346 INFO eid:1 Found repair files: ["001", "001.db"]
49562 Sep 22 23:15:16.346 TRCE incoming request, uri: /newextent/1/data, method: GET, req_id: 099aa364-6ce9-4744-b821-635d8c0c9c70, remote_addr: 127.0.0.1:47943, local_addr: 127.0.0.1:46213, task: repair
49563 Sep 22 23:15:16.346 INFO request completed, latency_us: 340, response_code: 200, uri: /newextent/1/data, method: GET, req_id: 099aa364-6ce9-4744-b821-635d8c0c9c70, remote_addr: 127.0.0.1:47943, local_addr: 127.0.0.1:46213, task: repair
49564 Sep 22 23:15:16.352 TRCE incoming request, uri: /newextent/1/db, method: GET, req_id: e94a52f8-8fb6-4a09-b878-a36d99780615, remote_addr: 127.0.0.1:47943, local_addr: 127.0.0.1:46213, task: repair
49565 Sep 22 23:15:16.352 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/1/db, method: GET, req_id: e94a52f8-8fb6-4a09-b878-a36d99780615, remote_addr: 127.0.0.1:47943, local_addr: 127.0.0.1:46213, task: repair
49566 Sep 22 23:15:16.353 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/001.copy" to "/tmp/downstairs-vrx8aK6L/00/000/001.replace"
49567 Sep 22 23:15:16.353 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49568 Sep 22 23:15:16.354 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/001.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
49569 Sep 22 23:15:16.354 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/001"
49570 Sep 22 23:15:16.354 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/001.db"
49571 Sep 22 23:15:16.354 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49572 Sep 22 23:15:16.354 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/001.replace" to "/tmp/downstairs-vrx8aK6L/00/000/001.completed"
49573 Sep 22 23:15:16.354 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49574 Sep 22 23:15:16.355 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49575 Sep 22 23:15:16.355 DEBG [0] It's time to notify for 650
49576 Sep 22 23:15:16.355 INFO Completion from [0] id:650 status:true
49577 Sep 22 23:15:16.355 INFO [651/752] Repair commands completed
49578 Sep 22 23:15:16.355 INFO Pop front: ReconcileIO { id: ReconciliationId(651), op: ExtentReopen { repair_id: ReconciliationId(651), extent_id: 1 }, state: ClientData([New, New, New]) }
49579 Sep 22 23:15:16.355 INFO Sent repair work, now wait for resp
49580 Sep 22 23:15:16.355 INFO [0] received reconcile message
49581 Sep 22 23:15:16.355 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(651), op: ExtentReopen { repair_id: ReconciliationId(651), extent_id: 1 }, state: ClientData([InProgress, New, New]) }, : downstairs
49582 Sep 22 23:15:16.355 INFO [0] client ExtentReopen { repair_id: ReconciliationId(651), extent_id: 1 }
49583 Sep 22 23:15:16.355 INFO [1] received reconcile message
49584 Sep 22 23:15:16.355 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(651), op: ExtentReopen { repair_id: ReconciliationId(651), extent_id: 1 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49585 Sep 22 23:15:16.355 INFO [1] client ExtentReopen { repair_id: ReconciliationId(651), extent_id: 1 }
49586 Sep 22 23:15:16.355 INFO [2] received reconcile message
49587 Sep 22 23:15:16.355 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(651), op: ExtentReopen { repair_id: ReconciliationId(651), extent_id: 1 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49588 Sep 22 23:15:16.355 INFO [2] client ExtentReopen { repair_id: ReconciliationId(651), extent_id: 1 }
49589 Sep 22 23:15:16.355 DEBG 651 Reopen extent 1
49590 Sep 22 23:15:16.356 DEBG 651 Reopen extent 1
49591 Sep 22 23:15:16.357 DEBG 651 Reopen extent 1
49592 Sep 22 23:15:16.357 DEBG [2] It's time to notify for 651
49593 Sep 22 23:15:16.357 INFO Completion from [2] id:651 status:true
49594 Sep 22 23:15:16.357 INFO [652/752] Repair commands completed
49595 Sep 22 23:15:16.357 INFO Pop front: ReconcileIO { id: ReconciliationId(652), op: ExtentFlush { repair_id: ReconciliationId(652), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49596 Sep 22 23:15:16.357 INFO Sent repair work, now wait for resp
49597 Sep 22 23:15:16.357 INFO [0] received reconcile message
49598 Sep 22 23:15:16.357 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(652), op: ExtentFlush { repair_id: ReconciliationId(652), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49599 Sep 22 23:15:16.357 INFO [0] client ExtentFlush { repair_id: ReconciliationId(652), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49600 Sep 22 23:15:16.357 INFO [1] received reconcile message
49601 Sep 22 23:15:16.357 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(652), op: ExtentFlush { repair_id: ReconciliationId(652), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49602 Sep 22 23:15:16.357 INFO [1] client ExtentFlush { repair_id: ReconciliationId(652), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49603 Sep 22 23:15:16.358 INFO [2] received reconcile message
49604 Sep 22 23:15:16.358 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(652), op: ExtentFlush { repair_id: ReconciliationId(652), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49605 Sep 22 23:15:16.358 INFO [2] client ExtentFlush { repair_id: ReconciliationId(652), extent_id: 9, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49606 Sep 22 23:15:16.358 DEBG 652 Flush extent 9 with f:2 g:2
49607 Sep 22 23:15:16.358 DEBG Flush just extent 9 with f:2 and g:2
49608 Sep 22 23:15:16.358 DEBG [1] It's time to notify for 652
49609 Sep 22 23:15:16.358 INFO Completion from [1] id:652 status:true
49610 Sep 22 23:15:16.358 INFO [653/752] Repair commands completed
49611 Sep 22 23:15:16.358 INFO Pop front: ReconcileIO { id: ReconciliationId(653), op: ExtentClose { repair_id: ReconciliationId(653), extent_id: 9 }, state: ClientData([New, New, New]) }
49612 Sep 22 23:15:16.358 INFO Sent repair work, now wait for resp
49613 Sep 22 23:15:16.358 INFO [0] received reconcile message
49614 Sep 22 23:15:16.358 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(653), op: ExtentClose { repair_id: ReconciliationId(653), extent_id: 9 }, state: ClientData([InProgress, New, New]) }, : downstairs
49615 Sep 22 23:15:16.358 INFO [0] client ExtentClose { repair_id: ReconciliationId(653), extent_id: 9 }
49616 Sep 22 23:15:16.358 INFO [1] received reconcile message
49617 Sep 22 23:15:16.358 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(653), op: ExtentClose { repair_id: ReconciliationId(653), extent_id: 9 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49618 Sep 22 23:15:16.358 INFO [1] client ExtentClose { repair_id: ReconciliationId(653), extent_id: 9 }
49619 Sep 22 23:15:16.358 INFO [2] received reconcile message
49620 Sep 22 23:15:16.358 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(653), op: ExtentClose { repair_id: ReconciliationId(653), extent_id: 9 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49621 Sep 22 23:15:16.358 INFO [2] client ExtentClose { repair_id: ReconciliationId(653), extent_id: 9 }
49622 Sep 22 23:15:16.358 DEBG 653 Close extent 9
49623 Sep 22 23:15:16.359 DEBG 653 Close extent 9
49624 Sep 22 23:15:16.359 DEBG 653 Close extent 9
49625 Sep 22 23:15:16.359 DEBG [2] It's time to notify for 653
49626 Sep 22 23:15:16.359 INFO Completion from [2] id:653 status:true
49627 Sep 22 23:15:16.359 INFO [654/752] Repair commands completed
49628 Sep 22 23:15:16.359 INFO Pop front: ReconcileIO { id: ReconciliationId(654), op: ExtentRepair { repair_id: ReconciliationId(654), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49629 Sep 22 23:15:16.359 INFO Sent repair work, now wait for resp
49630 Sep 22 23:15:16.359 INFO [0] received reconcile message
49631 Sep 22 23:15:16.359 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(654), op: ExtentRepair { repair_id: ReconciliationId(654), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49632 Sep 22 23:15:16.359 INFO [0] client ExtentRepair { repair_id: ReconciliationId(654), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49633 Sep 22 23:15:16.359 INFO [0] Sending repair request ReconciliationId(654)
49634 Sep 22 23:15:16.359 INFO [1] received reconcile message
49635 Sep 22 23:15:16.359 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(654), op: ExtentRepair { repair_id: ReconciliationId(654), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49636 Sep 22 23:15:16.360 INFO [1] client ExtentRepair { repair_id: ReconciliationId(654), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49637 Sep 22 23:15:16.360 INFO [1] No action required ReconciliationId(654)
49638 Sep 22 23:15:16.360 INFO [2] received reconcile message
49639 Sep 22 23:15:16.360 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(654), op: ExtentRepair { repair_id: ReconciliationId(654), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49640 Sep 22 23:15:16.360 INFO [2] client ExtentRepair { repair_id: ReconciliationId(654), extent_id: 9, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49641 Sep 22 23:15:16.360 INFO [2] No action required ReconciliationId(654)
49642 Sep 22 23:15:16.360 DEBG 654 Repair extent 9 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
49643 Sep 22 23:15:16.360 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/009.copy"
49644 Sep 22 23:15:16.425 INFO accepted connection, remote_addr: 127.0.0.1:46226, local_addr: 127.0.0.1:46213, task: repair
49645 Sep 22 23:15:16.425 TRCE incoming request, uri: /extent/9/files, method: GET, req_id: 13bddf51-ee2a-4be0-9242-83821ad227eb, remote_addr: 127.0.0.1:46226, local_addr: 127.0.0.1:46213, task: repair
49646 Sep 22 23:15:16.425 INFO request completed, latency_us: 272, response_code: 200, uri: /extent/9/files, method: GET, req_id: 13bddf51-ee2a-4be0-9242-83821ad227eb, remote_addr: 127.0.0.1:46226, local_addr: 127.0.0.1:46213, task: repair
49647 Sep 22 23:15:16.426 INFO eid:9 Found repair files: ["009", "009.db"]
49648 Sep 22 23:15:16.426 TRCE incoming request, uri: /newextent/9/data, method: GET, req_id: cd5418d7-9c7c-4a7e-9f2e-ffd837f2787c, remote_addr: 127.0.0.1:46226, local_addr: 127.0.0.1:46213, task: repair
49649 Sep 22 23:15:16.426 INFO request completed, latency_us: 351, response_code: 200, uri: /newextent/9/data, method: GET, req_id: cd5418d7-9c7c-4a7e-9f2e-ffd837f2787c, remote_addr: 127.0.0.1:46226, local_addr: 127.0.0.1:46213, task: repair
49650 Sep 22 23:15:16.432 TRCE incoming request, uri: /newextent/9/db, method: GET, req_id: 5e6097a3-0d66-4724-96dd-18f20bc7e176, remote_addr: 127.0.0.1:46226, local_addr: 127.0.0.1:46213, task: repair
49651 Sep 22 23:15:16.432 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/9/db, method: GET, req_id: 5e6097a3-0d66-4724-96dd-18f20bc7e176, remote_addr: 127.0.0.1:46226, local_addr: 127.0.0.1:46213, task: repair
49652 Sep 22 23:15:16.433 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/009.copy" to "/tmp/downstairs-vrx8aK6L/00/000/009.replace"
49653 Sep 22 23:15:16.433 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49654 Sep 22 23:15:16.435 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/009.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
49655 Sep 22 23:15:16.435 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/009"
49656 Sep 22 23:15:16.435 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/009.db"
49657 Sep 22 23:15:16.435 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49658 Sep 22 23:15:16.435 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/009.replace" to "/tmp/downstairs-vrx8aK6L/00/000/009.completed"
49659 Sep 22 23:15:16.435 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49660 Sep 22 23:15:16.435 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49661 Sep 22 23:15:16.435 DEBG [0] It's time to notify for 654
49662 Sep 22 23:15:16.436 INFO Completion from [0] id:654 status:true
49663 Sep 22 23:15:16.436 INFO [655/752] Repair commands completed
49664 Sep 22 23:15:16.436 INFO Pop front: ReconcileIO { id: ReconciliationId(655), op: ExtentReopen { repair_id: ReconciliationId(655), extent_id: 9 }, state: ClientData([New, New, New]) }
49665 Sep 22 23:15:16.436 INFO Sent repair work, now wait for resp
49666 Sep 22 23:15:16.436 INFO [0] received reconcile message
49667 Sep 22 23:15:16.436 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(655), op: ExtentReopen { repair_id: ReconciliationId(655), extent_id: 9 }, state: ClientData([InProgress, New, New]) }, : downstairs
49668 Sep 22 23:15:16.436 INFO [0] client ExtentReopen { repair_id: ReconciliationId(655), extent_id: 9 }
49669 Sep 22 23:15:16.436 INFO [1] received reconcile message
49670 Sep 22 23:15:16.436 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(655), op: ExtentReopen { repair_id: ReconciliationId(655), extent_id: 9 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49671 Sep 22 23:15:16.436 INFO [1] client ExtentReopen { repair_id: ReconciliationId(655), extent_id: 9 }
49672 Sep 22 23:15:16.436 INFO [2] received reconcile message
49673 Sep 22 23:15:16.436 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(655), op: ExtentReopen { repair_id: ReconciliationId(655), extent_id: 9 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49674 Sep 22 23:15:16.436 INFO [2] client ExtentReopen { repair_id: ReconciliationId(655), extent_id: 9 }
49675 Sep 22 23:15:16.436 DEBG 655 Reopen extent 9
49676 Sep 22 23:15:16.437 DEBG 655 Reopen extent 9
49677 Sep 22 23:15:16.437 DEBG 655 Reopen extent 9
49678 Sep 22 23:15:16.438 DEBG [2] It's time to notify for 655
49679 Sep 22 23:15:16.438 INFO Completion from [2] id:655 status:true
49680 Sep 22 23:15:16.438 INFO [656/752] Repair commands completed
49681 Sep 22 23:15:16.438 INFO Pop front: ReconcileIO { id: ReconciliationId(656), op: ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49682 Sep 22 23:15:16.438 INFO Sent repair work, now wait for resp
49683 Sep 22 23:15:16.438 INFO [0] received reconcile message
49684 Sep 22 23:15:16.438 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(656), op: ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49685 Sep 22 23:15:16.438 INFO [0] client ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49686 Sep 22 23:15:16.438 INFO [1] received reconcile message
49687 Sep 22 23:15:16.438 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(656), op: ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49688 Sep 22 23:15:16.438 INFO [1] client ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49689 Sep 22 23:15:16.438 INFO [2] received reconcile message
49690 Sep 22 23:15:16.438 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(656), op: ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49691 Sep 22 23:15:16.438 INFO [2] client ExtentFlush { repair_id: ReconciliationId(656), extent_id: 116, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49692 Sep 22 23:15:16.438 DEBG 656 Flush extent 116 with f:2 g:2
49693 Sep 22 23:15:16.438 DEBG Flush just extent 116 with f:2 and g:2
49694 Sep 22 23:15:16.439 DEBG [1] It's time to notify for 656
49695 Sep 22 23:15:16.439 INFO Completion from [1] id:656 status:true
49696 Sep 22 23:15:16.439 INFO [657/752] Repair commands completed
49697 Sep 22 23:15:16.439 INFO Pop front: ReconcileIO { id: ReconciliationId(657), op: ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }, state: ClientData([New, New, New]) }
49698 Sep 22 23:15:16.439 INFO Sent repair work, now wait for resp
49699 Sep 22 23:15:16.439 INFO [0] received reconcile message
49700 Sep 22 23:15:16.439 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(657), op: ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }, state: ClientData([InProgress, New, New]) }, : downstairs
49701 Sep 22 23:15:16.439 INFO [0] client ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }
49702 Sep 22 23:15:16.439 INFO [1] received reconcile message
49703 Sep 22 23:15:16.439 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(657), op: ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49704 Sep 22 23:15:16.439 INFO [1] client ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }
49705 Sep 22 23:15:16.439 INFO [2] received reconcile message
49706 Sep 22 23:15:16.439 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(657), op: ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49707 Sep 22 23:15:16.439 INFO [2] client ExtentClose { repair_id: ReconciliationId(657), extent_id: 116 }
49708 Sep 22 23:15:16.439 DEBG 657 Close extent 116
49709 Sep 22 23:15:16.439 DEBG 657 Close extent 116
49710 Sep 22 23:15:16.440 DEBG 657 Close extent 116
49711 Sep 22 23:15:16.440 DEBG [2] It's time to notify for 657
49712 Sep 22 23:15:16.440 INFO Completion from [2] id:657 status:true
49713 Sep 22 23:15:16.440 INFO [658/752] Repair commands completed
49714 Sep 22 23:15:16.440 INFO Pop front: ReconcileIO { id: ReconciliationId(658), op: ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49715 Sep 22 23:15:16.440 INFO Sent repair work, now wait for resp
49716 Sep 22 23:15:16.440 INFO [0] received reconcile message
49717 Sep 22 23:15:16.440 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(658), op: ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49718 Sep 22 23:15:16.440 INFO [0] client ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49719 Sep 22 23:15:16.440 INFO [0] Sending repair request ReconciliationId(658)
49720 Sep 22 23:15:16.440 INFO [1] received reconcile message
49721 Sep 22 23:15:16.440 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(658), op: ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49722 Sep 22 23:15:16.440 INFO [1] client ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49723 Sep 22 23:15:16.440 INFO [1] No action required ReconciliationId(658)
49724 Sep 22 23:15:16.440 INFO [2] received reconcile message
49725 Sep 22 23:15:16.440 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(658), op: ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49726 Sep 22 23:15:16.440 INFO [2] client ExtentRepair { repair_id: ReconciliationId(658), extent_id: 116, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49727 Sep 22 23:15:16.440 INFO [2] No action required ReconciliationId(658)
49728 Sep 22 23:15:16.441 DEBG 658 Repair extent 116 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
49729 Sep 22 23:15:16.441 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/074.copy"
49730 Sep 22 23:15:16.506 INFO accepted connection, remote_addr: 127.0.0.1:61137, local_addr: 127.0.0.1:46213, task: repair
49731 Sep 22 23:15:16.506 TRCE incoming request, uri: /extent/116/files, method: GET, req_id: f65f8e95-a997-4331-a23f-8621bf7a91f5, remote_addr: 127.0.0.1:61137, local_addr: 127.0.0.1:46213, task: repair
49732 Sep 22 23:15:16.506 INFO request completed, latency_us: 274, response_code: 200, uri: /extent/116/files, method: GET, req_id: f65f8e95-a997-4331-a23f-8621bf7a91f5, remote_addr: 127.0.0.1:61137, local_addr: 127.0.0.1:46213, task: repair
49733 Sep 22 23:15:16.507 INFO eid:116 Found repair files: ["074", "074.db"]
49734 Sep 22 23:15:16.507 TRCE incoming request, uri: /newextent/116/data, method: GET, req_id: 65cd57c7-dad1-4a99-915b-24a616fd0215, remote_addr: 127.0.0.1:61137, local_addr: 127.0.0.1:46213, task: repair
49735 Sep 22 23:15:16.507 INFO request completed, latency_us: 372, response_code: 200, uri: /newextent/116/data, method: GET, req_id: 65cd57c7-dad1-4a99-915b-24a616fd0215, remote_addr: 127.0.0.1:61137, local_addr: 127.0.0.1:46213, task: repair
49736 Sep 22 23:15:16.512 TRCE incoming request, uri: /newextent/116/db, method: GET, req_id: d2d407ce-d434-4429-8976-4789d4f6c234, remote_addr: 127.0.0.1:61137, local_addr: 127.0.0.1:46213, task: repair
49737 Sep 22 23:15:16.513 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/116/db, method: GET, req_id: d2d407ce-d434-4429-8976-4789d4f6c234, remote_addr: 127.0.0.1:61137, local_addr: 127.0.0.1:46213, task: repair
49738 Sep 22 23:15:16.514 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/074.copy" to "/tmp/downstairs-vrx8aK6L/00/000/074.replace"
49739 Sep 22 23:15:16.514 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49740 Sep 22 23:15:16.515 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/074.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
49741 Sep 22 23:15:16.516 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/074"
49742 Sep 22 23:15:16.516 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/074.db"
49743 Sep 22 23:15:16.516 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49744 Sep 22 23:15:16.516 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/074.replace" to "/tmp/downstairs-vrx8aK6L/00/000/074.completed"
49745 Sep 22 23:15:16.516 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49746 Sep 22 23:15:16.516 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49747 Sep 22 23:15:16.516 DEBG [0] It's time to notify for 658
49748 Sep 22 23:15:16.516 INFO Completion from [0] id:658 status:true
49749 Sep 22 23:15:16.516 INFO [659/752] Repair commands completed
49750 Sep 22 23:15:16.516 INFO Pop front: ReconcileIO { id: ReconciliationId(659), op: ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }, state: ClientData([New, New, New]) }
49751 Sep 22 23:15:16.516 INFO Sent repair work, now wait for resp
49752 Sep 22 23:15:16.516 INFO [0] received reconcile message
49753 Sep 22 23:15:16.516 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(659), op: ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }, state: ClientData([InProgress, New, New]) }, : downstairs
49754 Sep 22 23:15:16.516 INFO [0] client ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }
49755 Sep 22 23:15:16.516 INFO [1] received reconcile message
49756 Sep 22 23:15:16.516 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(659), op: ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49757 Sep 22 23:15:16.516 INFO [1] client ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }
49758 Sep 22 23:15:16.516 INFO [2] received reconcile message
49759 Sep 22 23:15:16.517 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(659), op: ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49760 Sep 22 23:15:16.517 INFO [2] client ExtentReopen { repair_id: ReconciliationId(659), extent_id: 116 }
49761 Sep 22 23:15:16.517 DEBG 659 Reopen extent 116
49762 Sep 22 23:15:16.517 DEBG 659 Reopen extent 116
49763 Sep 22 23:15:16.518 DEBG 659 Reopen extent 116
49764 Sep 22 23:15:16.519 DEBG [2] It's time to notify for 659
49765 Sep 22 23:15:16.519 INFO Completion from [2] id:659 status:true
49766 Sep 22 23:15:16.519 INFO [660/752] Repair commands completed
49767 Sep 22 23:15:16.519 INFO Pop front: ReconcileIO { id: ReconciliationId(660), op: ExtentFlush { repair_id: ReconciliationId(660), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49768 Sep 22 23:15:16.519 INFO Sent repair work, now wait for resp
49769 Sep 22 23:15:16.519 INFO [0] received reconcile message
49770 Sep 22 23:15:16.519 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(660), op: ExtentFlush { repair_id: ReconciliationId(660), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49771 Sep 22 23:15:16.519 INFO [0] client ExtentFlush { repair_id: ReconciliationId(660), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49772 Sep 22 23:15:16.519 INFO [1] received reconcile message
49773 Sep 22 23:15:16.519 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(660), op: ExtentFlush { repair_id: ReconciliationId(660), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49774 Sep 22 23:15:16.519 INFO [1] client ExtentFlush { repair_id: ReconciliationId(660), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49775 Sep 22 23:15:16.519 INFO [2] received reconcile message
49776 Sep 22 23:15:16.519 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(660), op: ExtentFlush { repair_id: ReconciliationId(660), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49777 Sep 22 23:15:16.519 INFO [2] client ExtentFlush { repair_id: ReconciliationId(660), extent_id: 131, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49778 Sep 22 23:15:16.519 DEBG 660 Flush extent 131 with f:2 g:2
49779 Sep 22 23:15:16.519 DEBG Flush just extent 131 with f:2 and g:2
49780 Sep 22 23:15:16.519 DEBG [1] It's time to notify for 660
49781 Sep 22 23:15:16.519 INFO Completion from [1] id:660 status:true
49782 Sep 22 23:15:16.519 INFO [661/752] Repair commands completed
49783 Sep 22 23:15:16.519 INFO Pop front: ReconcileIO { id: ReconciliationId(661), op: ExtentClose { repair_id: ReconciliationId(661), extent_id: 131 }, state: ClientData([New, New, New]) }
49784 Sep 22 23:15:16.519 INFO Sent repair work, now wait for resp
49785 Sep 22 23:15:16.519 INFO [0] received reconcile message
49786 Sep 22 23:15:16.519 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(661), op: ExtentClose { repair_id: ReconciliationId(661), extent_id: 131 }, state: ClientData([InProgress, New, New]) }, : downstairs
49787 Sep 22 23:15:16.519 INFO [0] client ExtentClose { repair_id: ReconciliationId(661), extent_id: 131 }
49788 Sep 22 23:15:16.519 INFO [1] received reconcile message
49789 Sep 22 23:15:16.519 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(661), op: ExtentClose { repair_id: ReconciliationId(661), extent_id: 131 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49790 Sep 22 23:15:16.519 INFO [1] client ExtentClose { repair_id: ReconciliationId(661), extent_id: 131 }
49791 Sep 22 23:15:16.520 INFO [2] received reconcile message
49792 Sep 22 23:15:16.520 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(661), op: ExtentClose { repair_id: ReconciliationId(661), extent_id: 131 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49793 Sep 22 23:15:16.520 INFO [2] client ExtentClose { repair_id: ReconciliationId(661), extent_id: 131 }
49794 Sep 22 23:15:16.520 DEBG 661 Close extent 131
49795 Sep 22 23:15:16.520 DEBG 661 Close extent 131
49796 Sep 22 23:15:16.520 DEBG 661 Close extent 131
49797 Sep 22 23:15:16.521 DEBG [2] It's time to notify for 661
49798 Sep 22 23:15:16.521 INFO Completion from [2] id:661 status:true
49799 Sep 22 23:15:16.521 INFO [662/752] Repair commands completed
49800 Sep 22 23:15:16.521 INFO Pop front: ReconcileIO { id: ReconciliationId(662), op: ExtentRepair { repair_id: ReconciliationId(662), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49801 Sep 22 23:15:16.521 INFO Sent repair work, now wait for resp
49802 Sep 22 23:15:16.521 INFO [0] received reconcile message
49803 Sep 22 23:15:16.521 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(662), op: ExtentRepair { repair_id: ReconciliationId(662), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49804 Sep 22 23:15:16.521 INFO [0] client ExtentRepair { repair_id: ReconciliationId(662), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49805 Sep 22 23:15:16.521 INFO [0] Sending repair request ReconciliationId(662)
49806 Sep 22 23:15:16.521 INFO [1] received reconcile message
49807 Sep 22 23:15:16.521 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(662), op: ExtentRepair { repair_id: ReconciliationId(662), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49808 Sep 22 23:15:16.521 INFO [1] client ExtentRepair { repair_id: ReconciliationId(662), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49809 Sep 22 23:15:16.521 INFO [1] No action required ReconciliationId(662)
49810 Sep 22 23:15:16.521 INFO [2] received reconcile message
49811 Sep 22 23:15:16.521 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(662), op: ExtentRepair { repair_id: ReconciliationId(662), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49812 Sep 22 23:15:16.521 INFO [2] client ExtentRepair { repair_id: ReconciliationId(662), extent_id: 131, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49813 Sep 22 23:15:16.521 INFO [2] No action required ReconciliationId(662)
49814 Sep 22 23:15:16.521 DEBG 662 Repair extent 131 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
49815 Sep 22 23:15:16.521 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/083.copy"
49816 Sep 22 23:15:16.584 INFO accepted connection, remote_addr: 127.0.0.1:47908, local_addr: 127.0.0.1:46213, task: repair
49817 Sep 22 23:15:16.584 TRCE incoming request, uri: /extent/131/files, method: GET, req_id: 45630ca0-b216-4e09-a8bd-a521d00056bb, remote_addr: 127.0.0.1:47908, local_addr: 127.0.0.1:46213, task: repair
49818 Sep 22 23:15:16.584 INFO request completed, latency_us: 236, response_code: 200, uri: /extent/131/files, method: GET, req_id: 45630ca0-b216-4e09-a8bd-a521d00056bb, remote_addr: 127.0.0.1:47908, local_addr: 127.0.0.1:46213, task: repair
49819 Sep 22 23:15:16.585 INFO eid:131 Found repair files: ["083", "083.db"]
49820 Sep 22 23:15:16.585 TRCE incoming request, uri: /newextent/131/data, method: GET, req_id: a4572ee0-c61e-4751-b2f4-a3b49da5d8bd, remote_addr: 127.0.0.1:47908, local_addr: 127.0.0.1:46213, task: repair
49821 Sep 22 23:15:16.585 INFO request completed, latency_us: 326, response_code: 200, uri: /newextent/131/data, method: GET, req_id: a4572ee0-c61e-4751-b2f4-a3b49da5d8bd, remote_addr: 127.0.0.1:47908, local_addr: 127.0.0.1:46213, task: repair
49822 Sep 22 23:15:16.590 TRCE incoming request, uri: /newextent/131/db, method: GET, req_id: 464778a9-df73-4e67-910d-0a71101a33fb, remote_addr: 127.0.0.1:47908, local_addr: 127.0.0.1:46213, task: repair
49823 Sep 22 23:15:16.591 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/131/db, method: GET, req_id: 464778a9-df73-4e67-910d-0a71101a33fb, remote_addr: 127.0.0.1:47908, local_addr: 127.0.0.1:46213, task: repair
49824 Sep 22 23:15:16.592 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/083.copy" to "/tmp/downstairs-vrx8aK6L/00/000/083.replace"
49825 Sep 22 23:15:16.592 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49826 Sep 22 23:15:16.593 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/083.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
49827 Sep 22 23:15:16.593 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/083"
49828 Sep 22 23:15:16.593 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/083.db"
49829 Sep 22 23:15:16.593 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49830 Sep 22 23:15:16.593 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/083.replace" to "/tmp/downstairs-vrx8aK6L/00/000/083.completed"
49831 Sep 22 23:15:16.593 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49832 Sep 22 23:15:16.593 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49833 Sep 22 23:15:16.593 DEBG [0] It's time to notify for 662
49834 Sep 22 23:15:16.594 INFO Completion from [0] id:662 status:true
49835 Sep 22 23:15:16.594 INFO [663/752] Repair commands completed
49836 Sep 22 23:15:16.594 INFO Pop front: ReconcileIO { id: ReconciliationId(663), op: ExtentReopen { repair_id: ReconciliationId(663), extent_id: 131 }, state: ClientData([New, New, New]) }
49837 Sep 22 23:15:16.594 INFO Sent repair work, now wait for resp
49838 Sep 22 23:15:16.594 INFO [0] received reconcile message
49839 Sep 22 23:15:16.594 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(663), op: ExtentReopen { repair_id: ReconciliationId(663), extent_id: 131 }, state: ClientData([InProgress, New, New]) }, : downstairs
49840 Sep 22 23:15:16.594 INFO [0] client ExtentReopen { repair_id: ReconciliationId(663), extent_id: 131 }
49841 Sep 22 23:15:16.594 INFO [1] received reconcile message
49842 Sep 22 23:15:16.594 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(663), op: ExtentReopen { repair_id: ReconciliationId(663), extent_id: 131 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49843 Sep 22 23:15:16.594 INFO [1] client ExtentReopen { repair_id: ReconciliationId(663), extent_id: 131 }
49844 Sep 22 23:15:16.594 INFO [2] received reconcile message
49845 Sep 22 23:15:16.594 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(663), op: ExtentReopen { repair_id: ReconciliationId(663), extent_id: 131 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49846 Sep 22 23:15:16.594 INFO [2] client ExtentReopen { repair_id: ReconciliationId(663), extent_id: 131 }
49847 Sep 22 23:15:16.594 DEBG 663 Reopen extent 131
49848 Sep 22 23:15:16.595 DEBG 663 Reopen extent 131
49849 Sep 22 23:15:16.595 DEBG 663 Reopen extent 131
49850 Sep 22 23:15:16.596 DEBG [2] It's time to notify for 663
49851 Sep 22 23:15:16.596 INFO Completion from [2] id:663 status:true
49852 Sep 22 23:15:16.596 INFO [664/752] Repair commands completed
49853 Sep 22 23:15:16.596 INFO Pop front: ReconcileIO { id: ReconciliationId(664), op: ExtentFlush { repair_id: ReconciliationId(664), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49854 Sep 22 23:15:16.596 INFO Sent repair work, now wait for resp
49855 Sep 22 23:15:16.596 INFO [0] received reconcile message
49856 Sep 22 23:15:16.596 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(664), op: ExtentFlush { repair_id: ReconciliationId(664), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49857 Sep 22 23:15:16.596 INFO [0] client ExtentFlush { repair_id: ReconciliationId(664), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49858 Sep 22 23:15:16.596 INFO [1] received reconcile message
49859 Sep 22 23:15:16.596 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(664), op: ExtentFlush { repair_id: ReconciliationId(664), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49860 Sep 22 23:15:16.596 INFO [1] client ExtentFlush { repair_id: ReconciliationId(664), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49861 Sep 22 23:15:16.596 INFO [2] received reconcile message
49862 Sep 22 23:15:16.596 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(664), op: ExtentFlush { repair_id: ReconciliationId(664), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49863 Sep 22 23:15:16.596 INFO [2] client ExtentFlush { repair_id: ReconciliationId(664), extent_id: 17, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49864 Sep 22 23:15:16.596 DEBG 664 Flush extent 17 with f:2 g:2
49865 Sep 22 23:15:16.596 DEBG Flush just extent 17 with f:2 and g:2
49866 Sep 22 23:15:16.596 DEBG [1] It's time to notify for 664
49867 Sep 22 23:15:16.596 INFO Completion from [1] id:664 status:true
49868 Sep 22 23:15:16.596 INFO [665/752] Repair commands completed
49869 Sep 22 23:15:16.596 INFO Pop front: ReconcileIO { id: ReconciliationId(665), op: ExtentClose { repair_id: ReconciliationId(665), extent_id: 17 }, state: ClientData([New, New, New]) }
49870 Sep 22 23:15:16.596 INFO Sent repair work, now wait for resp
49871 Sep 22 23:15:16.596 INFO [0] received reconcile message
49872 Sep 22 23:15:16.596 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(665), op: ExtentClose { repair_id: ReconciliationId(665), extent_id: 17 }, state: ClientData([InProgress, New, New]) }, : downstairs
49873 Sep 22 23:15:16.596 INFO [0] client ExtentClose { repair_id: ReconciliationId(665), extent_id: 17 }
49874 Sep 22 23:15:16.597 INFO [1] received reconcile message
49875 Sep 22 23:15:16.597 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(665), op: ExtentClose { repair_id: ReconciliationId(665), extent_id: 17 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49876 Sep 22 23:15:16.597 INFO [1] client ExtentClose { repair_id: ReconciliationId(665), extent_id: 17 }
49877 Sep 22 23:15:16.597 INFO [2] received reconcile message
49878 Sep 22 23:15:16.597 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(665), op: ExtentClose { repair_id: ReconciliationId(665), extent_id: 17 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49879 Sep 22 23:15:16.597 INFO [2] client ExtentClose { repair_id: ReconciliationId(665), extent_id: 17 }
49880 Sep 22 23:15:16.597 DEBG 665 Close extent 17
49881 Sep 22 23:15:16.597 DEBG 665 Close extent 17
49882 Sep 22 23:15:16.597 DEBG 665 Close extent 17
49883 Sep 22 23:15:16.598 DEBG [2] It's time to notify for 665
49884 Sep 22 23:15:16.598 INFO Completion from [2] id:665 status:true
49885 Sep 22 23:15:16.598 INFO [666/752] Repair commands completed
49886 Sep 22 23:15:16.598 INFO Pop front: ReconcileIO { id: ReconciliationId(666), op: ExtentRepair { repair_id: ReconciliationId(666), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49887 Sep 22 23:15:16.598 INFO Sent repair work, now wait for resp
49888 Sep 22 23:15:16.598 INFO [0] received reconcile message
49889 Sep 22 23:15:16.598 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(666), op: ExtentRepair { repair_id: ReconciliationId(666), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49890 Sep 22 23:15:16.598 INFO [0] client ExtentRepair { repair_id: ReconciliationId(666), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49891 Sep 22 23:15:16.598 INFO [0] Sending repair request ReconciliationId(666)
49892 Sep 22 23:15:16.598 INFO [1] received reconcile message
49893 Sep 22 23:15:16.598 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(666), op: ExtentRepair { repair_id: ReconciliationId(666), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49894 Sep 22 23:15:16.598 INFO [1] client ExtentRepair { repair_id: ReconciliationId(666), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49895 Sep 22 23:15:16.598 INFO [1] No action required ReconciliationId(666)
49896 Sep 22 23:15:16.598 INFO [2] received reconcile message
49897 Sep 22 23:15:16.598 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(666), op: ExtentRepair { repair_id: ReconciliationId(666), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49898 Sep 22 23:15:16.598 INFO [2] client ExtentRepair { repair_id: ReconciliationId(666), extent_id: 17, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49899 Sep 22 23:15:16.598 INFO [2] No action required ReconciliationId(666)
49900 Sep 22 23:15:16.598 DEBG 666 Repair extent 17 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
49901 Sep 22 23:15:16.598 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/011.copy"
49902 Sep 22 23:15:16.662 INFO accepted connection, remote_addr: 127.0.0.1:40101, local_addr: 127.0.0.1:46213, task: repair
49903 Sep 22 23:15:16.662 TRCE incoming request, uri: /extent/17/files, method: GET, req_id: 14fcb968-9e6e-4647-aa7d-59390b0c41e5, remote_addr: 127.0.0.1:40101, local_addr: 127.0.0.1:46213, task: repair
49904 Sep 22 23:15:16.662 INFO request completed, latency_us: 192, response_code: 200, uri: /extent/17/files, method: GET, req_id: 14fcb968-9e6e-4647-aa7d-59390b0c41e5, remote_addr: 127.0.0.1:40101, local_addr: 127.0.0.1:46213, task: repair
49905 Sep 22 23:15:16.662 INFO eid:17 Found repair files: ["011", "011.db"]
49906 Sep 22 23:15:16.662 TRCE incoming request, uri: /newextent/17/data, method: GET, req_id: 69007d68-50c4-45af-924e-12869dfc0279, remote_addr: 127.0.0.1:40101, local_addr: 127.0.0.1:46213, task: repair
49907 Sep 22 23:15:16.663 INFO request completed, latency_us: 251, response_code: 200, uri: /newextent/17/data, method: GET, req_id: 69007d68-50c4-45af-924e-12869dfc0279, remote_addr: 127.0.0.1:40101, local_addr: 127.0.0.1:46213, task: repair
49908 Sep 22 23:15:16.667 DEBG [0] Read AckReady 1090, : downstairs
49909 Sep 22 23:15:16.668 TRCE incoming request, uri: /newextent/17/db, method: GET, req_id: 8bedfc34-b26b-4721-9294-78ea2f3adcc1, remote_addr: 127.0.0.1:40101, local_addr: 127.0.0.1:46213, task: repair
49910 Sep 22 23:15:16.668 INFO request completed, latency_us: 297, response_code: 200, uri: /newextent/17/db, method: GET, req_id: 8bedfc34-b26b-4721-9294-78ea2f3adcc1, remote_addr: 127.0.0.1:40101, local_addr: 127.0.0.1:46213, task: repair
49911 Sep 22 23:15:16.669 DEBG up_ds_listen was notified
49912 Sep 22 23:15:16.669 DEBG up_ds_listen process 1090
49913 Sep 22 23:15:16.669 DEBG [A] ack job 1090:91, : downstairs
49914 Sep 22 23:15:16.669 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/011.copy" to "/tmp/downstairs-vrx8aK6L/00/000/011.replace"
49915 Sep 22 23:15:16.669 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49916 Sep 22 23:15:16.670 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/011.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
49917 Sep 22 23:15:16.670 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/011"
49918 Sep 22 23:15:16.670 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/011.db"
49919 Sep 22 23:15:16.670 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49920 Sep 22 23:15:16.670 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/011.replace" to "/tmp/downstairs-vrx8aK6L/00/000/011.completed"
49921 Sep 22 23:15:16.670 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49922 Sep 22 23:15:16.670 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
49923 Sep 22 23:15:16.671 DEBG [0] It's time to notify for 666
49924 Sep 22 23:15:16.671 INFO Completion from [0] id:666 status:true
49925 Sep 22 23:15:16.671 INFO [667/752] Repair commands completed
49926 Sep 22 23:15:16.671 INFO Pop front: ReconcileIO { id: ReconciliationId(667), op: ExtentReopen { repair_id: ReconciliationId(667), extent_id: 17 }, state: ClientData([New, New, New]) }
49927 Sep 22 23:15:16.671 INFO Sent repair work, now wait for resp
49928 Sep 22 23:15:16.671 INFO [0] received reconcile message
49929 Sep 22 23:15:16.671 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(667), op: ExtentReopen { repair_id: ReconciliationId(667), extent_id: 17 }, state: ClientData([InProgress, New, New]) }, : downstairs
49930 Sep 22 23:15:16.671 INFO [0] client ExtentReopen { repair_id: ReconciliationId(667), extent_id: 17 }
49931 Sep 22 23:15:16.671 INFO [1] received reconcile message
49932 Sep 22 23:15:16.671 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(667), op: ExtentReopen { repair_id: ReconciliationId(667), extent_id: 17 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49933 Sep 22 23:15:16.671 INFO [1] client ExtentReopen { repair_id: ReconciliationId(667), extent_id: 17 }
49934 Sep 22 23:15:16.671 INFO [2] received reconcile message
49935 Sep 22 23:15:16.671 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(667), op: ExtentReopen { repair_id: ReconciliationId(667), extent_id: 17 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49936 Sep 22 23:15:16.671 INFO [2] client ExtentReopen { repair_id: ReconciliationId(667), extent_id: 17 }
49937 Sep 22 23:15:16.671 DEBG 667 Reopen extent 17
49938 Sep 22 23:15:16.672 DEBG 667 Reopen extent 17
49939 Sep 22 23:15:16.672 DEBG 667 Reopen extent 17
49940 Sep 22 23:15:16.673 DEBG [2] It's time to notify for 667
49941 Sep 22 23:15:16.673 INFO Completion from [2] id:667 status:true
49942 Sep 22 23:15:16.673 INFO [668/752] Repair commands completed
49943 Sep 22 23:15:16.673 INFO Pop front: ReconcileIO { id: ReconciliationId(668), op: ExtentFlush { repair_id: ReconciliationId(668), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
49944 Sep 22 23:15:16.673 INFO Sent repair work, now wait for resp
49945 Sep 22 23:15:16.673 INFO [0] received reconcile message
49946 Sep 22 23:15:16.673 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(668), op: ExtentFlush { repair_id: ReconciliationId(668), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
49947 Sep 22 23:15:16.673 INFO [0] client ExtentFlush { repair_id: ReconciliationId(668), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49948 Sep 22 23:15:16.673 INFO [1] received reconcile message
49949 Sep 22 23:15:16.673 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(668), op: ExtentFlush { repair_id: ReconciliationId(668), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
49950 Sep 22 23:15:16.673 INFO [1] client ExtentFlush { repair_id: ReconciliationId(668), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49951 Sep 22 23:15:16.673 INFO [2] received reconcile message
49952 Sep 22 23:15:16.673 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(668), op: ExtentFlush { repair_id: ReconciliationId(668), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
49953 Sep 22 23:15:16.673 INFO [2] client ExtentFlush { repair_id: ReconciliationId(668), extent_id: 182, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
49954 Sep 22 23:15:16.673 DEBG 668 Flush extent 182 with f:2 g:2
49955 Sep 22 23:15:16.673 DEBG Flush just extent 182 with f:2 and g:2
49956 Sep 22 23:15:16.673 DEBG [1] It's time to notify for 668
49957 Sep 22 23:15:16.673 INFO Completion from [1] id:668 status:true
49958 Sep 22 23:15:16.673 INFO [669/752] Repair commands completed
49959 Sep 22 23:15:16.674 INFO Pop front: ReconcileIO { id: ReconciliationId(669), op: ExtentClose { repair_id: ReconciliationId(669), extent_id: 182 }, state: ClientData([New, New, New]) }
49960 Sep 22 23:15:16.674 INFO Sent repair work, now wait for resp
49961 Sep 22 23:15:16.674 INFO [0] received reconcile message
49962 Sep 22 23:15:16.674 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(669), op: ExtentClose { repair_id: ReconciliationId(669), extent_id: 182 }, state: ClientData([InProgress, New, New]) }, : downstairs
49963 Sep 22 23:15:16.674 INFO [0] client ExtentClose { repair_id: ReconciliationId(669), extent_id: 182 }
49964 Sep 22 23:15:16.674 INFO [1] received reconcile message
49965 Sep 22 23:15:16.674 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(669), op: ExtentClose { repair_id: ReconciliationId(669), extent_id: 182 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49966 Sep 22 23:15:16.674 INFO [1] client ExtentClose { repair_id: ReconciliationId(669), extent_id: 182 }
49967 Sep 22 23:15:16.674 INFO [2] received reconcile message
49968 Sep 22 23:15:16.674 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(669), op: ExtentClose { repair_id: ReconciliationId(669), extent_id: 182 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
49969 Sep 22 23:15:16.674 INFO [2] client ExtentClose { repair_id: ReconciliationId(669), extent_id: 182 }
49970 Sep 22 23:15:16.674 DEBG 669 Close extent 182
49971 Sep 22 23:15:16.674 DEBG 669 Close extent 182
49972 Sep 22 23:15:16.674 DEBG 669 Close extent 182
49973 Sep 22 23:15:16.675 DEBG [2] It's time to notify for 669
49974 Sep 22 23:15:16.675 INFO Completion from [2] id:669 status:true
49975 Sep 22 23:15:16.675 INFO [670/752] Repair commands completed
49976 Sep 22 23:15:16.675 INFO Pop front: ReconcileIO { id: ReconciliationId(670), op: ExtentRepair { repair_id: ReconciliationId(670), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
49977 Sep 22 23:15:16.675 INFO Sent repair work, now wait for resp
49978 Sep 22 23:15:16.675 INFO [0] received reconcile message
49979 Sep 22 23:15:16.675 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(670), op: ExtentRepair { repair_id: ReconciliationId(670), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
49980 Sep 22 23:15:16.675 INFO [0] client ExtentRepair { repair_id: ReconciliationId(670), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49981 Sep 22 23:15:16.675 INFO [0] Sending repair request ReconciliationId(670)
49982 Sep 22 23:15:16.675 INFO [1] received reconcile message
49983 Sep 22 23:15:16.675 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(670), op: ExtentRepair { repair_id: ReconciliationId(670), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
49984 Sep 22 23:15:16.675 INFO [1] client ExtentRepair { repair_id: ReconciliationId(670), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49985 Sep 22 23:15:16.675 INFO [1] No action required ReconciliationId(670)
49986 Sep 22 23:15:16.675 INFO [2] received reconcile message
49987 Sep 22 23:15:16.675 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(670), op: ExtentRepair { repair_id: ReconciliationId(670), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
49988 Sep 22 23:15:16.675 INFO [2] client ExtentRepair { repair_id: ReconciliationId(670), extent_id: 182, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
49989 Sep 22 23:15:16.675 INFO [2] No action required ReconciliationId(670)
49990 Sep 22 23:15:16.675 DEBG 670 Repair extent 182 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
49991 Sep 22 23:15:16.675 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0B6.copy"
49992 Sep 22 23:15:16.722 DEBG up_ds_listen checked 1 jobs, back to waiting
49993 Sep 22 23:15:16.724 DEBG Flush :1091 extent_limit None deps:[JobId(1090), JobId(1089)] res:true f:35 g:1
49994 Sep 22 23:15:16.724 INFO [lossy] sleeping 1 second
49995 Sep 22 23:15:16.724 INFO [lossy] skipping 1090
49996 Sep 22 23:15:16.730 DEBG Read :1090 deps:[JobId(1089)] res:true
49997 Sep 22 23:15:16.739 INFO accepted connection, remote_addr: 127.0.0.1:52509, local_addr: 127.0.0.1:46213, task: repair
49998 Sep 22 23:15:16.740 TRCE incoming request, uri: /extent/182/files, method: GET, req_id: 9da082b9-3bb5-4b5d-b108-b999f01798eb, remote_addr: 127.0.0.1:52509, local_addr: 127.0.0.1:46213, task: repair
49999 Sep 22 23:15:16.740 INFO request completed, latency_us: 192, response_code: 200, uri: /extent/182/files, method: GET, req_id: 9da082b9-3bb5-4b5d-b108-b999f01798eb, remote_addr: 127.0.0.1:52509, local_addr: 127.0.0.1:46213, task: repair
50000 Sep 22 23:15:16.740 INFO eid:182 Found repair files: ["0B6", "0B6.db"]
50001 Sep 22 23:15:16.740 TRCE incoming request, uri: /newextent/182/data, method: GET, req_id: dc869527-e495-48b8-8c6f-6f606a5a36da, remote_addr: 127.0.0.1:52509, local_addr: 127.0.0.1:46213, task: repair
50002 Sep 22 23:15:16.741 INFO request completed, latency_us: 252, response_code: 200, uri: /newextent/182/data, method: GET, req_id: dc869527-e495-48b8-8c6f-6f606a5a36da, remote_addr: 127.0.0.1:52509, local_addr: 127.0.0.1:46213, task: repair
50003 Sep 22 23:15:16.746 TRCE incoming request, uri: /newextent/182/db, method: GET, req_id: 070fe0da-36ce-497e-88d9-577a28ff67d5, remote_addr: 127.0.0.1:52509, local_addr: 127.0.0.1:46213, task: repair
50004 Sep 22 23:15:16.746 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/182/db, method: GET, req_id: 070fe0da-36ce-497e-88d9-577a28ff67d5, remote_addr: 127.0.0.1:52509, local_addr: 127.0.0.1:46213, task: repair
50005 Sep 22 23:15:16.747 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0B6.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0B6.replace"
50006 Sep 22 23:15:16.747 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50007 Sep 22 23:15:16.748 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0B6.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
50008 Sep 22 23:15:16.748 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B6"
50009 Sep 22 23:15:16.748 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B6.db"
50010 Sep 22 23:15:16.748 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50011 Sep 22 23:15:16.749 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0B6.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0B6.completed"
50012 Sep 22 23:15:16.749 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50013 Sep 22 23:15:16.749 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50014 Sep 22 23:15:16.749 DEBG [0] It's time to notify for 670
50015 Sep 22 23:15:16.749 INFO Completion from [0] id:670 status:true
50016 Sep 22 23:15:16.749 INFO [671/752] Repair commands completed
50017 Sep 22 23:15:16.749 INFO Pop front: ReconcileIO { id: ReconciliationId(671), op: ExtentReopen { repair_id: ReconciliationId(671), extent_id: 182 }, state: ClientData([New, New, New]) }
50018 Sep 22 23:15:16.749 INFO Sent repair work, now wait for resp
50019 Sep 22 23:15:16.749 INFO [0] received reconcile message
50020 Sep 22 23:15:16.749 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(671), op: ExtentReopen { repair_id: ReconciliationId(671), extent_id: 182 }, state: ClientData([InProgress, New, New]) }, : downstairs
50021 Sep 22 23:15:16.749 INFO [0] client ExtentReopen { repair_id: ReconciliationId(671), extent_id: 182 }
50022 Sep 22 23:15:16.749 INFO [1] received reconcile message
50023 Sep 22 23:15:16.749 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(671), op: ExtentReopen { repair_id: ReconciliationId(671), extent_id: 182 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50024 Sep 22 23:15:16.749 INFO [1] client ExtentReopen { repair_id: ReconciliationId(671), extent_id: 182 }
50025 Sep 22 23:15:16.749 INFO [2] received reconcile message
50026 Sep 22 23:15:16.749 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(671), op: ExtentReopen { repair_id: ReconciliationId(671), extent_id: 182 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50027 Sep 22 23:15:16.749 INFO [2] client ExtentReopen { repair_id: ReconciliationId(671), extent_id: 182 }
50028 Sep 22 23:15:16.749 DEBG 671 Reopen extent 182
50029 Sep 22 23:15:16.750 DEBG 671 Reopen extent 182
50030 Sep 22 23:15:16.750 DEBG 671 Reopen extent 182
50031 Sep 22 23:15:16.751 DEBG [2] It's time to notify for 671
50032 Sep 22 23:15:16.751 INFO Completion from [2] id:671 status:true
50033 Sep 22 23:15:16.751 INFO [672/752] Repair commands completed
50034 Sep 22 23:15:16.751 INFO Pop front: ReconcileIO { id: ReconciliationId(672), op: ExtentFlush { repair_id: ReconciliationId(672), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50035 Sep 22 23:15:16.751 INFO Sent repair work, now wait for resp
50036 Sep 22 23:15:16.751 INFO [0] received reconcile message
50037 Sep 22 23:15:16.751 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(672), op: ExtentFlush { repair_id: ReconciliationId(672), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50038 Sep 22 23:15:16.751 INFO [0] client ExtentFlush { repair_id: ReconciliationId(672), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50039 Sep 22 23:15:16.751 INFO [1] received reconcile message
50040 Sep 22 23:15:16.751 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(672), op: ExtentFlush { repair_id: ReconciliationId(672), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50041 Sep 22 23:15:16.751 INFO [1] client ExtentFlush { repair_id: ReconciliationId(672), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50042 Sep 22 23:15:16.751 INFO [2] received reconcile message
50043 Sep 22 23:15:16.751 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(672), op: ExtentFlush { repair_id: ReconciliationId(672), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50044 Sep 22 23:15:16.751 INFO [2] client ExtentFlush { repair_id: ReconciliationId(672), extent_id: 183, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50045 Sep 22 23:15:16.751 DEBG 672 Flush extent 183 with f:2 g:2
50046 Sep 22 23:15:16.751 DEBG Flush just extent 183 with f:2 and g:2
50047 Sep 22 23:15:16.752 DEBG [1] It's time to notify for 672
50048 Sep 22 23:15:16.752 INFO Completion from [1] id:672 status:true
50049 Sep 22 23:15:16.752 INFO [673/752] Repair commands completed
50050 Sep 22 23:15:16.752 INFO Pop front: ReconcileIO { id: ReconciliationId(673), op: ExtentClose { repair_id: ReconciliationId(673), extent_id: 183 }, state: ClientData([New, New, New]) }
50051 Sep 22 23:15:16.752 INFO Sent repair work, now wait for resp
50052 Sep 22 23:15:16.752 INFO [0] received reconcile message
50053 Sep 22 23:15:16.752 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(673), op: ExtentClose { repair_id: ReconciliationId(673), extent_id: 183 }, state: ClientData([InProgress, New, New]) }, : downstairs
50054 Sep 22 23:15:16.752 INFO [0] client ExtentClose { repair_id: ReconciliationId(673), extent_id: 183 }
50055 Sep 22 23:15:16.752 INFO [1] received reconcile message
50056 Sep 22 23:15:16.752 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(673), op: ExtentClose { repair_id: ReconciliationId(673), extent_id: 183 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50057 Sep 22 23:15:16.752 INFO [1] client ExtentClose { repair_id: ReconciliationId(673), extent_id: 183 }
50058 Sep 22 23:15:16.752 INFO [2] received reconcile message
50059 Sep 22 23:15:16.752 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(673), op: ExtentClose { repair_id: ReconciliationId(673), extent_id: 183 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50060 Sep 22 23:15:16.752 INFO [2] client ExtentClose { repair_id: ReconciliationId(673), extent_id: 183 }
50061 Sep 22 23:15:16.752 DEBG 673 Close extent 183
50062 Sep 22 23:15:16.752 DEBG 673 Close extent 183
50063 Sep 22 23:15:16.753 DEBG 673 Close extent 183
50064 Sep 22 23:15:16.753 DEBG [2] It's time to notify for 673
50065 Sep 22 23:15:16.753 INFO Completion from [2] id:673 status:true
50066 Sep 22 23:15:16.753 INFO [674/752] Repair commands completed
50067 Sep 22 23:15:16.753 INFO Pop front: ReconcileIO { id: ReconciliationId(674), op: ExtentRepair { repair_id: ReconciliationId(674), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50068 Sep 22 23:15:16.753 INFO Sent repair work, now wait for resp
50069 Sep 22 23:15:16.753 INFO [0] received reconcile message
50070 Sep 22 23:15:16.753 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(674), op: ExtentRepair { repair_id: ReconciliationId(674), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50071 Sep 22 23:15:16.753 INFO [0] client ExtentRepair { repair_id: ReconciliationId(674), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50072 Sep 22 23:15:16.753 INFO [0] Sending repair request ReconciliationId(674)
50073 Sep 22 23:15:16.753 INFO [1] received reconcile message
50074 Sep 22 23:15:16.753 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(674), op: ExtentRepair { repair_id: ReconciliationId(674), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50075 Sep 22 23:15:16.753 INFO [1] client ExtentRepair { repair_id: ReconciliationId(674), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50076 Sep 22 23:15:16.753 INFO [1] No action required ReconciliationId(674)
50077 Sep 22 23:15:16.753 INFO [2] received reconcile message
50078 Sep 22 23:15:16.753 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(674), op: ExtentRepair { repair_id: ReconciliationId(674), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50079 Sep 22 23:15:16.753 INFO [2] client ExtentRepair { repair_id: ReconciliationId(674), extent_id: 183, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50080 Sep 22 23:15:16.753 INFO [2] No action required ReconciliationId(674)
50081 Sep 22 23:15:16.753 DEBG 674 Repair extent 183 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
50082 Sep 22 23:15:16.753 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0B7.copy"
50083 Sep 22 23:15:16.800 DEBG IO Read 1092 has deps [JobId(1091)]
50084 Sep 22 23:15:16.818 INFO accepted connection, remote_addr: 127.0.0.1:46103, local_addr: 127.0.0.1:46213, task: repair
50085 Sep 22 23:15:16.818 TRCE incoming request, uri: /extent/183/files, method: GET, req_id: 94291c13-e9d2-40ec-b6c5-cf879ae548a4, remote_addr: 127.0.0.1:46103, local_addr: 127.0.0.1:46213, task: repair
50086 Sep 22 23:15:16.818 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/183/files, method: GET, req_id: 94291c13-e9d2-40ec-b6c5-cf879ae548a4, remote_addr: 127.0.0.1:46103, local_addr: 127.0.0.1:46213, task: repair
50087 Sep 22 23:15:16.818 INFO eid:183 Found repair files: ["0B7", "0B7.db"]
50088 Sep 22 23:15:16.818 TRCE incoming request, uri: /newextent/183/data, method: GET, req_id: e08f7204-495f-4d24-945a-03fcc08f64a9, remote_addr: 127.0.0.1:46103, local_addr: 127.0.0.1:46213, task: repair
50089 Sep 22 23:15:16.819 INFO request completed, latency_us: 331, response_code: 200, uri: /newextent/183/data, method: GET, req_id: e08f7204-495f-4d24-945a-03fcc08f64a9, remote_addr: 127.0.0.1:46103, local_addr: 127.0.0.1:46213, task: repair
50090 Sep 22 23:15:16.824 TRCE incoming request, uri: /newextent/183/db, method: GET, req_id: 596256a6-0480-480b-8bde-456ec1e40267, remote_addr: 127.0.0.1:46103, local_addr: 127.0.0.1:46213, task: repair
50091 Sep 22 23:15:16.824 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/183/db, method: GET, req_id: 596256a6-0480-480b-8bde-456ec1e40267, remote_addr: 127.0.0.1:46103, local_addr: 127.0.0.1:46213, task: repair
50092 Sep 22 23:15:16.825 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0B7.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0B7.replace"
50093 Sep 22 23:15:16.825 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50094 Sep 22 23:15:16.826 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0B7.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
50095 Sep 22 23:15:16.826 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B7"
50096 Sep 22 23:15:16.826 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0B7.db"
50097 Sep 22 23:15:16.826 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50098 Sep 22 23:15:16.827 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0B7.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0B7.completed"
50099 Sep 22 23:15:16.827 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50100 Sep 22 23:15:16.827 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50101 Sep 22 23:15:16.827 DEBG [0] It's time to notify for 674
50102 Sep 22 23:15:16.827 INFO Completion from [0] id:674 status:true
50103 Sep 22 23:15:16.827 INFO [675/752] Repair commands completed
50104 Sep 22 23:15:16.827 INFO Pop front: ReconcileIO { id: ReconciliationId(675), op: ExtentReopen { repair_id: ReconciliationId(675), extent_id: 183 }, state: ClientData([New, New, New]) }
50105 Sep 22 23:15:16.827 INFO Sent repair work, now wait for resp
50106 Sep 22 23:15:16.827 INFO [0] received reconcile message
50107 Sep 22 23:15:16.827 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(675), op: ExtentReopen { repair_id: ReconciliationId(675), extent_id: 183 }, state: ClientData([InProgress, New, New]) }, : downstairs
50108 Sep 22 23:15:16.827 INFO [0] client ExtentReopen { repair_id: ReconciliationId(675), extent_id: 183 }
50109 Sep 22 23:15:16.827 INFO [1] received reconcile message
50110 Sep 22 23:15:16.827 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(675), op: ExtentReopen { repair_id: ReconciliationId(675), extent_id: 183 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50111 Sep 22 23:15:16.827 INFO [1] client ExtentReopen { repair_id: ReconciliationId(675), extent_id: 183 }
50112 Sep 22 23:15:16.827 INFO [2] received reconcile message
50113 Sep 22 23:15:16.827 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(675), op: ExtentReopen { repair_id: ReconciliationId(675), extent_id: 183 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50114 Sep 22 23:15:16.827 INFO [2] client ExtentReopen { repair_id: ReconciliationId(675), extent_id: 183 }
50115 Sep 22 23:15:16.827 DEBG 675 Reopen extent 183
50116 Sep 22 23:15:16.828 DEBG 675 Reopen extent 183
50117 Sep 22 23:15:16.828 DEBG 675 Reopen extent 183
50118 Sep 22 23:15:16.829 DEBG [2] It's time to notify for 675
50119 Sep 22 23:15:16.829 INFO Completion from [2] id:675 status:true
50120 Sep 22 23:15:16.829 INFO [676/752] Repair commands completed
50121 Sep 22 23:15:16.829 INFO Pop front: ReconcileIO { id: ReconciliationId(676), op: ExtentFlush { repair_id: ReconciliationId(676), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50122 Sep 22 23:15:16.829 INFO Sent repair work, now wait for resp
50123 Sep 22 23:15:16.829 INFO [0] received reconcile message
50124 Sep 22 23:15:16.829 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(676), op: ExtentFlush { repair_id: ReconciliationId(676), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50125 Sep 22 23:15:16.829 INFO [0] client ExtentFlush { repair_id: ReconciliationId(676), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50126 Sep 22 23:15:16.829 INFO [1] received reconcile message
50127 Sep 22 23:15:16.829 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(676), op: ExtentFlush { repair_id: ReconciliationId(676), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50128 Sep 22 23:15:16.829 INFO [1] client ExtentFlush { repair_id: ReconciliationId(676), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50129 Sep 22 23:15:16.829 INFO [2] received reconcile message
50130 Sep 22 23:15:16.829 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(676), op: ExtentFlush { repair_id: ReconciliationId(676), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50131 Sep 22 23:15:16.829 INFO [2] client ExtentFlush { repair_id: ReconciliationId(676), extent_id: 20, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50132 Sep 22 23:15:16.829 DEBG 676 Flush extent 20 with f:2 g:2
50133 Sep 22 23:15:16.829 DEBG Flush just extent 20 with f:2 and g:2
50134 Sep 22 23:15:16.830 DEBG [1] It's time to notify for 676
50135 Sep 22 23:15:16.830 INFO Completion from [1] id:676 status:true
50136 Sep 22 23:15:16.830 INFO [677/752] Repair commands completed
50137 Sep 22 23:15:16.830 INFO Pop front: ReconcileIO { id: ReconciliationId(677), op: ExtentClose { repair_id: ReconciliationId(677), extent_id: 20 }, state: ClientData([New, New, New]) }
50138 Sep 22 23:15:16.830 INFO Sent repair work, now wait for resp
50139 Sep 22 23:15:16.830 INFO [0] received reconcile message
50140 Sep 22 23:15:16.830 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(677), op: ExtentClose { repair_id: ReconciliationId(677), extent_id: 20 }, state: ClientData([InProgress, New, New]) }, : downstairs
50141 Sep 22 23:15:16.830 INFO [0] client ExtentClose { repair_id: ReconciliationId(677), extent_id: 20 }
50142 Sep 22 23:15:16.830 INFO [1] received reconcile message
50143 Sep 22 23:15:16.830 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(677), op: ExtentClose { repair_id: ReconciliationId(677), extent_id: 20 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50144 Sep 22 23:15:16.830 INFO [1] client ExtentClose { repair_id: ReconciliationId(677), extent_id: 20 }
50145 Sep 22 23:15:16.830 INFO [2] received reconcile message
50146 Sep 22 23:15:16.830 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(677), op: ExtentClose { repair_id: ReconciliationId(677), extent_id: 20 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50147 Sep 22 23:15:16.830 INFO [2] client ExtentClose { repair_id: ReconciliationId(677), extent_id: 20 }
50148 Sep 22 23:15:16.830 DEBG 677 Close extent 20
50149 Sep 22 23:15:16.830 DEBG 677 Close extent 20
50150 Sep 22 23:15:16.831 DEBG 677 Close extent 20
50151 Sep 22 23:15:16.831 DEBG [2] It's time to notify for 677
50152 Sep 22 23:15:16.831 INFO Completion from [2] id:677 status:true
50153 Sep 22 23:15:16.831 INFO [678/752] Repair commands completed
50154 Sep 22 23:15:16.831 INFO Pop front: ReconcileIO { id: ReconciliationId(678), op: ExtentRepair { repair_id: ReconciliationId(678), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50155 Sep 22 23:15:16.831 INFO Sent repair work, now wait for resp
50156 Sep 22 23:15:16.831 INFO [0] received reconcile message
50157 Sep 22 23:15:16.831 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(678), op: ExtentRepair { repair_id: ReconciliationId(678), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50158 Sep 22 23:15:16.831 INFO [0] client ExtentRepair { repair_id: ReconciliationId(678), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50159 Sep 22 23:15:16.831 INFO [0] Sending repair request ReconciliationId(678)
50160 Sep 22 23:15:16.831 INFO [1] received reconcile message
50161 Sep 22 23:15:16.831 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(678), op: ExtentRepair { repair_id: ReconciliationId(678), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50162 Sep 22 23:15:16.831 INFO [1] client ExtentRepair { repair_id: ReconciliationId(678), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50163 Sep 22 23:15:16.831 INFO [1] No action required ReconciliationId(678)
50164 Sep 22 23:15:16.831 INFO [2] received reconcile message
50165 Sep 22 23:15:16.831 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(678), op: ExtentRepair { repair_id: ReconciliationId(678), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50166 Sep 22 23:15:16.831 INFO [2] client ExtentRepair { repair_id: ReconciliationId(678), extent_id: 20, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50167 Sep 22 23:15:16.831 INFO [2] No action required ReconciliationId(678)
50168 Sep 22 23:15:16.831 DEBG 678 Repair extent 20 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
50169 Sep 22 23:15:16.832 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/014.copy"
50170 Sep 22 23:15:16.893 INFO accepted connection, remote_addr: 127.0.0.1:57550, local_addr: 127.0.0.1:46213, task: repair
50171 Sep 22 23:15:16.894 TRCE incoming request, uri: /extent/20/files, method: GET, req_id: 4e92c872-8ce0-463f-b09a-29f24eca1511, remote_addr: 127.0.0.1:57550, local_addr: 127.0.0.1:46213, task: repair
50172 Sep 22 23:15:16.894 INFO request completed, latency_us: 202, response_code: 200, uri: /extent/20/files, method: GET, req_id: 4e92c872-8ce0-463f-b09a-29f24eca1511, remote_addr: 127.0.0.1:57550, local_addr: 127.0.0.1:46213, task: repair
50173 Sep 22 23:15:16.894 INFO eid:20 Found repair files: ["014", "014.db"]
50174 Sep 22 23:15:16.894 TRCE incoming request, uri: /newextent/20/data, method: GET, req_id: 86e68e0c-a5ff-4ed9-8062-37a6b6e231ef, remote_addr: 127.0.0.1:57550, local_addr: 127.0.0.1:46213, task: repair
50175 Sep 22 23:15:16.895 INFO request completed, latency_us: 256, response_code: 200, uri: /newextent/20/data, method: GET, req_id: 86e68e0c-a5ff-4ed9-8062-37a6b6e231ef, remote_addr: 127.0.0.1:57550, local_addr: 127.0.0.1:46213, task: repair
50176 Sep 22 23:15:16.900 TRCE incoming request, uri: /newextent/20/db, method: GET, req_id: dc6f4d51-f6ad-4187-8df1-9c1f24e72387, remote_addr: 127.0.0.1:57550, local_addr: 127.0.0.1:46213, task: repair
50177 Sep 22 23:15:16.900 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/20/db, method: GET, req_id: dc6f4d51-f6ad-4187-8df1-9c1f24e72387, remote_addr: 127.0.0.1:57550, local_addr: 127.0.0.1:46213, task: repair
50178 Sep 22 23:15:16.901 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/014.copy" to "/tmp/downstairs-vrx8aK6L/00/000/014.replace"
50179 Sep 22 23:15:16.901 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50180 Sep 22 23:15:16.902 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/014.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
50181 Sep 22 23:15:16.902 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/014"
50182 Sep 22 23:15:16.902 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/014.db"
50183 Sep 22 23:15:16.902 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50184 Sep 22 23:15:16.902 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/014.replace" to "/tmp/downstairs-vrx8aK6L/00/000/014.completed"
50185 Sep 22 23:15:16.902 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50186 Sep 22 23:15:16.902 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50187 Sep 22 23:15:16.903 DEBG [0] It's time to notify for 678
50188 Sep 22 23:15:16.903 INFO Completion from [0] id:678 status:true
50189 Sep 22 23:15:16.903 INFO [679/752] Repair commands completed
50190 Sep 22 23:15:16.903 INFO Pop front: ReconcileIO { id: ReconciliationId(679), op: ExtentReopen { repair_id: ReconciliationId(679), extent_id: 20 }, state: ClientData([New, New, New]) }
50191 Sep 22 23:15:16.903 INFO Sent repair work, now wait for resp
50192 Sep 22 23:15:16.903 INFO [0] received reconcile message
50193 Sep 22 23:15:16.903 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(679), op: ExtentReopen { repair_id: ReconciliationId(679), extent_id: 20 }, state: ClientData([InProgress, New, New]) }, : downstairs
50194 Sep 22 23:15:16.903 INFO [0] client ExtentReopen { repair_id: ReconciliationId(679), extent_id: 20 }
50195 Sep 22 23:15:16.903 INFO [1] received reconcile message
50196 Sep 22 23:15:16.903 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(679), op: ExtentReopen { repair_id: ReconciliationId(679), extent_id: 20 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50197 Sep 22 23:15:16.903 INFO [1] client ExtentReopen { repair_id: ReconciliationId(679), extent_id: 20 }
50198 Sep 22 23:15:16.903 INFO [2] received reconcile message
50199 Sep 22 23:15:16.903 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(679), op: ExtentReopen { repair_id: ReconciliationId(679), extent_id: 20 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50200 Sep 22 23:15:16.903 INFO [2] client ExtentReopen { repair_id: ReconciliationId(679), extent_id: 20 }
50201 Sep 22 23:15:16.903 DEBG 679 Reopen extent 20
50202 Sep 22 23:15:16.904 DEBG 679 Reopen extent 20
50203 Sep 22 23:15:16.904 DEBG 679 Reopen extent 20
50204 Sep 22 23:15:16.905 DEBG [2] It's time to notify for 679
50205 Sep 22 23:15:16.905 INFO Completion from [2] id:679 status:true
50206 Sep 22 23:15:16.905 INFO [680/752] Repair commands completed
50207 Sep 22 23:15:16.905 INFO Pop front: ReconcileIO { id: ReconciliationId(680), op: ExtentFlush { repair_id: ReconciliationId(680), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50208 Sep 22 23:15:16.905 INFO Sent repair work, now wait for resp
50209 Sep 22 23:15:16.905 INFO [0] received reconcile message
50210 Sep 22 23:15:16.905 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(680), op: ExtentFlush { repair_id: ReconciliationId(680), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50211 Sep 22 23:15:16.905 INFO [0] client ExtentFlush { repair_id: ReconciliationId(680), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50212 Sep 22 23:15:16.905 INFO [1] received reconcile message
50213 Sep 22 23:15:16.905 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(680), op: ExtentFlush { repair_id: ReconciliationId(680), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50214 Sep 22 23:15:16.905 INFO [1] client ExtentFlush { repair_id: ReconciliationId(680), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50215 Sep 22 23:15:16.905 INFO [2] received reconcile message
50216 Sep 22 23:15:16.905 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(680), op: ExtentFlush { repair_id: ReconciliationId(680), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50217 Sep 22 23:15:16.905 INFO [2] client ExtentFlush { repair_id: ReconciliationId(680), extent_id: 63, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50218 Sep 22 23:15:16.905 DEBG 680 Flush extent 63 with f:2 g:2
50219 Sep 22 23:15:16.905 DEBG Flush just extent 63 with f:2 and g:2
50220 Sep 22 23:15:16.905 DEBG [1] It's time to notify for 680
50221 Sep 22 23:15:16.905 INFO Completion from [1] id:680 status:true
50222 Sep 22 23:15:16.905 INFO [681/752] Repair commands completed
50223 Sep 22 23:15:16.906 INFO Pop front: ReconcileIO { id: ReconciliationId(681), op: ExtentClose { repair_id: ReconciliationId(681), extent_id: 63 }, state: ClientData([New, New, New]) }
50224 Sep 22 23:15:16.906 INFO Sent repair work, now wait for resp
50225 Sep 22 23:15:16.906 INFO [0] received reconcile message
50226 Sep 22 23:15:16.906 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(681), op: ExtentClose { repair_id: ReconciliationId(681), extent_id: 63 }, state: ClientData([InProgress, New, New]) }, : downstairs
50227 Sep 22 23:15:16.906 INFO [0] client ExtentClose { repair_id: ReconciliationId(681), extent_id: 63 }
50228 Sep 22 23:15:16.906 INFO [1] received reconcile message
50229 Sep 22 23:15:16.906 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(681), op: ExtentClose { repair_id: ReconciliationId(681), extent_id: 63 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50230 Sep 22 23:15:16.906 INFO [1] client ExtentClose { repair_id: ReconciliationId(681), extent_id: 63 }
50231 Sep 22 23:15:16.906 INFO [2] received reconcile message
50232 Sep 22 23:15:16.906 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(681), op: ExtentClose { repair_id: ReconciliationId(681), extent_id: 63 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50233 Sep 22 23:15:16.906 INFO [2] client ExtentClose { repair_id: ReconciliationId(681), extent_id: 63 }
50234 Sep 22 23:15:16.906 DEBG 681 Close extent 63
50235 Sep 22 23:15:16.906 DEBG 681 Close extent 63
50236 Sep 22 23:15:16.906 DEBG 681 Close extent 63
50237 Sep 22 23:15:16.907 DEBG [2] It's time to notify for 681
50238 Sep 22 23:15:16.907 INFO Completion from [2] id:681 status:true
50239 Sep 22 23:15:16.907 INFO [682/752] Repair commands completed
50240 Sep 22 23:15:16.907 INFO Pop front: ReconcileIO { id: ReconciliationId(682), op: ExtentRepair { repair_id: ReconciliationId(682), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50241 Sep 22 23:15:16.907 INFO Sent repair work, now wait for resp
50242 Sep 22 23:15:16.907 INFO [0] received reconcile message
50243 Sep 22 23:15:16.907 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(682), op: ExtentRepair { repair_id: ReconciliationId(682), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50244 Sep 22 23:15:16.907 INFO [0] client ExtentRepair { repair_id: ReconciliationId(682), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50245 Sep 22 23:15:16.907 INFO [0] Sending repair request ReconciliationId(682)
50246 Sep 22 23:15:16.907 INFO [1] received reconcile message
50247 Sep 22 23:15:16.907 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(682), op: ExtentRepair { repair_id: ReconciliationId(682), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50248 Sep 22 23:15:16.907 INFO [1] client ExtentRepair { repair_id: ReconciliationId(682), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50249 Sep 22 23:15:16.907 INFO [1] No action required ReconciliationId(682)
50250 Sep 22 23:15:16.907 INFO [2] received reconcile message
50251 Sep 22 23:15:16.907 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(682), op: ExtentRepair { repair_id: ReconciliationId(682), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50252 Sep 22 23:15:16.907 INFO [2] client ExtentRepair { repair_id: ReconciliationId(682), extent_id: 63, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50253 Sep 22 23:15:16.907 INFO [2] No action required ReconciliationId(682)
50254 Sep 22 23:15:16.907 DEBG 682 Repair extent 63 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
50255 Sep 22 23:15:16.907 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/03F.copy"
50256 Sep 22 23:15:16.971 INFO accepted connection, remote_addr: 127.0.0.1:43683, local_addr: 127.0.0.1:46213, task: repair
50257 Sep 22 23:15:16.971 TRCE incoming request, uri: /extent/63/files, method: GET, req_id: a34b3e28-2b8e-48a1-9d9d-fc87b52e8d04, remote_addr: 127.0.0.1:43683, local_addr: 127.0.0.1:46213, task: repair
50258 Sep 22 23:15:16.971 INFO request completed, latency_us: 189, response_code: 200, uri: /extent/63/files, method: GET, req_id: a34b3e28-2b8e-48a1-9d9d-fc87b52e8d04, remote_addr: 127.0.0.1:43683, local_addr: 127.0.0.1:46213, task: repair
50259 Sep 22 23:15:16.971 INFO eid:63 Found repair files: ["03F", "03F.db"]
50260 Sep 22 23:15:16.971 TRCE incoming request, uri: /newextent/63/data, method: GET, req_id: 676a94eb-4898-44c0-9f47-369eed37da50, remote_addr: 127.0.0.1:43683, local_addr: 127.0.0.1:46213, task: repair
50261 Sep 22 23:15:16.972 INFO request completed, latency_us: 249, response_code: 200, uri: /newextent/63/data, method: GET, req_id: 676a94eb-4898-44c0-9f47-369eed37da50, remote_addr: 127.0.0.1:43683, local_addr: 127.0.0.1:46213, task: repair
50262 Sep 22 23:15:16.977 TRCE incoming request, uri: /newextent/63/db, method: GET, req_id: 382c490f-e811-4ff8-bd2a-ccfadadbb678, remote_addr: 127.0.0.1:43683, local_addr: 127.0.0.1:46213, task: repair
50263 Sep 22 23:15:16.977 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/63/db, method: GET, req_id: 382c490f-e811-4ff8-bd2a-ccfadadbb678, remote_addr: 127.0.0.1:43683, local_addr: 127.0.0.1:46213, task: repair
50264 Sep 22 23:15:16.978 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/03F.copy" to "/tmp/downstairs-vrx8aK6L/00/000/03F.replace"
50265 Sep 22 23:15:16.978 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50266 Sep 22 23:15:16.979 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/03F.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
50267 Sep 22 23:15:16.979 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/03F"
50268 Sep 22 23:15:16.979 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/03F.db"
50269 Sep 22 23:15:16.979 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50270 Sep 22 23:15:16.979 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/03F.replace" to "/tmp/downstairs-vrx8aK6L/00/000/03F.completed"
50271 Sep 22 23:15:16.979 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50272 Sep 22 23:15:16.979 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50273 Sep 22 23:15:16.980 DEBG [0] It's time to notify for 682
50274 Sep 22 23:15:16.980 INFO Completion from [0] id:682 status:true
50275 Sep 22 23:15:16.980 INFO [683/752] Repair commands completed
50276 Sep 22 23:15:16.980 INFO Pop front: ReconcileIO { id: ReconciliationId(683), op: ExtentReopen { repair_id: ReconciliationId(683), extent_id: 63 }, state: ClientData([New, New, New]) }
50277 Sep 22 23:15:16.980 INFO Sent repair work, now wait for resp
50278 Sep 22 23:15:16.980 INFO [0] received reconcile message
50279 Sep 22 23:15:16.980 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(683), op: ExtentReopen { repair_id: ReconciliationId(683), extent_id: 63 }, state: ClientData([InProgress, New, New]) }, : downstairs
50280 Sep 22 23:15:16.980 INFO [0] client ExtentReopen { repair_id: ReconciliationId(683), extent_id: 63 }
50281 Sep 22 23:15:16.980 INFO [1] received reconcile message
50282 Sep 22 23:15:16.980 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(683), op: ExtentReopen { repair_id: ReconciliationId(683), extent_id: 63 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50283 Sep 22 23:15:16.980 INFO [1] client ExtentReopen { repair_id: ReconciliationId(683), extent_id: 63 }
50284 Sep 22 23:15:16.980 INFO [2] received reconcile message
50285 Sep 22 23:15:16.980 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(683), op: ExtentReopen { repair_id: ReconciliationId(683), extent_id: 63 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50286 Sep 22 23:15:16.980 INFO [2] client ExtentReopen { repair_id: ReconciliationId(683), extent_id: 63 }
50287 Sep 22 23:15:16.980 DEBG 683 Reopen extent 63
50288 Sep 22 23:15:16.981 DEBG 683 Reopen extent 63
50289 Sep 22 23:15:16.981 DEBG 683 Reopen extent 63
50290 Sep 22 23:15:16.982 DEBG [2] It's time to notify for 683
50291 Sep 22 23:15:16.982 INFO Completion from [2] id:683 status:true
50292 Sep 22 23:15:16.982 INFO [684/752] Repair commands completed
50293 Sep 22 23:15:16.982 INFO Pop front: ReconcileIO { id: ReconciliationId(684), op: ExtentFlush { repair_id: ReconciliationId(684), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50294 Sep 22 23:15:16.982 INFO Sent repair work, now wait for resp
50295 Sep 22 23:15:16.982 INFO [0] received reconcile message
50296 Sep 22 23:15:16.982 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(684), op: ExtentFlush { repair_id: ReconciliationId(684), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50297 Sep 22 23:15:16.982 INFO [0] client ExtentFlush { repair_id: ReconciliationId(684), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50298 Sep 22 23:15:16.982 INFO [1] received reconcile message
50299 Sep 22 23:15:16.982 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(684), op: ExtentFlush { repair_id: ReconciliationId(684), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50300 Sep 22 23:15:16.982 INFO [1] client ExtentFlush { repair_id: ReconciliationId(684), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50301 Sep 22 23:15:16.982 INFO [2] received reconcile message
50302 Sep 22 23:15:16.982 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(684), op: ExtentFlush { repair_id: ReconciliationId(684), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50303 Sep 22 23:15:16.982 INFO [2] client ExtentFlush { repair_id: ReconciliationId(684), extent_id: 166, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50304 Sep 22 23:15:16.982 DEBG 684 Flush extent 166 with f:2 g:2
50305 Sep 22 23:15:16.982 DEBG Flush just extent 166 with f:2 and g:2
50306 Sep 22 23:15:16.982 DEBG [1] It's time to notify for 684
50307 Sep 22 23:15:16.982 INFO Completion from [1] id:684 status:true
50308 Sep 22 23:15:16.982 INFO [685/752] Repair commands completed
50309 Sep 22 23:15:16.983 INFO Pop front: ReconcileIO { id: ReconciliationId(685), op: ExtentClose { repair_id: ReconciliationId(685), extent_id: 166 }, state: ClientData([New, New, New]) }
50310 Sep 22 23:15:16.983 INFO Sent repair work, now wait for resp
50311 Sep 22 23:15:16.983 INFO [0] received reconcile message
50312 Sep 22 23:15:16.983 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(685), op: ExtentClose { repair_id: ReconciliationId(685), extent_id: 166 }, state: ClientData([InProgress, New, New]) }, : downstairs
50313 Sep 22 23:15:16.983 INFO [0] client ExtentClose { repair_id: ReconciliationId(685), extent_id: 166 }
50314 Sep 22 23:15:16.983 INFO [1] received reconcile message
50315 Sep 22 23:15:16.983 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(685), op: ExtentClose { repair_id: ReconciliationId(685), extent_id: 166 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50316 Sep 22 23:15:16.983 INFO [1] client ExtentClose { repair_id: ReconciliationId(685), extent_id: 166 }
50317 Sep 22 23:15:16.983 INFO [2] received reconcile message
50318 Sep 22 23:15:16.983 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(685), op: ExtentClose { repair_id: ReconciliationId(685), extent_id: 166 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50319 Sep 22 23:15:16.983 INFO [2] client ExtentClose { repair_id: ReconciliationId(685), extent_id: 166 }
50320 Sep 22 23:15:16.983 DEBG 685 Close extent 166
50321 Sep 22 23:15:16.983 DEBG 685 Close extent 166
50322 Sep 22 23:15:16.983 DEBG 685 Close extent 166
50323 Sep 22 23:15:16.984 DEBG [2] It's time to notify for 685
50324 Sep 22 23:15:16.984 INFO Completion from [2] id:685 status:true
50325 Sep 22 23:15:16.984 INFO [686/752] Repair commands completed
50326 Sep 22 23:15:16.984 INFO Pop front: ReconcileIO { id: ReconciliationId(686), op: ExtentRepair { repair_id: ReconciliationId(686), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50327 Sep 22 23:15:16.984 INFO Sent repair work, now wait for resp
50328 Sep 22 23:15:16.984 INFO [0] received reconcile message
50329 Sep 22 23:15:16.984 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(686), op: ExtentRepair { repair_id: ReconciliationId(686), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50330 Sep 22 23:15:16.984 INFO [0] client ExtentRepair { repair_id: ReconciliationId(686), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50331 Sep 22 23:15:16.984 INFO [0] Sending repair request ReconciliationId(686)
50332 Sep 22 23:15:16.984 INFO [1] received reconcile message
50333 Sep 22 23:15:16.984 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(686), op: ExtentRepair { repair_id: ReconciliationId(686), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50334 Sep 22 23:15:16.984 INFO [1] client ExtentRepair { repair_id: ReconciliationId(686), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50335 Sep 22 23:15:16.984 INFO [1] No action required ReconciliationId(686)
50336 Sep 22 23:15:16.984 INFO [2] received reconcile message
50337 Sep 22 23:15:16.984 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(686), op: ExtentRepair { repair_id: ReconciliationId(686), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50338 Sep 22 23:15:16.984 INFO [2] client ExtentRepair { repair_id: ReconciliationId(686), extent_id: 166, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50339 Sep 22 23:15:16.984 INFO [2] No action required ReconciliationId(686)
50340 Sep 22 23:15:16.984 DEBG 686 Repair extent 166 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
50341 Sep 22 23:15:16.984 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0A6.copy"
50342 Sep 22 23:15:17.046 INFO accepted connection, remote_addr: 127.0.0.1:55473, local_addr: 127.0.0.1:46213, task: repair
50343 Sep 22 23:15:17.046 TRCE incoming request, uri: /extent/166/files, method: GET, req_id: 50c1b6e8-d5f1-4e0f-a263-1fdf3c0e1d9e, remote_addr: 127.0.0.1:55473, local_addr: 127.0.0.1:46213, task: repair
50344 Sep 22 23:15:17.046 INFO request completed, latency_us: 202, response_code: 200, uri: /extent/166/files, method: GET, req_id: 50c1b6e8-d5f1-4e0f-a263-1fdf3c0e1d9e, remote_addr: 127.0.0.1:55473, local_addr: 127.0.0.1:46213, task: repair
50345 Sep 22 23:15:17.047 INFO eid:166 Found repair files: ["0A6", "0A6.db"]
50346 Sep 22 23:15:17.047 TRCE incoming request, uri: /newextent/166/data, method: GET, req_id: 9d86f1d9-4be9-428f-9aa6-fb31783fd65c, remote_addr: 127.0.0.1:55473, local_addr: 127.0.0.1:46213, task: repair
50347 Sep 22 23:15:17.047 INFO request completed, latency_us: 312, response_code: 200, uri: /newextent/166/data, method: GET, req_id: 9d86f1d9-4be9-428f-9aa6-fb31783fd65c, remote_addr: 127.0.0.1:55473, local_addr: 127.0.0.1:46213, task: repair
50348 Sep 22 23:15:17.053 TRCE incoming request, uri: /newextent/166/db, method: GET, req_id: 1c41826b-3af3-4b7b-86da-df7435f086f0, remote_addr: 127.0.0.1:55473, local_addr: 127.0.0.1:46213, task: repair
50349 Sep 22 23:15:17.053 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/166/db, method: GET, req_id: 1c41826b-3af3-4b7b-86da-df7435f086f0, remote_addr: 127.0.0.1:55473, local_addr: 127.0.0.1:46213, task: repair
50350 Sep 22 23:15:17.054 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0A6.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0A6.replace"
50351 Sep 22 23:15:17.054 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50352 Sep 22 23:15:17.055 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0A6.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
50353 Sep 22 23:15:17.055 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A6"
50354 Sep 22 23:15:17.055 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0A6.db"
50355 Sep 22 23:15:17.055 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50356 Sep 22 23:15:17.055 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0A6.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0A6.completed"
50357 Sep 22 23:15:17.055 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50358 Sep 22 23:15:17.055 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50359 Sep 22 23:15:17.056 DEBG [0] It's time to notify for 686
50360 Sep 22 23:15:17.056 INFO Completion from [0] id:686 status:true
50361 Sep 22 23:15:17.056 INFO [687/752] Repair commands completed
50362 Sep 22 23:15:17.056 INFO Pop front: ReconcileIO { id: ReconciliationId(687), op: ExtentReopen { repair_id: ReconciliationId(687), extent_id: 166 }, state: ClientData([New, New, New]) }
50363 Sep 22 23:15:17.056 INFO Sent repair work, now wait for resp
50364 Sep 22 23:15:17.056 INFO [0] received reconcile message
50365 Sep 22 23:15:17.056 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(687), op: ExtentReopen { repair_id: ReconciliationId(687), extent_id: 166 }, state: ClientData([InProgress, New, New]) }, : downstairs
50366 Sep 22 23:15:17.056 INFO [0] client ExtentReopen { repair_id: ReconciliationId(687), extent_id: 166 }
50367 Sep 22 23:15:17.056 INFO [1] received reconcile message
50368 Sep 22 23:15:17.056 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(687), op: ExtentReopen { repair_id: ReconciliationId(687), extent_id: 166 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50369 Sep 22 23:15:17.056 INFO [1] client ExtentReopen { repair_id: ReconciliationId(687), extent_id: 166 }
50370 Sep 22 23:15:17.056 INFO [2] received reconcile message
50371 Sep 22 23:15:17.056 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(687), op: ExtentReopen { repair_id: ReconciliationId(687), extent_id: 166 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50372 Sep 22 23:15:17.056 INFO [2] client ExtentReopen { repair_id: ReconciliationId(687), extent_id: 166 }
50373 Sep 22 23:15:17.056 DEBG 687 Reopen extent 166
50374 Sep 22 23:15:17.057 DEBG 687 Reopen extent 166
50375 Sep 22 23:15:17.057 DEBG 687 Reopen extent 166
50376 Sep 22 23:15:17.058 DEBG [2] It's time to notify for 687
50377 Sep 22 23:15:17.058 INFO Completion from [2] id:687 status:true
50378 Sep 22 23:15:17.058 INFO [688/752] Repair commands completed
50379 Sep 22 23:15:17.058 INFO Pop front: ReconcileIO { id: ReconciliationId(688), op: ExtentFlush { repair_id: ReconciliationId(688), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50380 Sep 22 23:15:17.058 INFO Sent repair work, now wait for resp
50381 Sep 22 23:15:17.058 INFO [0] received reconcile message
50382 Sep 22 23:15:17.058 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(688), op: ExtentFlush { repair_id: ReconciliationId(688), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50383 Sep 22 23:15:17.058 INFO [0] client ExtentFlush { repair_id: ReconciliationId(688), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50384 Sep 22 23:15:17.058 INFO [1] received reconcile message
50385 Sep 22 23:15:17.058 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(688), op: ExtentFlush { repair_id: ReconciliationId(688), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50386 Sep 22 23:15:17.058 INFO [1] client ExtentFlush { repair_id: ReconciliationId(688), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50387 Sep 22 23:15:17.058 INFO [2] received reconcile message
50388 Sep 22 23:15:17.058 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(688), op: ExtentFlush { repair_id: ReconciliationId(688), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50389 Sep 22 23:15:17.058 INFO [2] client ExtentFlush { repair_id: ReconciliationId(688), extent_id: 14, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50390 Sep 22 23:15:17.058 DEBG 688 Flush extent 14 with f:2 g:2
50391 Sep 22 23:15:17.058 DEBG Flush just extent 14 with f:2 and g:2
50392 Sep 22 23:15:17.058 DEBG [1] It's time to notify for 688
50393 Sep 22 23:15:17.058 INFO Completion from [1] id:688 status:true
50394 Sep 22 23:15:17.058 INFO [689/752] Repair commands completed
50395 Sep 22 23:15:17.058 INFO Pop front: ReconcileIO { id: ReconciliationId(689), op: ExtentClose { repair_id: ReconciliationId(689), extent_id: 14 }, state: ClientData([New, New, New]) }
50396 Sep 22 23:15:17.059 INFO Sent repair work, now wait for resp
50397 Sep 22 23:15:17.059 INFO [0] received reconcile message
50398 Sep 22 23:15:17.059 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(689), op: ExtentClose { repair_id: ReconciliationId(689), extent_id: 14 }, state: ClientData([InProgress, New, New]) }, : downstairs
50399 Sep 22 23:15:17.059 INFO [0] client ExtentClose { repair_id: ReconciliationId(689), extent_id: 14 }
50400 Sep 22 23:15:17.059 INFO [1] received reconcile message
50401 Sep 22 23:15:17.059 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(689), op: ExtentClose { repair_id: ReconciliationId(689), extent_id: 14 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50402 Sep 22 23:15:17.059 INFO [1] client ExtentClose { repair_id: ReconciliationId(689), extent_id: 14 }
50403 Sep 22 23:15:17.059 INFO [2] received reconcile message
50404 Sep 22 23:15:17.059 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(689), op: ExtentClose { repair_id: ReconciliationId(689), extent_id: 14 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50405 Sep 22 23:15:17.059 INFO [2] client ExtentClose { repair_id: ReconciliationId(689), extent_id: 14 }
50406 Sep 22 23:15:17.059 DEBG 689 Close extent 14
50407 Sep 22 23:15:17.059 DEBG 689 Close extent 14
50408 Sep 22 23:15:17.059 DEBG 689 Close extent 14
50409 Sep 22 23:15:17.060 DEBG [2] It's time to notify for 689
50410 Sep 22 23:15:17.060 INFO Completion from [2] id:689 status:true
50411 Sep 22 23:15:17.060 INFO [690/752] Repair commands completed
50412 Sep 22 23:15:17.060 INFO Pop front: ReconcileIO { id: ReconciliationId(690), op: ExtentRepair { repair_id: ReconciliationId(690), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50413 Sep 22 23:15:17.060 INFO Sent repair work, now wait for resp
50414 Sep 22 23:15:17.060 INFO [0] received reconcile message
50415 Sep 22 23:15:17.060 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(690), op: ExtentRepair { repair_id: ReconciliationId(690), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50416 Sep 22 23:15:17.060 INFO [0] client ExtentRepair { repair_id: ReconciliationId(690), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50417 Sep 22 23:15:17.060 INFO [0] Sending repair request ReconciliationId(690)
50418 Sep 22 23:15:17.060 INFO [1] received reconcile message
50419 Sep 22 23:15:17.060 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(690), op: ExtentRepair { repair_id: ReconciliationId(690), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50420 Sep 22 23:15:17.060 INFO [1] client ExtentRepair { repair_id: ReconciliationId(690), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50421 Sep 22 23:15:17.060 INFO [1] No action required ReconciliationId(690)
50422 Sep 22 23:15:17.060 INFO [2] received reconcile message
50423 Sep 22 23:15:17.060 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(690), op: ExtentRepair { repair_id: ReconciliationId(690), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50424 Sep 22 23:15:17.060 INFO [2] client ExtentRepair { repair_id: ReconciliationId(690), extent_id: 14, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50425 Sep 22 23:15:17.060 INFO [2] No action required ReconciliationId(690)
50426 Sep 22 23:15:17.060 DEBG 690 Repair extent 14 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
50427 Sep 22 23:15:17.060 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/00E.copy"
50428 Sep 22 23:15:17.125 INFO accepted connection, remote_addr: 127.0.0.1:45501, local_addr: 127.0.0.1:46213, task: repair
50429 Sep 22 23:15:17.125 TRCE incoming request, uri: /extent/14/files, method: GET, req_id: f437eab1-9f72-48ea-8ad0-d672907049db, remote_addr: 127.0.0.1:45501, local_addr: 127.0.0.1:46213, task: repair
50430 Sep 22 23:15:17.125 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/14/files, method: GET, req_id: f437eab1-9f72-48ea-8ad0-d672907049db, remote_addr: 127.0.0.1:45501, local_addr: 127.0.0.1:46213, task: repair
50431 Sep 22 23:15:17.126 INFO eid:14 Found repair files: ["00E", "00E.db"]
50432 Sep 22 23:15:17.126 TRCE incoming request, uri: /newextent/14/data, method: GET, req_id: 6428d5a9-da5c-498c-ba01-2a00663843c1, remote_addr: 127.0.0.1:45501, local_addr: 127.0.0.1:46213, task: repair
50433 Sep 22 23:15:17.126 INFO request completed, latency_us: 315, response_code: 200, uri: /newextent/14/data, method: GET, req_id: 6428d5a9-da5c-498c-ba01-2a00663843c1, remote_addr: 127.0.0.1:45501, local_addr: 127.0.0.1:46213, task: repair
50434 Sep 22 23:15:17.131 TRCE incoming request, uri: /newextent/14/db, method: GET, req_id: b7a6950c-f770-4cec-8c01-1f4402e80a26, remote_addr: 127.0.0.1:45501, local_addr: 127.0.0.1:46213, task: repair
50435 Sep 22 23:15:17.131 INFO request completed, latency_us: 295, response_code: 200, uri: /newextent/14/db, method: GET, req_id: b7a6950c-f770-4cec-8c01-1f4402e80a26, remote_addr: 127.0.0.1:45501, local_addr: 127.0.0.1:46213, task: repair
50436 Sep 22 23:15:17.132 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/00E.copy" to "/tmp/downstairs-vrx8aK6L/00/000/00E.replace"
50437 Sep 22 23:15:17.132 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50438 Sep 22 23:15:17.133 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/00E.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
50439 Sep 22 23:15:17.133 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/00E"
50440 Sep 22 23:15:17.134 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/00E.db"
50441 Sep 22 23:15:17.134 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50442 Sep 22 23:15:17.134 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/00E.replace" to "/tmp/downstairs-vrx8aK6L/00/000/00E.completed"
50443 Sep 22 23:15:17.134 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50444 Sep 22 23:15:17.134 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50445 Sep 22 23:15:17.134 DEBG [0] It's time to notify for 690
50446 Sep 22 23:15:17.134 INFO Completion from [0] id:690 status:true
50447 Sep 22 23:15:17.134 INFO [691/752] Repair commands completed
50448 Sep 22 23:15:17.134 INFO Pop front: ReconcileIO { id: ReconciliationId(691), op: ExtentReopen { repair_id: ReconciliationId(691), extent_id: 14 }, state: ClientData([New, New, New]) }
50449 Sep 22 23:15:17.134 INFO Sent repair work, now wait for resp
50450 Sep 22 23:15:17.134 INFO [0] received reconcile message
50451 Sep 22 23:15:17.134 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(691), op: ExtentReopen { repair_id: ReconciliationId(691), extent_id: 14 }, state: ClientData([InProgress, New, New]) }, : downstairs
50452 Sep 22 23:15:17.134 INFO [0] client ExtentReopen { repair_id: ReconciliationId(691), extent_id: 14 }
50453 Sep 22 23:15:17.134 INFO [1] received reconcile message
50454 Sep 22 23:15:17.134 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(691), op: ExtentReopen { repair_id: ReconciliationId(691), extent_id: 14 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50455 Sep 22 23:15:17.134 INFO [1] client ExtentReopen { repair_id: ReconciliationId(691), extent_id: 14 }
50456 Sep 22 23:15:17.134 INFO [2] received reconcile message
50457 Sep 22 23:15:17.134 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(691), op: ExtentReopen { repair_id: ReconciliationId(691), extent_id: 14 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50458 Sep 22 23:15:17.134 INFO [2] client ExtentReopen { repair_id: ReconciliationId(691), extent_id: 14 }
50459 Sep 22 23:15:17.134 DEBG 691 Reopen extent 14
50460 Sep 22 23:15:17.135 DEBG 691 Reopen extent 14
50461 Sep 22 23:15:17.136 DEBG 691 Reopen extent 14
50462 Sep 22 23:15:17.136 DEBG [2] It's time to notify for 691
50463 Sep 22 23:15:17.136 INFO Completion from [2] id:691 status:true
50464 Sep 22 23:15:17.136 INFO [692/752] Repair commands completed
50465 Sep 22 23:15:17.136 INFO Pop front: ReconcileIO { id: ReconciliationId(692), op: ExtentFlush { repair_id: ReconciliationId(692), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50466 Sep 22 23:15:17.136 INFO Sent repair work, now wait for resp
50467 Sep 22 23:15:17.136 INFO [0] received reconcile message
50468 Sep 22 23:15:17.136 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(692), op: ExtentFlush { repair_id: ReconciliationId(692), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50469 Sep 22 23:15:17.136 INFO [0] client ExtentFlush { repair_id: ReconciliationId(692), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50470 Sep 22 23:15:17.136 INFO [1] received reconcile message
50471 Sep 22 23:15:17.136 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(692), op: ExtentFlush { repair_id: ReconciliationId(692), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50472 Sep 22 23:15:17.136 INFO [1] client ExtentFlush { repair_id: ReconciliationId(692), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50473 Sep 22 23:15:17.136 INFO [2] received reconcile message
50474 Sep 22 23:15:17.136 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(692), op: ExtentFlush { repair_id: ReconciliationId(692), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50475 Sep 22 23:15:17.136 INFO [2] client ExtentFlush { repair_id: ReconciliationId(692), extent_id: 37, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50476 Sep 22 23:15:17.137 DEBG 692 Flush extent 37 with f:2 g:2
50477 Sep 22 23:15:17.137 DEBG Flush just extent 37 with f:2 and g:2
50478 Sep 22 23:15:17.137 DEBG [1] It's time to notify for 692
50479 Sep 22 23:15:17.137 INFO Completion from [1] id:692 status:true
50480 Sep 22 23:15:17.137 INFO [693/752] Repair commands completed
50481 Sep 22 23:15:17.137 INFO Pop front: ReconcileIO { id: ReconciliationId(693), op: ExtentClose { repair_id: ReconciliationId(693), extent_id: 37 }, state: ClientData([New, New, New]) }
50482 Sep 22 23:15:17.137 INFO Sent repair work, now wait for resp
50483 Sep 22 23:15:17.137 INFO [0] received reconcile message
50484 Sep 22 23:15:17.137 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(693), op: ExtentClose { repair_id: ReconciliationId(693), extent_id: 37 }, state: ClientData([InProgress, New, New]) }, : downstairs
50485 Sep 22 23:15:17.137 INFO [0] client ExtentClose { repair_id: ReconciliationId(693), extent_id: 37 }
50486 Sep 22 23:15:17.137 INFO [1] received reconcile message
50487 Sep 22 23:15:17.137 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(693), op: ExtentClose { repair_id: ReconciliationId(693), extent_id: 37 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50488 Sep 22 23:15:17.137 INFO [1] client ExtentClose { repair_id: ReconciliationId(693), extent_id: 37 }
50489 Sep 22 23:15:17.137 INFO [2] received reconcile message
50490 Sep 22 23:15:17.137 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(693), op: ExtentClose { repair_id: ReconciliationId(693), extent_id: 37 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50491 Sep 22 23:15:17.137 INFO [2] client ExtentClose { repair_id: ReconciliationId(693), extent_id: 37 }
50492 Sep 22 23:15:17.137 DEBG 693 Close extent 37
50493 Sep 22 23:15:17.137 DEBG 693 Close extent 37
50494 Sep 22 23:15:17.138 DEBG 693 Close extent 37
50495 Sep 22 23:15:17.138 DEBG up_ds_listen was notified
50496 Sep 22 23:15:17.138 DEBG up_ds_listen process 1091
50497 Sep 22 23:15:17.138 DEBG [2] It's time to notify for 693
50498 Sep 22 23:15:17.138 DEBG [A] ack job 1091:92, : downstairs
50499 Sep 22 23:15:17.138 INFO Completion from [2] id:693 status:true
50500 Sep 22 23:15:17.138 DEBG up_ds_listen checked 1 jobs, back to waiting
50501 Sep 22 23:15:17.138 INFO [694/752] Repair commands completed
50502 Sep 22 23:15:17.138 INFO Pop front: ReconcileIO { id: ReconciliationId(694), op: ExtentRepair { repair_id: ReconciliationId(694), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50503 Sep 22 23:15:17.138 INFO Sent repair work, now wait for resp
50504 Sep 22 23:15:17.138 INFO [0] received reconcile message
50505 Sep 22 23:15:17.138 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(694), op: ExtentRepair { repair_id: ReconciliationId(694), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50506 Sep 22 23:15:17.138 INFO [0] client ExtentRepair { repair_id: ReconciliationId(694), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50507 Sep 22 23:15:17.138 INFO [0] Sending repair request ReconciliationId(694)
50508 Sep 22 23:15:17.138 INFO [1] received reconcile message
50509 Sep 22 23:15:17.138 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(694), op: ExtentRepair { repair_id: ReconciliationId(694), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50510 Sep 22 23:15:17.138 INFO [1] client ExtentRepair { repair_id: ReconciliationId(694), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50511 Sep 22 23:15:17.138 INFO [1] No action required ReconciliationId(694)
50512 Sep 22 23:15:17.138 INFO [2] received reconcile message
50513 Sep 22 23:15:17.138 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(694), op: ExtentRepair { repair_id: ReconciliationId(694), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50514 Sep 22 23:15:17.138 INFO [2] client ExtentRepair { repair_id: ReconciliationId(694), extent_id: 37, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50515 Sep 22 23:15:17.138 INFO [2] No action required ReconciliationId(694)
50516 Sep 22 23:15:17.139 DEBG 694 Repair extent 37 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
50517 Sep 22 23:15:17.139 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/025.copy"
50518 Sep 22 23:15:17.147 DEBG Flush :1091 extent_limit None deps:[JobId(1090), JobId(1089)] res:true f:35 g:1
50519 Sep 22 23:15:17.148 WARN returning error on read!
50520 Sep 22 23:15:17.148 DEBG Read :1092 deps:[JobId(1091)] res:false
50521 Sep 22 23:15:17.154 DEBG Read :1092 deps:[JobId(1091)] res:true
50522 Sep 22 23:15:17.201 INFO accepted connection, remote_addr: 127.0.0.1:42680, local_addr: 127.0.0.1:46213, task: repair
50523 Sep 22 23:15:17.201 TRCE incoming request, uri: /extent/37/files, method: GET, req_id: fb00f139-631a-4b41-b4d8-ffa7c81158f3, remote_addr: 127.0.0.1:42680, local_addr: 127.0.0.1:46213, task: repair
50524 Sep 22 23:15:17.201 INFO request completed, latency_us: 209, response_code: 200, uri: /extent/37/files, method: GET, req_id: fb00f139-631a-4b41-b4d8-ffa7c81158f3, remote_addr: 127.0.0.1:42680, local_addr: 127.0.0.1:46213, task: repair
50525 Sep 22 23:15:17.202 INFO eid:37 Found repair files: ["025", "025.db"]
50526 Sep 22 23:15:17.202 TRCE incoming request, uri: /newextent/37/data, method: GET, req_id: a9846241-9110-4081-b82f-ffd28e60a457, remote_addr: 127.0.0.1:42680, local_addr: 127.0.0.1:46213, task: repair
50527 Sep 22 23:15:17.202 INFO request completed, latency_us: 304, response_code: 200, uri: /newextent/37/data, method: GET, req_id: a9846241-9110-4081-b82f-ffd28e60a457, remote_addr: 127.0.0.1:42680, local_addr: 127.0.0.1:46213, task: repair
50528 Sep 22 23:15:17.207 TRCE incoming request, uri: /newextent/37/db, method: GET, req_id: c19470f0-a39c-4700-886d-f7c2acd0a5ea, remote_addr: 127.0.0.1:42680, local_addr: 127.0.0.1:46213, task: repair
50529 Sep 22 23:15:17.208 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/37/db, method: GET, req_id: c19470f0-a39c-4700-886d-f7c2acd0a5ea, remote_addr: 127.0.0.1:42680, local_addr: 127.0.0.1:46213, task: repair
50530 Sep 22 23:15:17.209 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/025.copy" to "/tmp/downstairs-vrx8aK6L/00/000/025.replace"
50531 Sep 22 23:15:17.209 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50532 Sep 22 23:15:17.210 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/025.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
50533 Sep 22 23:15:17.210 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/025"
50534 Sep 22 23:15:17.210 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/025.db"
50535 Sep 22 23:15:17.210 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50536 Sep 22 23:15:17.210 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/025.replace" to "/tmp/downstairs-vrx8aK6L/00/000/025.completed"
50537 Sep 22 23:15:17.210 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50538 Sep 22 23:15:17.210 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50539 Sep 22 23:15:17.210 DEBG [0] It's time to notify for 694
50540 Sep 22 23:15:17.211 INFO Completion from [0] id:694 status:true
50541 Sep 22 23:15:17.211 INFO [695/752] Repair commands completed
50542 Sep 22 23:15:17.211 INFO Pop front: ReconcileIO { id: ReconciliationId(695), op: ExtentReopen { repair_id: ReconciliationId(695), extent_id: 37 }, state: ClientData([New, New, New]) }
50543 Sep 22 23:15:17.211 INFO Sent repair work, now wait for resp
50544 Sep 22 23:15:17.211 INFO [0] received reconcile message
50545 Sep 22 23:15:17.211 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(695), op: ExtentReopen { repair_id: ReconciliationId(695), extent_id: 37 }, state: ClientData([InProgress, New, New]) }, : downstairs
50546 Sep 22 23:15:17.211 INFO [0] client ExtentReopen { repair_id: ReconciliationId(695), extent_id: 37 }
50547 Sep 22 23:15:17.211 INFO [1] received reconcile message
50548 Sep 22 23:15:17.211 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(695), op: ExtentReopen { repair_id: ReconciliationId(695), extent_id: 37 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50549 Sep 22 23:15:17.211 INFO [1] client ExtentReopen { repair_id: ReconciliationId(695), extent_id: 37 }
50550 Sep 22 23:15:17.211 INFO [2] received reconcile message
50551 Sep 22 23:15:17.211 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(695), op: ExtentReopen { repair_id: ReconciliationId(695), extent_id: 37 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50552 Sep 22 23:15:17.211 INFO [2] client ExtentReopen { repair_id: ReconciliationId(695), extent_id: 37 }
50553 Sep 22 23:15:17.211 DEBG 695 Reopen extent 37
50554 Sep 22 23:15:17.211 DEBG 695 Reopen extent 37
50555 Sep 22 23:15:17.212 DEBG 695 Reopen extent 37
50556 Sep 22 23:15:17.213 DEBG [2] It's time to notify for 695
50557 Sep 22 23:15:17.213 INFO Completion from [2] id:695 status:true
50558 Sep 22 23:15:17.213 INFO [696/752] Repair commands completed
50559 Sep 22 23:15:17.213 INFO Pop front: ReconcileIO { id: ReconciliationId(696), op: ExtentFlush { repair_id: ReconciliationId(696), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50560 Sep 22 23:15:17.213 INFO Sent repair work, now wait for resp
50561 Sep 22 23:15:17.213 INFO [0] received reconcile message
50562 Sep 22 23:15:17.213 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(696), op: ExtentFlush { repair_id: ReconciliationId(696), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50563 Sep 22 23:15:17.213 INFO [0] client ExtentFlush { repair_id: ReconciliationId(696), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50564 Sep 22 23:15:17.213 INFO [1] received reconcile message
50565 Sep 22 23:15:17.213 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(696), op: ExtentFlush { repair_id: ReconciliationId(696), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50566 Sep 22 23:15:17.213 INFO [1] client ExtentFlush { repair_id: ReconciliationId(696), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50567 Sep 22 23:15:17.213 INFO [2] received reconcile message
50568 Sep 22 23:15:17.213 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(696), op: ExtentFlush { repair_id: ReconciliationId(696), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50569 Sep 22 23:15:17.213 INFO [2] client ExtentFlush { repair_id: ReconciliationId(696), extent_id: 124, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50570 Sep 22 23:15:17.213 DEBG 696 Flush extent 124 with f:2 g:2
50571 Sep 22 23:15:17.213 DEBG Flush just extent 124 with f:2 and g:2
50572 Sep 22 23:15:17.213 DEBG [1] It's time to notify for 696
50573 Sep 22 23:15:17.213 INFO Completion from [1] id:696 status:true
50574 Sep 22 23:15:17.213 INFO [697/752] Repair commands completed
50575 Sep 22 23:15:17.213 INFO Pop front: ReconcileIO { id: ReconciliationId(697), op: ExtentClose { repair_id: ReconciliationId(697), extent_id: 124 }, state: ClientData([New, New, New]) }
50576 Sep 22 23:15:17.213 INFO Sent repair work, now wait for resp
50577 Sep 22 23:15:17.213 INFO [0] received reconcile message
50578 Sep 22 23:15:17.213 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(697), op: ExtentClose { repair_id: ReconciliationId(697), extent_id: 124 }, state: ClientData([InProgress, New, New]) }, : downstairs
50579 Sep 22 23:15:17.213 INFO [0] client ExtentClose { repair_id: ReconciliationId(697), extent_id: 124 }
50580 Sep 22 23:15:17.213 INFO [1] received reconcile message
50581 Sep 22 23:15:17.213 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(697), op: ExtentClose { repair_id: ReconciliationId(697), extent_id: 124 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50582 Sep 22 23:15:17.213 INFO [1] client ExtentClose { repair_id: ReconciliationId(697), extent_id: 124 }
50583 Sep 22 23:15:17.213 INFO [2] received reconcile message
50584 Sep 22 23:15:17.213 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(697), op: ExtentClose { repair_id: ReconciliationId(697), extent_id: 124 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50585 Sep 22 23:15:17.214 INFO [2] client ExtentClose { repair_id: ReconciliationId(697), extent_id: 124 }
50586 Sep 22 23:15:17.214 DEBG 697 Close extent 124
50587 Sep 22 23:15:17.214 DEBG 697 Close extent 124
50588 Sep 22 23:15:17.214 DEBG 697 Close extent 124
50589 Sep 22 23:15:17.215 DEBG [2] It's time to notify for 697
50590 Sep 22 23:15:17.215 INFO Completion from [2] id:697 status:true
50591 Sep 22 23:15:17.215 INFO [698/752] Repair commands completed
50592 Sep 22 23:15:17.215 INFO Pop front: ReconcileIO { id: ReconciliationId(698), op: ExtentRepair { repair_id: ReconciliationId(698), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50593 Sep 22 23:15:17.215 INFO Sent repair work, now wait for resp
50594 Sep 22 23:15:17.215 INFO [0] received reconcile message
50595 Sep 22 23:15:17.215 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(698), op: ExtentRepair { repair_id: ReconciliationId(698), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50596 Sep 22 23:15:17.215 INFO [0] client ExtentRepair { repair_id: ReconciliationId(698), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50597 Sep 22 23:15:17.215 INFO [0] Sending repair request ReconciliationId(698)
50598 Sep 22 23:15:17.215 INFO [1] received reconcile message
50599 Sep 22 23:15:17.215 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(698), op: ExtentRepair { repair_id: ReconciliationId(698), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50600 Sep 22 23:15:17.215 INFO [1] client ExtentRepair { repair_id: ReconciliationId(698), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50601 Sep 22 23:15:17.215 INFO [1] No action required ReconciliationId(698)
50602 Sep 22 23:15:17.215 INFO [2] received reconcile message
50603 Sep 22 23:15:17.215 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(698), op: ExtentRepair { repair_id: ReconciliationId(698), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50604 Sep 22 23:15:17.215 INFO [2] client ExtentRepair { repair_id: ReconciliationId(698), extent_id: 124, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50605 Sep 22 23:15:17.215 INFO [2] No action required ReconciliationId(698)
50606 Sep 22 23:15:17.215 DEBG 698 Repair extent 124 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
50607 Sep 22 23:15:17.215 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/07C.copy"
50608 Sep 22 23:15:17.277 INFO accepted connection, remote_addr: 127.0.0.1:42905, local_addr: 127.0.0.1:46213, task: repair
50609 Sep 22 23:15:17.277 TRCE incoming request, uri: /extent/124/files, method: GET, req_id: e38070f0-4181-4fd0-9726-8c979008bbae, remote_addr: 127.0.0.1:42905, local_addr: 127.0.0.1:46213, task: repair
50610 Sep 22 23:15:17.277 INFO request completed, latency_us: 191, response_code: 200, uri: /extent/124/files, method: GET, req_id: e38070f0-4181-4fd0-9726-8c979008bbae, remote_addr: 127.0.0.1:42905, local_addr: 127.0.0.1:46213, task: repair
50611 Sep 22 23:15:17.278 INFO eid:124 Found repair files: ["07C", "07C.db"]
50612 Sep 22 23:15:17.278 TRCE incoming request, uri: /newextent/124/data, method: GET, req_id: 1ca4752a-ccd0-4cae-a693-ea8b95f59e57, remote_addr: 127.0.0.1:42905, local_addr: 127.0.0.1:46213, task: repair
50613 Sep 22 23:15:17.278 INFO request completed, latency_us: 298, response_code: 200, uri: /newextent/124/data, method: GET, req_id: 1ca4752a-ccd0-4cae-a693-ea8b95f59e57, remote_addr: 127.0.0.1:42905, local_addr: 127.0.0.1:46213, task: repair
50614 Sep 22 23:15:17.283 TRCE incoming request, uri: /newextent/124/db, method: GET, req_id: 2702736f-6b60-4da8-ad5d-952c90d1b43f, remote_addr: 127.0.0.1:42905, local_addr: 127.0.0.1:46213, task: repair
50615 Sep 22 23:15:17.284 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/124/db, method: GET, req_id: 2702736f-6b60-4da8-ad5d-952c90d1b43f, remote_addr: 127.0.0.1:42905, local_addr: 127.0.0.1:46213, task: repair
50616 Sep 22 23:15:17.285 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/07C.copy" to "/tmp/downstairs-vrx8aK6L/00/000/07C.replace"
50617 Sep 22 23:15:17.285 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50618 Sep 22 23:15:17.286 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/07C.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
50619 Sep 22 23:15:17.286 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/07C"
50620 Sep 22 23:15:17.286 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/07C.db"
50621 Sep 22 23:15:17.286 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50622 Sep 22 23:15:17.286 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/07C.replace" to "/tmp/downstairs-vrx8aK6L/00/000/07C.completed"
50623 Sep 22 23:15:17.286 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50624 Sep 22 23:15:17.286 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50625 Sep 22 23:15:17.286 DEBG [0] It's time to notify for 698
50626 Sep 22 23:15:17.286 INFO Completion from [0] id:698 status:true
50627 Sep 22 23:15:17.286 INFO [699/752] Repair commands completed
50628 Sep 22 23:15:17.286 INFO Pop front: ReconcileIO { id: ReconciliationId(699), op: ExtentReopen { repair_id: ReconciliationId(699), extent_id: 124 }, state: ClientData([New, New, New]) }
50629 Sep 22 23:15:17.287 INFO Sent repair work, now wait for resp
50630 Sep 22 23:15:17.287 INFO [0] received reconcile message
50631 Sep 22 23:15:17.287 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(699), op: ExtentReopen { repair_id: ReconciliationId(699), extent_id: 124 }, state: ClientData([InProgress, New, New]) }, : downstairs
50632 Sep 22 23:15:17.287 INFO [0] client ExtentReopen { repair_id: ReconciliationId(699), extent_id: 124 }
50633 Sep 22 23:15:17.287 INFO [1] received reconcile message
50634 Sep 22 23:15:17.287 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(699), op: ExtentReopen { repair_id: ReconciliationId(699), extent_id: 124 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50635 Sep 22 23:15:17.287 INFO [1] client ExtentReopen { repair_id: ReconciliationId(699), extent_id: 124 }
50636 Sep 22 23:15:17.287 INFO [2] received reconcile message
50637 Sep 22 23:15:17.287 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(699), op: ExtentReopen { repair_id: ReconciliationId(699), extent_id: 124 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50638 Sep 22 23:15:17.287 INFO [2] client ExtentReopen { repair_id: ReconciliationId(699), extent_id: 124 }
50639 Sep 22 23:15:17.287 DEBG 699 Reopen extent 124
50640 Sep 22 23:15:17.287 DEBG 699 Reopen extent 124
50641 Sep 22 23:15:17.288 DEBG 699 Reopen extent 124
50642 Sep 22 23:15:17.288 DEBG [2] It's time to notify for 699
50643 Sep 22 23:15:17.289 INFO Completion from [2] id:699 status:true
50644 Sep 22 23:15:17.289 INFO [700/752] Repair commands completed
50645 Sep 22 23:15:17.289 INFO Pop front: ReconcileIO { id: ReconciliationId(700), op: ExtentFlush { repair_id: ReconciliationId(700), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50646 Sep 22 23:15:17.289 INFO Sent repair work, now wait for resp
50647 Sep 22 23:15:17.289 INFO [0] received reconcile message
50648 Sep 22 23:15:17.289 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(700), op: ExtentFlush { repair_id: ReconciliationId(700), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50649 Sep 22 23:15:17.289 INFO [0] client ExtentFlush { repair_id: ReconciliationId(700), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50650 Sep 22 23:15:17.289 INFO [1] received reconcile message
50651 Sep 22 23:15:17.289 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(700), op: ExtentFlush { repair_id: ReconciliationId(700), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50652 Sep 22 23:15:17.289 INFO [1] client ExtentFlush { repair_id: ReconciliationId(700), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50653 Sep 22 23:15:17.289 INFO [2] received reconcile message
50654 Sep 22 23:15:17.289 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(700), op: ExtentFlush { repair_id: ReconciliationId(700), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50655 Sep 22 23:15:17.289 INFO [2] client ExtentFlush { repair_id: ReconciliationId(700), extent_id: 5, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50656 Sep 22 23:15:17.289 DEBG 700 Flush extent 5 with f:2 g:2
50657 Sep 22 23:15:17.289 DEBG Flush just extent 5 with f:2 and g:2
50658 Sep 22 23:15:17.289 DEBG [1] It's time to notify for 700
50659 Sep 22 23:15:17.289 INFO Completion from [1] id:700 status:true
50660 Sep 22 23:15:17.289 INFO [701/752] Repair commands completed
50661 Sep 22 23:15:17.289 INFO Pop front: ReconcileIO { id: ReconciliationId(701), op: ExtentClose { repair_id: ReconciliationId(701), extent_id: 5 }, state: ClientData([New, New, New]) }
50662 Sep 22 23:15:17.289 INFO Sent repair work, now wait for resp
50663 Sep 22 23:15:17.289 INFO [0] received reconcile message
50664 Sep 22 23:15:17.289 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(701), op: ExtentClose { repair_id: ReconciliationId(701), extent_id: 5 }, state: ClientData([InProgress, New, New]) }, : downstairs
50665 Sep 22 23:15:17.289 INFO [0] client ExtentClose { repair_id: ReconciliationId(701), extent_id: 5 }
50666 Sep 22 23:15:17.289 INFO [1] received reconcile message
50667 Sep 22 23:15:17.289 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(701), op: ExtentClose { repair_id: ReconciliationId(701), extent_id: 5 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50668 Sep 22 23:15:17.289 INFO [1] client ExtentClose { repair_id: ReconciliationId(701), extent_id: 5 }
50669 Sep 22 23:15:17.289 INFO [2] received reconcile message
50670 Sep 22 23:15:17.289 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(701), op: ExtentClose { repair_id: ReconciliationId(701), extent_id: 5 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50671 Sep 22 23:15:17.289 INFO [2] client ExtentClose { repair_id: ReconciliationId(701), extent_id: 5 }
50672 Sep 22 23:15:17.290 DEBG 701 Close extent 5
50673 Sep 22 23:15:17.290 DEBG 701 Close extent 5
50674 Sep 22 23:15:17.290 DEBG 701 Close extent 5
50675 Sep 22 23:15:17.290 DEBG [2] It's time to notify for 701
50676 Sep 22 23:15:17.291 INFO Completion from [2] id:701 status:true
50677 Sep 22 23:15:17.291 INFO [702/752] Repair commands completed
50678 Sep 22 23:15:17.291 INFO Pop front: ReconcileIO { id: ReconciliationId(702), op: ExtentRepair { repair_id: ReconciliationId(702), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50679 Sep 22 23:15:17.291 INFO Sent repair work, now wait for resp
50680 Sep 22 23:15:17.291 INFO [0] received reconcile message
50681 Sep 22 23:15:17.291 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(702), op: ExtentRepair { repair_id: ReconciliationId(702), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50682 Sep 22 23:15:17.291 INFO [0] client ExtentRepair { repair_id: ReconciliationId(702), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50683 Sep 22 23:15:17.291 INFO [0] Sending repair request ReconciliationId(702)
50684 Sep 22 23:15:17.291 INFO [1] received reconcile message
50685 Sep 22 23:15:17.291 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(702), op: ExtentRepair { repair_id: ReconciliationId(702), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50686 Sep 22 23:15:17.291 INFO [1] client ExtentRepair { repair_id: ReconciliationId(702), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50687 Sep 22 23:15:17.291 INFO [1] No action required ReconciliationId(702)
50688 Sep 22 23:15:17.291 INFO [2] received reconcile message
50689 Sep 22 23:15:17.291 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(702), op: ExtentRepair { repair_id: ReconciliationId(702), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50690 Sep 22 23:15:17.291 INFO [2] client ExtentRepair { repair_id: ReconciliationId(702), extent_id: 5, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50691 Sep 22 23:15:17.291 INFO [2] No action required ReconciliationId(702)
50692 Sep 22 23:15:17.291 DEBG 702 Repair extent 5 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
50693 Sep 22 23:15:17.291 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/005.copy"
50694 Sep 22 23:15:17.355 INFO accepted connection, remote_addr: 127.0.0.1:46810, local_addr: 127.0.0.1:46213, task: repair
50695 Sep 22 23:15:17.356 TRCE incoming request, uri: /extent/5/files, method: GET, req_id: 15bae0bb-9d3e-4857-85dc-d5533c6d3fe7, remote_addr: 127.0.0.1:46810, local_addr: 127.0.0.1:46213, task: repair
50696 Sep 22 23:15:17.356 INFO request completed, latency_us: 194, response_code: 200, uri: /extent/5/files, method: GET, req_id: 15bae0bb-9d3e-4857-85dc-d5533c6d3fe7, remote_addr: 127.0.0.1:46810, local_addr: 127.0.0.1:46213, task: repair
50697 Sep 22 23:15:17.356 INFO eid:5 Found repair files: ["005", "005.db"]
50698 Sep 22 23:15:17.356 TRCE incoming request, uri: /newextent/5/data, method: GET, req_id: e6e8cc4b-41dc-4c6c-a083-101970c835a0, remote_addr: 127.0.0.1:46810, local_addr: 127.0.0.1:46213, task: repair
50699 Sep 22 23:15:17.357 INFO request completed, latency_us: 258, response_code: 200, uri: /newextent/5/data, method: GET, req_id: e6e8cc4b-41dc-4c6c-a083-101970c835a0, remote_addr: 127.0.0.1:46810, local_addr: 127.0.0.1:46213, task: repair
50700 Sep 22 23:15:17.361 TRCE incoming request, uri: /newextent/5/db, method: GET, req_id: 395127cf-397c-420b-8648-28cd8d1ba6b4, remote_addr: 127.0.0.1:46810, local_addr: 127.0.0.1:46213, task: repair
50701 Sep 22 23:15:17.362 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/5/db, method: GET, req_id: 395127cf-397c-420b-8648-28cd8d1ba6b4, remote_addr: 127.0.0.1:46810, local_addr: 127.0.0.1:46213, task: repair
50702 Sep 22 23:15:17.363 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/005.copy" to "/tmp/downstairs-vrx8aK6L/00/000/005.replace"
50703 Sep 22 23:15:17.363 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50704 Sep 22 23:15:17.364 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/005.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
50705 Sep 22 23:15:17.364 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/005"
50706 Sep 22 23:15:17.364 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/005.db"
50707 Sep 22 23:15:17.364 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50708 Sep 22 23:15:17.364 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/005.replace" to "/tmp/downstairs-vrx8aK6L/00/000/005.completed"
50709 Sep 22 23:15:17.364 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50710 Sep 22 23:15:17.364 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50711 Sep 22 23:15:17.364 DEBG [0] It's time to notify for 702
50712 Sep 22 23:15:17.364 INFO Completion from [0] id:702 status:true
50713 Sep 22 23:15:17.364 INFO [703/752] Repair commands completed
50714 Sep 22 23:15:17.364 INFO Pop front: ReconcileIO { id: ReconciliationId(703), op: ExtentReopen { repair_id: ReconciliationId(703), extent_id: 5 }, state: ClientData([New, New, New]) }
50715 Sep 22 23:15:17.364 INFO Sent repair work, now wait for resp
50716 Sep 22 23:15:17.365 INFO [0] received reconcile message
50717 Sep 22 23:15:17.365 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(703), op: ExtentReopen { repair_id: ReconciliationId(703), extent_id: 5 }, state: ClientData([InProgress, New, New]) }, : downstairs
50718 Sep 22 23:15:17.365 INFO [0] client ExtentReopen { repair_id: ReconciliationId(703), extent_id: 5 }
50719 Sep 22 23:15:17.365 INFO [1] received reconcile message
50720 Sep 22 23:15:17.365 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(703), op: ExtentReopen { repair_id: ReconciliationId(703), extent_id: 5 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50721 Sep 22 23:15:17.365 INFO [1] client ExtentReopen { repair_id: ReconciliationId(703), extent_id: 5 }
50722 Sep 22 23:15:17.365 INFO [2] received reconcile message
50723 Sep 22 23:15:17.365 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(703), op: ExtentReopen { repair_id: ReconciliationId(703), extent_id: 5 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50724 Sep 22 23:15:17.365 INFO [2] client ExtentReopen { repair_id: ReconciliationId(703), extent_id: 5 }
50725 Sep 22 23:15:17.365 DEBG 703 Reopen extent 5
50726 Sep 22 23:15:17.365 DEBG 703 Reopen extent 5
50727 Sep 22 23:15:17.366 DEBG 703 Reopen extent 5
50728 Sep 22 23:15:17.366 DEBG [2] It's time to notify for 703
50729 Sep 22 23:15:17.367 INFO Completion from [2] id:703 status:true
50730 Sep 22 23:15:17.367 INFO [704/752] Repair commands completed
50731 Sep 22 23:15:17.367 INFO Pop front: ReconcileIO { id: ReconciliationId(704), op: ExtentFlush { repair_id: ReconciliationId(704), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50732 Sep 22 23:15:17.367 INFO Sent repair work, now wait for resp
50733 Sep 22 23:15:17.367 INFO [0] received reconcile message
50734 Sep 22 23:15:17.367 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(704), op: ExtentFlush { repair_id: ReconciliationId(704), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50735 Sep 22 23:15:17.367 INFO [0] client ExtentFlush { repair_id: ReconciliationId(704), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50736 Sep 22 23:15:17.367 INFO [1] received reconcile message
50737 Sep 22 23:15:17.367 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(704), op: ExtentFlush { repair_id: ReconciliationId(704), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50738 Sep 22 23:15:17.367 INFO [1] client ExtentFlush { repair_id: ReconciliationId(704), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50739 Sep 22 23:15:17.367 INFO [2] received reconcile message
50740 Sep 22 23:15:17.367 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(704), op: ExtentFlush { repair_id: ReconciliationId(704), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50741 Sep 22 23:15:17.367 INFO [2] client ExtentFlush { repair_id: ReconciliationId(704), extent_id: 55, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50742 Sep 22 23:15:17.367 DEBG 704 Flush extent 55 with f:2 g:2
50743 Sep 22 23:15:17.367 DEBG Flush just extent 55 with f:2 and g:2
50744 Sep 22 23:15:17.367 DEBG [1] It's time to notify for 704
50745 Sep 22 23:15:17.367 INFO Completion from [1] id:704 status:true
50746 Sep 22 23:15:17.367 INFO [705/752] Repair commands completed
50747 Sep 22 23:15:17.367 INFO Pop front: ReconcileIO { id: ReconciliationId(705), op: ExtentClose { repair_id: ReconciliationId(705), extent_id: 55 }, state: ClientData([New, New, New]) }
50748 Sep 22 23:15:17.367 INFO Sent repair work, now wait for resp
50749 Sep 22 23:15:17.367 INFO [0] received reconcile message
50750 Sep 22 23:15:17.367 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(705), op: ExtentClose { repair_id: ReconciliationId(705), extent_id: 55 }, state: ClientData([InProgress, New, New]) }, : downstairs
50751 Sep 22 23:15:17.367 INFO [0] client ExtentClose { repair_id: ReconciliationId(705), extent_id: 55 }
50752 Sep 22 23:15:17.367 INFO [1] received reconcile message
50753 Sep 22 23:15:17.367 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(705), op: ExtentClose { repair_id: ReconciliationId(705), extent_id: 55 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50754 Sep 22 23:15:17.367 INFO [1] client ExtentClose { repair_id: ReconciliationId(705), extent_id: 55 }
50755 Sep 22 23:15:17.367 INFO [2] received reconcile message
50756 Sep 22 23:15:17.367 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(705), op: ExtentClose { repair_id: ReconciliationId(705), extent_id: 55 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50757 Sep 22 23:15:17.367 INFO [2] client ExtentClose { repair_id: ReconciliationId(705), extent_id: 55 }
50758 Sep 22 23:15:17.368 DEBG 705 Close extent 55
50759 Sep 22 23:15:17.368 DEBG 705 Close extent 55
50760 Sep 22 23:15:17.368 DEBG 705 Close extent 55
50761 Sep 22 23:15:17.368 DEBG [2] It's time to notify for 705
50762 Sep 22 23:15:17.368 INFO Completion from [2] id:705 status:true
50763 Sep 22 23:15:17.368 INFO [706/752] Repair commands completed
50764 Sep 22 23:15:17.369 INFO Pop front: ReconcileIO { id: ReconciliationId(706), op: ExtentRepair { repair_id: ReconciliationId(706), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50765 Sep 22 23:15:17.369 INFO Sent repair work, now wait for resp
50766 Sep 22 23:15:17.369 INFO [0] received reconcile message
50767 Sep 22 23:15:17.369 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(706), op: ExtentRepair { repair_id: ReconciliationId(706), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50768 Sep 22 23:15:17.369 INFO [0] client ExtentRepair { repair_id: ReconciliationId(706), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50769 Sep 22 23:15:17.369 INFO [0] Sending repair request ReconciliationId(706)
50770 Sep 22 23:15:17.369 INFO [1] received reconcile message
50771 Sep 22 23:15:17.369 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(706), op: ExtentRepair { repair_id: ReconciliationId(706), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50772 Sep 22 23:15:17.369 INFO [1] client ExtentRepair { repair_id: ReconciliationId(706), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50773 Sep 22 23:15:17.369 INFO [1] No action required ReconciliationId(706)
50774 Sep 22 23:15:17.369 INFO [2] received reconcile message
50775 Sep 22 23:15:17.369 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(706), op: ExtentRepair { repair_id: ReconciliationId(706), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50776 Sep 22 23:15:17.369 INFO [2] client ExtentRepair { repair_id: ReconciliationId(706), extent_id: 55, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50777 Sep 22 23:15:17.369 INFO [2] No action required ReconciliationId(706)
50778 Sep 22 23:15:17.369 DEBG 706 Repair extent 55 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
50779 Sep 22 23:15:17.369 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/037.copy"
50780 Sep 22 23:15:17.434 INFO accepted connection, remote_addr: 127.0.0.1:57552, local_addr: 127.0.0.1:46213, task: repair
50781 Sep 22 23:15:17.434 TRCE incoming request, uri: /extent/55/files, method: GET, req_id: 6454ead8-cc50-4d05-8409-96505b307855, remote_addr: 127.0.0.1:57552, local_addr: 127.0.0.1:46213, task: repair
50782 Sep 22 23:15:17.434 INFO request completed, latency_us: 189, response_code: 200, uri: /extent/55/files, method: GET, req_id: 6454ead8-cc50-4d05-8409-96505b307855, remote_addr: 127.0.0.1:57552, local_addr: 127.0.0.1:46213, task: repair
50783 Sep 22 23:15:17.434 INFO eid:55 Found repair files: ["037", "037.db"]
50784 Sep 22 23:15:17.435 TRCE incoming request, uri: /newextent/55/data, method: GET, req_id: e494a712-8bd2-428f-8d5d-2a10cb6714ad, remote_addr: 127.0.0.1:57552, local_addr: 127.0.0.1:46213, task: repair
50785 Sep 22 23:15:17.435 INFO request completed, latency_us: 319, response_code: 200, uri: /newextent/55/data, method: GET, req_id: e494a712-8bd2-428f-8d5d-2a10cb6714ad, remote_addr: 127.0.0.1:57552, local_addr: 127.0.0.1:46213, task: repair
50786 Sep 22 23:15:17.440 TRCE incoming request, uri: /newextent/55/db, method: GET, req_id: 81f8e990-a4e9-4df1-8c3b-4a4a25fbdb4e, remote_addr: 127.0.0.1:57552, local_addr: 127.0.0.1:46213, task: repair
50787 Sep 22 23:15:17.440 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/55/db, method: GET, req_id: 81f8e990-a4e9-4df1-8c3b-4a4a25fbdb4e, remote_addr: 127.0.0.1:57552, local_addr: 127.0.0.1:46213, task: repair
50788 Sep 22 23:15:17.441 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/037.copy" to "/tmp/downstairs-vrx8aK6L/00/000/037.replace"
50789 Sep 22 23:15:17.441 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50790 Sep 22 23:15:17.442 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/037.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
50791 Sep 22 23:15:17.442 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/037"
50792 Sep 22 23:15:17.442 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/037.db"
50793 Sep 22 23:15:17.442 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50794 Sep 22 23:15:17.442 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/037.replace" to "/tmp/downstairs-vrx8aK6L/00/000/037.completed"
50795 Sep 22 23:15:17.442 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50796 Sep 22 23:15:17.443 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50797 Sep 22 23:15:17.443 DEBG [0] It's time to notify for 706
50798 Sep 22 23:15:17.443 INFO Completion from [0] id:706 status:true
50799 Sep 22 23:15:17.443 INFO [707/752] Repair commands completed
50800 Sep 22 23:15:17.443 INFO Pop front: ReconcileIO { id: ReconciliationId(707), op: ExtentReopen { repair_id: ReconciliationId(707), extent_id: 55 }, state: ClientData([New, New, New]) }
50801 Sep 22 23:15:17.443 INFO Sent repair work, now wait for resp
50802 Sep 22 23:15:17.443 INFO [0] received reconcile message
50803 Sep 22 23:15:17.443 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(707), op: ExtentReopen { repair_id: ReconciliationId(707), extent_id: 55 }, state: ClientData([InProgress, New, New]) }, : downstairs
50804 Sep 22 23:15:17.443 INFO [0] client ExtentReopen { repair_id: ReconciliationId(707), extent_id: 55 }
50805 Sep 22 23:15:17.443 INFO [1] received reconcile message
50806 Sep 22 23:15:17.443 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(707), op: ExtentReopen { repair_id: ReconciliationId(707), extent_id: 55 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50807 Sep 22 23:15:17.443 INFO [1] client ExtentReopen { repair_id: ReconciliationId(707), extent_id: 55 }
50808 Sep 22 23:15:17.443 INFO [2] received reconcile message
50809 Sep 22 23:15:17.443 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(707), op: ExtentReopen { repair_id: ReconciliationId(707), extent_id: 55 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50810 Sep 22 23:15:17.443 INFO [2] client ExtentReopen { repair_id: ReconciliationId(707), extent_id: 55 }
50811 Sep 22 23:15:17.443 DEBG 707 Reopen extent 55
50812 Sep 22 23:15:17.444 DEBG 707 Reopen extent 55
50813 Sep 22 23:15:17.444 DEBG 707 Reopen extent 55
50814 Sep 22 23:15:17.445 DEBG [2] It's time to notify for 707
50815 Sep 22 23:15:17.445 INFO Completion from [2] id:707 status:true
50816 Sep 22 23:15:17.445 INFO [708/752] Repair commands completed
50817 Sep 22 23:15:17.445 INFO Pop front: ReconcileIO { id: ReconciliationId(708), op: ExtentFlush { repair_id: ReconciliationId(708), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50818 Sep 22 23:15:17.445 INFO Sent repair work, now wait for resp
50819 Sep 22 23:15:17.445 INFO [0] received reconcile message
50820 Sep 22 23:15:17.445 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(708), op: ExtentFlush { repair_id: ReconciliationId(708), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50821 Sep 22 23:15:17.445 INFO [0] client ExtentFlush { repair_id: ReconciliationId(708), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50822 Sep 22 23:15:17.445 INFO [1] received reconcile message
50823 Sep 22 23:15:17.445 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(708), op: ExtentFlush { repair_id: ReconciliationId(708), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50824 Sep 22 23:15:17.445 INFO [1] client ExtentFlush { repair_id: ReconciliationId(708), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50825 Sep 22 23:15:17.445 INFO [2] received reconcile message
50826 Sep 22 23:15:17.445 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(708), op: ExtentFlush { repair_id: ReconciliationId(708), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50827 Sep 22 23:15:17.445 INFO [2] client ExtentFlush { repair_id: ReconciliationId(708), extent_id: 62, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50828 Sep 22 23:15:17.445 DEBG 708 Flush extent 62 with f:2 g:2
50829 Sep 22 23:15:17.445 DEBG Flush just extent 62 with f:2 and g:2
50830 Sep 22 23:15:17.445 DEBG [1] It's time to notify for 708
50831 Sep 22 23:15:17.446 INFO Completion from [1] id:708 status:true
50832 Sep 22 23:15:17.446 INFO [709/752] Repair commands completed
50833 Sep 22 23:15:17.446 INFO Pop front: ReconcileIO { id: ReconciliationId(709), op: ExtentClose { repair_id: ReconciliationId(709), extent_id: 62 }, state: ClientData([New, New, New]) }
50834 Sep 22 23:15:17.446 INFO Sent repair work, now wait for resp
50835 Sep 22 23:15:17.446 INFO [0] received reconcile message
50836 Sep 22 23:15:17.446 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(709), op: ExtentClose { repair_id: ReconciliationId(709), extent_id: 62 }, state: ClientData([InProgress, New, New]) }, : downstairs
50837 Sep 22 23:15:17.446 INFO [0] client ExtentClose { repair_id: ReconciliationId(709), extent_id: 62 }
50838 Sep 22 23:15:17.446 INFO [1] received reconcile message
50839 Sep 22 23:15:17.446 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(709), op: ExtentClose { repair_id: ReconciliationId(709), extent_id: 62 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50840 Sep 22 23:15:17.446 INFO [1] client ExtentClose { repair_id: ReconciliationId(709), extent_id: 62 }
50841 Sep 22 23:15:17.446 INFO [2] received reconcile message
50842 Sep 22 23:15:17.446 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(709), op: ExtentClose { repair_id: ReconciliationId(709), extent_id: 62 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50843 Sep 22 23:15:17.446 INFO [2] client ExtentClose { repair_id: ReconciliationId(709), extent_id: 62 }
50844 Sep 22 23:15:17.446 DEBG 709 Close extent 62
50845 Sep 22 23:15:17.446 DEBG 709 Close extent 62
50846 Sep 22 23:15:17.446 DEBG 709 Close extent 62
50847 Sep 22 23:15:17.447 DEBG [2] It's time to notify for 709
50848 Sep 22 23:15:17.447 INFO Completion from [2] id:709 status:true
50849 Sep 22 23:15:17.447 INFO [710/752] Repair commands completed
50850 Sep 22 23:15:17.447 INFO Pop front: ReconcileIO { id: ReconciliationId(710), op: ExtentRepair { repair_id: ReconciliationId(710), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50851 Sep 22 23:15:17.447 INFO Sent repair work, now wait for resp
50852 Sep 22 23:15:17.447 INFO [0] received reconcile message
50853 Sep 22 23:15:17.447 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(710), op: ExtentRepair { repair_id: ReconciliationId(710), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50854 Sep 22 23:15:17.447 INFO [0] client ExtentRepair { repair_id: ReconciliationId(710), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50855 Sep 22 23:15:17.447 INFO [0] Sending repair request ReconciliationId(710)
50856 Sep 22 23:15:17.447 INFO [1] received reconcile message
50857 Sep 22 23:15:17.447 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(710), op: ExtentRepair { repair_id: ReconciliationId(710), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50858 Sep 22 23:15:17.447 INFO [1] client ExtentRepair { repair_id: ReconciliationId(710), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50859 Sep 22 23:15:17.447 INFO [1] No action required ReconciliationId(710)
50860 Sep 22 23:15:17.447 INFO [2] received reconcile message
50861 Sep 22 23:15:17.447 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(710), op: ExtentRepair { repair_id: ReconciliationId(710), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50862 Sep 22 23:15:17.447 INFO [2] client ExtentRepair { repair_id: ReconciliationId(710), extent_id: 62, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50863 Sep 22 23:15:17.447 INFO [2] No action required ReconciliationId(710)
50864 Sep 22 23:15:17.447 DEBG 710 Repair extent 62 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
50865 Sep 22 23:15:17.447 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/03E.copy"
50866 Sep 22 23:15:17.510 INFO accepted connection, remote_addr: 127.0.0.1:59015, local_addr: 127.0.0.1:46213, task: repair
50867 Sep 22 23:15:17.510 TRCE incoming request, uri: /extent/62/files, method: GET, req_id: 44d53c19-c569-41bb-846a-b2daca637e48, remote_addr: 127.0.0.1:59015, local_addr: 127.0.0.1:46213, task: repair
50868 Sep 22 23:15:17.510 INFO request completed, latency_us: 188, response_code: 200, uri: /extent/62/files, method: GET, req_id: 44d53c19-c569-41bb-846a-b2daca637e48, remote_addr: 127.0.0.1:59015, local_addr: 127.0.0.1:46213, task: repair
50869 Sep 22 23:15:17.511 INFO eid:62 Found repair files: ["03E", "03E.db"]
50870 Sep 22 23:15:17.511 TRCE incoming request, uri: /newextent/62/data, method: GET, req_id: b031140b-6e94-4e5c-ae32-b65d58b4dd01, remote_addr: 127.0.0.1:59015, local_addr: 127.0.0.1:46213, task: repair
50871 Sep 22 23:15:17.511 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/62/data, method: GET, req_id: b031140b-6e94-4e5c-ae32-b65d58b4dd01, remote_addr: 127.0.0.1:59015, local_addr: 127.0.0.1:46213, task: repair
50872 Sep 22 23:15:17.516 TRCE incoming request, uri: /newextent/62/db, method: GET, req_id: 59e2250e-7b86-4391-a8c7-f8f7e812e62f, remote_addr: 127.0.0.1:59015, local_addr: 127.0.0.1:46213, task: repair
50873 Sep 22 23:15:17.517 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/62/db, method: GET, req_id: 59e2250e-7b86-4391-a8c7-f8f7e812e62f, remote_addr: 127.0.0.1:59015, local_addr: 127.0.0.1:46213, task: repair
50874 Sep 22 23:15:17.518 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/03E.copy" to "/tmp/downstairs-vrx8aK6L/00/000/03E.replace"
50875 Sep 22 23:15:17.518 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50876 Sep 22 23:15:17.519 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/03E.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
50877 Sep 22 23:15:17.519 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/03E"
50878 Sep 22 23:15:17.519 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/03E.db"
50879 Sep 22 23:15:17.519 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50880 Sep 22 23:15:17.519 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/03E.replace" to "/tmp/downstairs-vrx8aK6L/00/000/03E.completed"
50881 Sep 22 23:15:17.519 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50882 Sep 22 23:15:17.519 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50883 Sep 22 23:15:17.519 DEBG [0] It's time to notify for 710
50884 Sep 22 23:15:17.519 INFO Completion from [0] id:710 status:true
50885 Sep 22 23:15:17.519 INFO [711/752] Repair commands completed
50886 Sep 22 23:15:17.519 INFO Pop front: ReconcileIO { id: ReconciliationId(711), op: ExtentReopen { repair_id: ReconciliationId(711), extent_id: 62 }, state: ClientData([New, New, New]) }
50887 Sep 22 23:15:17.519 INFO Sent repair work, now wait for resp
50888 Sep 22 23:15:17.519 INFO [0] received reconcile message
50889 Sep 22 23:15:17.519 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(711), op: ExtentReopen { repair_id: ReconciliationId(711), extent_id: 62 }, state: ClientData([InProgress, New, New]) }, : downstairs
50890 Sep 22 23:15:17.519 INFO [0] client ExtentReopen { repair_id: ReconciliationId(711), extent_id: 62 }
50891 Sep 22 23:15:17.520 INFO [1] received reconcile message
50892 Sep 22 23:15:17.520 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(711), op: ExtentReopen { repair_id: ReconciliationId(711), extent_id: 62 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50893 Sep 22 23:15:17.520 INFO [1] client ExtentReopen { repair_id: ReconciliationId(711), extent_id: 62 }
50894 Sep 22 23:15:17.520 INFO [2] received reconcile message
50895 Sep 22 23:15:17.520 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(711), op: ExtentReopen { repair_id: ReconciliationId(711), extent_id: 62 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50896 Sep 22 23:15:17.520 INFO [2] client ExtentReopen { repair_id: ReconciliationId(711), extent_id: 62 }
50897 Sep 22 23:15:17.520 DEBG 711 Reopen extent 62
50898 Sep 22 23:15:17.520 DEBG 711 Reopen extent 62
50899 Sep 22 23:15:17.521 DEBG 711 Reopen extent 62
50900 Sep 22 23:15:17.521 DEBG [2] It's time to notify for 711
50901 Sep 22 23:15:17.521 INFO Completion from [2] id:711 status:true
50902 Sep 22 23:15:17.521 INFO [712/752] Repair commands completed
50903 Sep 22 23:15:17.521 INFO Pop front: ReconcileIO { id: ReconciliationId(712), op: ExtentFlush { repair_id: ReconciliationId(712), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50904 Sep 22 23:15:17.522 INFO Sent repair work, now wait for resp
50905 Sep 22 23:15:17.522 INFO [0] received reconcile message
50906 Sep 22 23:15:17.522 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(712), op: ExtentFlush { repair_id: ReconciliationId(712), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
50907 Sep 22 23:15:17.522 INFO [0] client ExtentFlush { repair_id: ReconciliationId(712), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50908 Sep 22 23:15:17.522 INFO [1] received reconcile message
50909 Sep 22 23:15:17.522 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(712), op: ExtentFlush { repair_id: ReconciliationId(712), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
50910 Sep 22 23:15:17.522 INFO [1] client ExtentFlush { repair_id: ReconciliationId(712), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50911 Sep 22 23:15:17.522 INFO [2] received reconcile message
50912 Sep 22 23:15:17.522 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(712), op: ExtentFlush { repair_id: ReconciliationId(712), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
50913 Sep 22 23:15:17.522 INFO [2] client ExtentFlush { repair_id: ReconciliationId(712), extent_id: 89, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
50914 Sep 22 23:15:17.522 DEBG 712 Flush extent 89 with f:2 g:2
50915 Sep 22 23:15:17.522 DEBG Flush just extent 89 with f:2 and g:2
50916 Sep 22 23:15:17.522 DEBG [1] It's time to notify for 712
50917 Sep 22 23:15:17.522 INFO Completion from [1] id:712 status:true
50918 Sep 22 23:15:17.522 INFO [713/752] Repair commands completed
50919 Sep 22 23:15:17.522 INFO Pop front: ReconcileIO { id: ReconciliationId(713), op: ExtentClose { repair_id: ReconciliationId(713), extent_id: 89 }, state: ClientData([New, New, New]) }
50920 Sep 22 23:15:17.522 INFO Sent repair work, now wait for resp
50921 Sep 22 23:15:17.522 INFO [0] received reconcile message
50922 Sep 22 23:15:17.522 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(713), op: ExtentClose { repair_id: ReconciliationId(713), extent_id: 89 }, state: ClientData([InProgress, New, New]) }, : downstairs
50923 Sep 22 23:15:17.522 INFO [0] client ExtentClose { repair_id: ReconciliationId(713), extent_id: 89 }
50924 Sep 22 23:15:17.522 INFO [1] received reconcile message
50925 Sep 22 23:15:17.522 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(713), op: ExtentClose { repair_id: ReconciliationId(713), extent_id: 89 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50926 Sep 22 23:15:17.522 INFO [1] client ExtentClose { repair_id: ReconciliationId(713), extent_id: 89 }
50927 Sep 22 23:15:17.522 INFO [2] received reconcile message
50928 Sep 22 23:15:17.522 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(713), op: ExtentClose { repair_id: ReconciliationId(713), extent_id: 89 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50929 Sep 22 23:15:17.522 INFO [2] client ExtentClose { repair_id: ReconciliationId(713), extent_id: 89 }
50930 Sep 22 23:15:17.522 DEBG 713 Close extent 89
50931 Sep 22 23:15:17.523 DEBG 713 Close extent 89
50932 Sep 22 23:15:17.523 DEBG 713 Close extent 89
50933 Sep 22 23:15:17.523 DEBG [2] It's time to notify for 713
50934 Sep 22 23:15:17.523 INFO Completion from [2] id:713 status:true
50935 Sep 22 23:15:17.523 INFO [714/752] Repair commands completed
50936 Sep 22 23:15:17.523 INFO Pop front: ReconcileIO { id: ReconciliationId(714), op: ExtentRepair { repair_id: ReconciliationId(714), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
50937 Sep 22 23:15:17.523 INFO Sent repair work, now wait for resp
50938 Sep 22 23:15:17.523 INFO [0] received reconcile message
50939 Sep 22 23:15:17.524 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(714), op: ExtentRepair { repair_id: ReconciliationId(714), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
50940 Sep 22 23:15:17.524 INFO [0] client ExtentRepair { repair_id: ReconciliationId(714), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50941 Sep 22 23:15:17.524 INFO [0] Sending repair request ReconciliationId(714)
50942 Sep 22 23:15:17.524 INFO [1] received reconcile message
50943 Sep 22 23:15:17.524 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(714), op: ExtentRepair { repair_id: ReconciliationId(714), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50944 Sep 22 23:15:17.524 INFO [1] client ExtentRepair { repair_id: ReconciliationId(714), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50945 Sep 22 23:15:17.524 INFO [1] No action required ReconciliationId(714)
50946 Sep 22 23:15:17.524 INFO [2] received reconcile message
50947 Sep 22 23:15:17.524 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(714), op: ExtentRepair { repair_id: ReconciliationId(714), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
50948 Sep 22 23:15:17.524 INFO [2] client ExtentRepair { repair_id: ReconciliationId(714), extent_id: 89, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
50949 Sep 22 23:15:17.524 INFO [2] No action required ReconciliationId(714)
50950 Sep 22 23:15:17.524 DEBG 714 Repair extent 89 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
50951 Sep 22 23:15:17.524 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/059.copy"
50952 Sep 22 23:15:17.554 DEBG [rc] retire 1091 clears [JobId(1090), JobId(1091)], : downstairs
50953 Sep 22 23:15:17.554 ERRO [1] job id 1092 saw error GenericError("test error")
50954 Sep 22 23:15:17.554 DEBG IO Flush 1093 has deps [JobId(1092)]
50955 Sep 22 23:15:17.554 WARN returning error on read!
50956 Sep 22 23:15:17.554 DEBG Read :1092 deps:[JobId(1091)] res:false
50957 Sep 22 23:15:17.560 DEBG Read :1092 deps:[JobId(1091)] res:true
50958 Sep 22 23:15:17.582 ERRO [0] job id 1092 saw error GenericError("test error")
50959 Sep 22 23:15:17.584 DEBG Flush :1093 extent_limit None deps:[JobId(1092)] res:true f:36 g:1
50960 Sep 22 23:15:17.584 INFO [lossy] sleeping 1 second
50961 Sep 22 23:15:17.588 INFO accepted connection, remote_addr: 127.0.0.1:33732, local_addr: 127.0.0.1:46213, task: repair
50962 Sep 22 23:15:17.588 TRCE incoming request, uri: /extent/89/files, method: GET, req_id: ec39fe56-1327-4046-8ad5-a9420b9458a9, remote_addr: 127.0.0.1:33732, local_addr: 127.0.0.1:46213, task: repair
50963 Sep 22 23:15:17.588 INFO request completed, latency_us: 193, response_code: 200, uri: /extent/89/files, method: GET, req_id: ec39fe56-1327-4046-8ad5-a9420b9458a9, remote_addr: 127.0.0.1:33732, local_addr: 127.0.0.1:46213, task: repair
50964 Sep 22 23:15:17.588 INFO eid:89 Found repair files: ["059", "059.db"]
50965 Sep 22 23:15:17.589 TRCE incoming request, uri: /newextent/89/data, method: GET, req_id: 8bd54bd4-e07b-4b71-a72f-c4c152e56ecd, remote_addr: 127.0.0.1:33732, local_addr: 127.0.0.1:46213, task: repair
50966 Sep 22 23:15:17.589 INFO request completed, latency_us: 319, response_code: 200, uri: /newextent/89/data, method: GET, req_id: 8bd54bd4-e07b-4b71-a72f-c4c152e56ecd, remote_addr: 127.0.0.1:33732, local_addr: 127.0.0.1:46213, task: repair
50967 Sep 22 23:15:17.594 TRCE incoming request, uri: /newextent/89/db, method: GET, req_id: 4f56a5f6-da06-4545-9695-5b7a865585eb, remote_addr: 127.0.0.1:33732, local_addr: 127.0.0.1:46213, task: repair
50968 Sep 22 23:15:17.594 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/89/db, method: GET, req_id: 4f56a5f6-da06-4545-9695-5b7a865585eb, remote_addr: 127.0.0.1:33732, local_addr: 127.0.0.1:46213, task: repair
50969 Sep 22 23:15:17.596 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/059.copy" to "/tmp/downstairs-vrx8aK6L/00/000/059.replace"
50970 Sep 22 23:15:17.596 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50971 Sep 22 23:15:17.597 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/059.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
50972 Sep 22 23:15:17.597 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/059"
50973 Sep 22 23:15:17.597 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/059.db"
50974 Sep 22 23:15:17.597 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50975 Sep 22 23:15:17.597 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/059.replace" to "/tmp/downstairs-vrx8aK6L/00/000/059.completed"
50976 Sep 22 23:15:17.597 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50977 Sep 22 23:15:17.597 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
50978 Sep 22 23:15:17.597 DEBG [0] It's time to notify for 714
50979 Sep 22 23:15:17.597 INFO Completion from [0] id:714 status:true
50980 Sep 22 23:15:17.597 INFO [715/752] Repair commands completed
50981 Sep 22 23:15:17.597 INFO Pop front: ReconcileIO { id: ReconciliationId(715), op: ExtentReopen { repair_id: ReconciliationId(715), extent_id: 89 }, state: ClientData([New, New, New]) }
50982 Sep 22 23:15:17.597 INFO Sent repair work, now wait for resp
50983 Sep 22 23:15:17.597 INFO [0] received reconcile message
50984 Sep 22 23:15:17.597 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(715), op: ExtentReopen { repair_id: ReconciliationId(715), extent_id: 89 }, state: ClientData([InProgress, New, New]) }, : downstairs
50985 Sep 22 23:15:17.597 INFO [0] client ExtentReopen { repair_id: ReconciliationId(715), extent_id: 89 }
50986 Sep 22 23:15:17.598 INFO [1] received reconcile message
50987 Sep 22 23:15:17.598 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(715), op: ExtentReopen { repair_id: ReconciliationId(715), extent_id: 89 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
50988 Sep 22 23:15:17.598 INFO [1] client ExtentReopen { repair_id: ReconciliationId(715), extent_id: 89 }
50989 Sep 22 23:15:17.598 INFO [2] received reconcile message
50990 Sep 22 23:15:17.598 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(715), op: ExtentReopen { repair_id: ReconciliationId(715), extent_id: 89 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
50991 Sep 22 23:15:17.598 INFO [2] client ExtentReopen { repair_id: ReconciliationId(715), extent_id: 89 }
50992 Sep 22 23:15:17.598 DEBG 715 Reopen extent 89
50993 Sep 22 23:15:17.598 DEBG 715 Reopen extent 89
50994 Sep 22 23:15:17.599 DEBG 715 Reopen extent 89
50995 Sep 22 23:15:17.599 DEBG [2] It's time to notify for 715
50996 Sep 22 23:15:17.599 INFO Completion from [2] id:715 status:true
50997 Sep 22 23:15:17.599 INFO [716/752] Repair commands completed
50998 Sep 22 23:15:17.599 INFO Pop front: ReconcileIO { id: ReconciliationId(716), op: ExtentFlush { repair_id: ReconciliationId(716), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
50999 Sep 22 23:15:17.600 INFO Sent repair work, now wait for resp
51000 Sep 22 23:15:17.600 INFO [0] received reconcile message
51001 Sep 22 23:15:17.600 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(716), op: ExtentFlush { repair_id: ReconciliationId(716), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51002 Sep 22 23:15:17.600 INFO [0] client ExtentFlush { repair_id: ReconciliationId(716), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51003 Sep 22 23:15:17.600 INFO [1] received reconcile message
51004 Sep 22 23:15:17.600 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(716), op: ExtentFlush { repair_id: ReconciliationId(716), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51005 Sep 22 23:15:17.600 INFO [1] client ExtentFlush { repair_id: ReconciliationId(716), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51006 Sep 22 23:15:17.600 INFO [2] received reconcile message
51007 Sep 22 23:15:17.600 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(716), op: ExtentFlush { repair_id: ReconciliationId(716), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51008 Sep 22 23:15:17.600 INFO [2] client ExtentFlush { repair_id: ReconciliationId(716), extent_id: 126, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51009 Sep 22 23:15:17.600 DEBG 716 Flush extent 126 with f:2 g:2
51010 Sep 22 23:15:17.600 DEBG Flush just extent 126 with f:2 and g:2
51011 Sep 22 23:15:17.600 DEBG [1] It's time to notify for 716
51012 Sep 22 23:15:17.600 INFO Completion from [1] id:716 status:true
51013 Sep 22 23:15:17.600 INFO [717/752] Repair commands completed
51014 Sep 22 23:15:17.600 INFO Pop front: ReconcileIO { id: ReconciliationId(717), op: ExtentClose { repair_id: ReconciliationId(717), extent_id: 126 }, state: ClientData([New, New, New]) }
51015 Sep 22 23:15:17.600 INFO Sent repair work, now wait for resp
51016 Sep 22 23:15:17.600 INFO [0] received reconcile message
51017 Sep 22 23:15:17.600 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(717), op: ExtentClose { repair_id: ReconciliationId(717), extent_id: 126 }, state: ClientData([InProgress, New, New]) }, : downstairs
51018 Sep 22 23:15:17.600 INFO [0] client ExtentClose { repair_id: ReconciliationId(717), extent_id: 126 }
51019 Sep 22 23:15:17.600 INFO [1] received reconcile message
51020 Sep 22 23:15:17.600 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(717), op: ExtentClose { repair_id: ReconciliationId(717), extent_id: 126 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51021 Sep 22 23:15:17.600 INFO [1] client ExtentClose { repair_id: ReconciliationId(717), extent_id: 126 }
51022 Sep 22 23:15:17.600 INFO [2] received reconcile message
51023 Sep 22 23:15:17.600 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(717), op: ExtentClose { repair_id: ReconciliationId(717), extent_id: 126 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51024 Sep 22 23:15:17.600 INFO [2] client ExtentClose { repair_id: ReconciliationId(717), extent_id: 126 }
51025 Sep 22 23:15:17.600 DEBG 717 Close extent 126
51026 Sep 22 23:15:17.601 DEBG 717 Close extent 126
51027 Sep 22 23:15:17.601 DEBG 717 Close extent 126
51028 Sep 22 23:15:17.601 DEBG [2] It's time to notify for 717
51029 Sep 22 23:15:17.601 INFO Completion from [2] id:717 status:true
51030 Sep 22 23:15:17.601 INFO [718/752] Repair commands completed
51031 Sep 22 23:15:17.601 INFO Pop front: ReconcileIO { id: ReconciliationId(718), op: ExtentRepair { repair_id: ReconciliationId(718), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51032 Sep 22 23:15:17.602 INFO Sent repair work, now wait for resp
51033 Sep 22 23:15:17.602 INFO [0] received reconcile message
51034 Sep 22 23:15:17.602 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(718), op: ExtentRepair { repair_id: ReconciliationId(718), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51035 Sep 22 23:15:17.602 INFO [0] client ExtentRepair { repair_id: ReconciliationId(718), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51036 Sep 22 23:15:17.602 INFO [0] Sending repair request ReconciliationId(718)
51037 Sep 22 23:15:17.602 INFO [1] received reconcile message
51038 Sep 22 23:15:17.602 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(718), op: ExtentRepair { repair_id: ReconciliationId(718), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51039 Sep 22 23:15:17.602 INFO [1] client ExtentRepair { repair_id: ReconciliationId(718), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51040 Sep 22 23:15:17.602 INFO [1] No action required ReconciliationId(718)
51041 Sep 22 23:15:17.602 INFO [2] received reconcile message
51042 Sep 22 23:15:17.602 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(718), op: ExtentRepair { repair_id: ReconciliationId(718), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51043 Sep 22 23:15:17.602 INFO [2] client ExtentRepair { repair_id: ReconciliationId(718), extent_id: 126, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51044 Sep 22 23:15:17.602 INFO [2] No action required ReconciliationId(718)
51045 Sep 22 23:15:17.602 DEBG 718 Repair extent 126 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
51046 Sep 22 23:15:17.602 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/07E.copy"
51047 Sep 22 23:15:17.664 INFO accepted connection, remote_addr: 127.0.0.1:34817, local_addr: 127.0.0.1:46213, task: repair
51048 Sep 22 23:15:17.664 TRCE incoming request, uri: /extent/126/files, method: GET, req_id: 6ad0c804-5c72-4e65-a940-a0aa1d4918d6, remote_addr: 127.0.0.1:34817, local_addr: 127.0.0.1:46213, task: repair
51049 Sep 22 23:15:17.665 INFO request completed, latency_us: 191, response_code: 200, uri: /extent/126/files, method: GET, req_id: 6ad0c804-5c72-4e65-a940-a0aa1d4918d6, remote_addr: 127.0.0.1:34817, local_addr: 127.0.0.1:46213, task: repair
51050 Sep 22 23:15:17.665 INFO eid:126 Found repair files: ["07E", "07E.db"]
51051 Sep 22 23:15:17.665 TRCE incoming request, uri: /newextent/126/data, method: GET, req_id: a32e0d22-0263-4725-91c3-a8a6b0705f1d, remote_addr: 127.0.0.1:34817, local_addr: 127.0.0.1:46213, task: repair
51052 Sep 22 23:15:17.665 INFO request completed, latency_us: 303, response_code: 200, uri: /newextent/126/data, method: GET, req_id: a32e0d22-0263-4725-91c3-a8a6b0705f1d, remote_addr: 127.0.0.1:34817, local_addr: 127.0.0.1:46213, task: repair
51053 Sep 22 23:15:17.670 TRCE incoming request, uri: /newextent/126/db, method: GET, req_id: ad0256fe-142c-455e-92cf-ac92ed7fbca1, remote_addr: 127.0.0.1:34817, local_addr: 127.0.0.1:46213, task: repair
51054 Sep 22 23:15:17.671 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/126/db, method: GET, req_id: ad0256fe-142c-455e-92cf-ac92ed7fbca1, remote_addr: 127.0.0.1:34817, local_addr: 127.0.0.1:46213, task: repair
51055 Sep 22 23:15:17.672 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/07E.copy" to "/tmp/downstairs-vrx8aK6L/00/000/07E.replace"
51056 Sep 22 23:15:17.672 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51057 Sep 22 23:15:17.673 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/07E.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
51058 Sep 22 23:15:17.673 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/07E"
51059 Sep 22 23:15:17.673 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/07E.db"
51060 Sep 22 23:15:17.673 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51061 Sep 22 23:15:17.673 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/07E.replace" to "/tmp/downstairs-vrx8aK6L/00/000/07E.completed"
51062 Sep 22 23:15:17.673 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51063 Sep 22 23:15:17.673 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51064 Sep 22 23:15:17.673 DEBG [0] It's time to notify for 718
51065 Sep 22 23:15:17.673 INFO Completion from [0] id:718 status:true
51066 Sep 22 23:15:17.674 INFO [719/752] Repair commands completed
51067 Sep 22 23:15:17.674 INFO Pop front: ReconcileIO { id: ReconciliationId(719), op: ExtentReopen { repair_id: ReconciliationId(719), extent_id: 126 }, state: ClientData([New, New, New]) }
51068 Sep 22 23:15:17.674 INFO Sent repair work, now wait for resp
51069 Sep 22 23:15:17.674 INFO [0] received reconcile message
51070 Sep 22 23:15:17.674 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(719), op: ExtentReopen { repair_id: ReconciliationId(719), extent_id: 126 }, state: ClientData([InProgress, New, New]) }, : downstairs
51071 Sep 22 23:15:17.674 INFO [0] client ExtentReopen { repair_id: ReconciliationId(719), extent_id: 126 }
51072 Sep 22 23:15:17.674 INFO [1] received reconcile message
51073 Sep 22 23:15:17.674 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(719), op: ExtentReopen { repair_id: ReconciliationId(719), extent_id: 126 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51074 Sep 22 23:15:17.674 INFO [1] client ExtentReopen { repair_id: ReconciliationId(719), extent_id: 126 }
51075 Sep 22 23:15:17.674 INFO [2] received reconcile message
51076 Sep 22 23:15:17.674 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(719), op: ExtentReopen { repair_id: ReconciliationId(719), extent_id: 126 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51077 Sep 22 23:15:17.674 INFO [2] client ExtentReopen { repair_id: ReconciliationId(719), extent_id: 126 }
51078 Sep 22 23:15:17.674 DEBG 719 Reopen extent 126
51079 Sep 22 23:15:17.674 DEBG 719 Reopen extent 126
51080 Sep 22 23:15:17.675 DEBG 719 Reopen extent 126
51081 Sep 22 23:15:17.676 DEBG [2] It's time to notify for 719
51082 Sep 22 23:15:17.676 INFO Completion from [2] id:719 status:true
51083 Sep 22 23:15:17.676 INFO [720/752] Repair commands completed
51084 Sep 22 23:15:17.676 INFO Pop front: ReconcileIO { id: ReconciliationId(720), op: ExtentFlush { repair_id: ReconciliationId(720), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51085 Sep 22 23:15:17.676 INFO Sent repair work, now wait for resp
51086 Sep 22 23:15:17.676 INFO [0] received reconcile message
51087 Sep 22 23:15:17.676 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(720), op: ExtentFlush { repair_id: ReconciliationId(720), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51088 Sep 22 23:15:17.676 INFO [0] client ExtentFlush { repair_id: ReconciliationId(720), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51089 Sep 22 23:15:17.676 INFO [1] received reconcile message
51090 Sep 22 23:15:17.676 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(720), op: ExtentFlush { repair_id: ReconciliationId(720), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51091 Sep 22 23:15:17.676 INFO [1] client ExtentFlush { repair_id: ReconciliationId(720), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51092 Sep 22 23:15:17.676 INFO [2] received reconcile message
51093 Sep 22 23:15:17.676 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(720), op: ExtentFlush { repair_id: ReconciliationId(720), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51094 Sep 22 23:15:17.676 INFO [2] client ExtentFlush { repair_id: ReconciliationId(720), extent_id: 186, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51095 Sep 22 23:15:17.676 DEBG 720 Flush extent 186 with f:2 g:2
51096 Sep 22 23:15:17.676 DEBG Flush just extent 186 with f:2 and g:2
51097 Sep 22 23:15:17.676 DEBG [1] It's time to notify for 720
51098 Sep 22 23:15:17.676 INFO Completion from [1] id:720 status:true
51099 Sep 22 23:15:17.676 INFO [721/752] Repair commands completed
51100 Sep 22 23:15:17.676 INFO Pop front: ReconcileIO { id: ReconciliationId(721), op: ExtentClose { repair_id: ReconciliationId(721), extent_id: 186 }, state: ClientData([New, New, New]) }
51101 Sep 22 23:15:17.676 INFO Sent repair work, now wait for resp
51102 Sep 22 23:15:17.676 INFO [0] received reconcile message
51103 Sep 22 23:15:17.676 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(721), op: ExtentClose { repair_id: ReconciliationId(721), extent_id: 186 }, state: ClientData([InProgress, New, New]) }, : downstairs
51104 Sep 22 23:15:17.676 INFO [0] client ExtentClose { repair_id: ReconciliationId(721), extent_id: 186 }
51105 Sep 22 23:15:17.676 INFO [1] received reconcile message
51106 Sep 22 23:15:17.676 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(721), op: ExtentClose { repair_id: ReconciliationId(721), extent_id: 186 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51107 Sep 22 23:15:17.676 INFO [1] client ExtentClose { repair_id: ReconciliationId(721), extent_id: 186 }
51108 Sep 22 23:15:17.676 INFO [2] received reconcile message
51109 Sep 22 23:15:17.676 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(721), op: ExtentClose { repair_id: ReconciliationId(721), extent_id: 186 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51110 Sep 22 23:15:17.676 INFO [2] client ExtentClose { repair_id: ReconciliationId(721), extent_id: 186 }
51111 Sep 22 23:15:17.677 DEBG 721 Close extent 186
51112 Sep 22 23:15:17.677 DEBG 721 Close extent 186
51113 Sep 22 23:15:17.677 DEBG 721 Close extent 186
51114 Sep 22 23:15:17.678 DEBG [2] It's time to notify for 721
51115 Sep 22 23:15:17.678 INFO Completion from [2] id:721 status:true
51116 Sep 22 23:15:17.678 INFO [722/752] Repair commands completed
51117 Sep 22 23:15:17.678 INFO Pop front: ReconcileIO { id: ReconciliationId(722), op: ExtentRepair { repair_id: ReconciliationId(722), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51118 Sep 22 23:15:17.678 INFO Sent repair work, now wait for resp
51119 Sep 22 23:15:17.678 INFO [0] received reconcile message
51120 Sep 22 23:15:17.678 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(722), op: ExtentRepair { repair_id: ReconciliationId(722), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51121 Sep 22 23:15:17.678 INFO [0] client ExtentRepair { repair_id: ReconciliationId(722), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51122 Sep 22 23:15:17.678 INFO [0] Sending repair request ReconciliationId(722)
51123 Sep 22 23:15:17.678 INFO [1] received reconcile message
51124 Sep 22 23:15:17.678 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(722), op: ExtentRepair { repair_id: ReconciliationId(722), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51125 Sep 22 23:15:17.678 INFO [1] client ExtentRepair { repair_id: ReconciliationId(722), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51126 Sep 22 23:15:17.678 INFO [1] No action required ReconciliationId(722)
51127 Sep 22 23:15:17.678 INFO [2] received reconcile message
51128 Sep 22 23:15:17.678 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(722), op: ExtentRepair { repair_id: ReconciliationId(722), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51129 Sep 22 23:15:17.678 INFO [2] client ExtentRepair { repair_id: ReconciliationId(722), extent_id: 186, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51130 Sep 22 23:15:17.678 INFO [2] No action required ReconciliationId(722)
51131 Sep 22 23:15:17.678 DEBG 722 Repair extent 186 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
51132 Sep 22 23:15:17.678 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/0BA.copy"
51133 Sep 22 23:15:17.740 INFO accepted connection, remote_addr: 127.0.0.1:52544, local_addr: 127.0.0.1:46213, task: repair
51134 Sep 22 23:15:17.741 TRCE incoming request, uri: /extent/186/files, method: GET, req_id: 11244d44-9860-4092-be8e-9b0f980528de, remote_addr: 127.0.0.1:52544, local_addr: 127.0.0.1:46213, task: repair
51135 Sep 22 23:15:17.741 INFO request completed, latency_us: 197, response_code: 200, uri: /extent/186/files, method: GET, req_id: 11244d44-9860-4092-be8e-9b0f980528de, remote_addr: 127.0.0.1:52544, local_addr: 127.0.0.1:46213, task: repair
51136 Sep 22 23:15:17.741 INFO eid:186 Found repair files: ["0BA", "0BA.db"]
51137 Sep 22 23:15:17.741 TRCE incoming request, uri: /newextent/186/data, method: GET, req_id: 4a474cbb-2561-4299-909a-78ac2b46ee6e, remote_addr: 127.0.0.1:52544, local_addr: 127.0.0.1:46213, task: repair
51138 Sep 22 23:15:17.742 INFO request completed, latency_us: 252, response_code: 200, uri: /newextent/186/data, method: GET, req_id: 4a474cbb-2561-4299-909a-78ac2b46ee6e, remote_addr: 127.0.0.1:52544, local_addr: 127.0.0.1:46213, task: repair
51139 Sep 22 23:15:17.746 TRCE incoming request, uri: /newextent/186/db, method: GET, req_id: a0a99c1f-37da-421f-9c68-4c79fcc799a3, remote_addr: 127.0.0.1:52544, local_addr: 127.0.0.1:46213, task: repair
51140 Sep 22 23:15:17.747 INFO request completed, latency_us: 292, response_code: 200, uri: /newextent/186/db, method: GET, req_id: a0a99c1f-37da-421f-9c68-4c79fcc799a3, remote_addr: 127.0.0.1:52544, local_addr: 127.0.0.1:46213, task: repair
51141 Sep 22 23:15:17.748 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/0BA.copy" to "/tmp/downstairs-vrx8aK6L/00/000/0BA.replace"
51142 Sep 22 23:15:17.748 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51143 Sep 22 23:15:17.749 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/0BA.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
51144 Sep 22 23:15:17.749 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0BA"
51145 Sep 22 23:15:17.749 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/0BA.db"
51146 Sep 22 23:15:17.749 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51147 Sep 22 23:15:17.749 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/0BA.replace" to "/tmp/downstairs-vrx8aK6L/00/000/0BA.completed"
51148 Sep 22 23:15:17.749 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51149 Sep 22 23:15:17.749 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51150 Sep 22 23:15:17.749 DEBG [0] It's time to notify for 722
51151 Sep 22 23:15:17.749 INFO Completion from [0] id:722 status:true
51152 Sep 22 23:15:17.749 INFO [723/752] Repair commands completed
51153 Sep 22 23:15:17.749 INFO Pop front: ReconcileIO { id: ReconciliationId(723), op: ExtentReopen { repair_id: ReconciliationId(723), extent_id: 186 }, state: ClientData([New, New, New]) }
51154 Sep 22 23:15:17.750 INFO Sent repair work, now wait for resp
51155 Sep 22 23:15:17.750 INFO [0] received reconcile message
51156 Sep 22 23:15:17.750 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(723), op: ExtentReopen { repair_id: ReconciliationId(723), extent_id: 186 }, state: ClientData([InProgress, New, New]) }, : downstairs
51157 Sep 22 23:15:17.750 INFO [0] client ExtentReopen { repair_id: ReconciliationId(723), extent_id: 186 }
51158 Sep 22 23:15:17.750 INFO [1] received reconcile message
51159 Sep 22 23:15:17.750 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(723), op: ExtentReopen { repair_id: ReconciliationId(723), extent_id: 186 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51160 Sep 22 23:15:17.750 INFO [1] client ExtentReopen { repair_id: ReconciliationId(723), extent_id: 186 }
51161 Sep 22 23:15:17.750 INFO [2] received reconcile message
51162 Sep 22 23:15:17.750 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(723), op: ExtentReopen { repair_id: ReconciliationId(723), extent_id: 186 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51163 Sep 22 23:15:17.750 INFO [2] client ExtentReopen { repair_id: ReconciliationId(723), extent_id: 186 }
51164 Sep 22 23:15:17.750 DEBG 723 Reopen extent 186
51165 Sep 22 23:15:17.750 DEBG 723 Reopen extent 186
51166 Sep 22 23:15:17.751 DEBG 723 Reopen extent 186
51167 Sep 22 23:15:17.751 DEBG [2] It's time to notify for 723
51168 Sep 22 23:15:17.752 INFO Completion from [2] id:723 status:true
51169 Sep 22 23:15:17.752 INFO [724/752] Repair commands completed
51170 Sep 22 23:15:17.752 INFO Pop front: ReconcileIO { id: ReconciliationId(724), op: ExtentFlush { repair_id: ReconciliationId(724), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51171 Sep 22 23:15:17.752 INFO Sent repair work, now wait for resp
51172 Sep 22 23:15:17.752 INFO [0] received reconcile message
51173 Sep 22 23:15:17.752 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(724), op: ExtentFlush { repair_id: ReconciliationId(724), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51174 Sep 22 23:15:17.752 INFO [0] client ExtentFlush { repair_id: ReconciliationId(724), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51175 Sep 22 23:15:17.752 INFO [1] received reconcile message
51176 Sep 22 23:15:17.752 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(724), op: ExtentFlush { repair_id: ReconciliationId(724), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51177 Sep 22 23:15:17.752 INFO [1] client ExtentFlush { repair_id: ReconciliationId(724), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51178 Sep 22 23:15:17.752 INFO [2] received reconcile message
51179 Sep 22 23:15:17.752 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(724), op: ExtentFlush { repair_id: ReconciliationId(724), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51180 Sep 22 23:15:17.752 INFO [2] client ExtentFlush { repair_id: ReconciliationId(724), extent_id: 108, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51181 Sep 22 23:15:17.752 DEBG 724 Flush extent 108 with f:2 g:2
51182 Sep 22 23:15:17.752 DEBG Flush just extent 108 with f:2 and g:2
51183 Sep 22 23:15:17.752 DEBG [1] It's time to notify for 724
51184 Sep 22 23:15:17.752 INFO Completion from [1] id:724 status:true
51185 Sep 22 23:15:17.752 INFO [725/752] Repair commands completed
51186 Sep 22 23:15:17.752 INFO Pop front: ReconcileIO { id: ReconciliationId(725), op: ExtentClose { repair_id: ReconciliationId(725), extent_id: 108 }, state: ClientData([New, New, New]) }
51187 Sep 22 23:15:17.752 INFO Sent repair work, now wait for resp
51188 Sep 22 23:15:17.752 INFO [0] received reconcile message
51189 Sep 22 23:15:17.752 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(725), op: ExtentClose { repair_id: ReconciliationId(725), extent_id: 108 }, state: ClientData([InProgress, New, New]) }, : downstairs
51190 Sep 22 23:15:17.752 INFO [0] client ExtentClose { repair_id: ReconciliationId(725), extent_id: 108 }
51191 Sep 22 23:15:17.752 INFO [1] received reconcile message
51192 Sep 22 23:15:17.752 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(725), op: ExtentClose { repair_id: ReconciliationId(725), extent_id: 108 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51193 Sep 22 23:15:17.752 INFO [1] client ExtentClose { repair_id: ReconciliationId(725), extent_id: 108 }
51194 Sep 22 23:15:17.752 INFO [2] received reconcile message
51195 Sep 22 23:15:17.752 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(725), op: ExtentClose { repair_id: ReconciliationId(725), extent_id: 108 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51196 Sep 22 23:15:17.752 INFO [2] client ExtentClose { repair_id: ReconciliationId(725), extent_id: 108 }
51197 Sep 22 23:15:17.752 DEBG 725 Close extent 108
51198 Sep 22 23:15:17.753 DEBG 725 Close extent 108
51199 Sep 22 23:15:17.753 DEBG 725 Close extent 108
51200 Sep 22 23:15:17.753 DEBG [2] It's time to notify for 725
51201 Sep 22 23:15:17.753 INFO Completion from [2] id:725 status:true
51202 Sep 22 23:15:17.753 INFO [726/752] Repair commands completed
51203 Sep 22 23:15:17.754 INFO Pop front: ReconcileIO { id: ReconciliationId(726), op: ExtentRepair { repair_id: ReconciliationId(726), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51204 Sep 22 23:15:17.754 INFO Sent repair work, now wait for resp
51205 Sep 22 23:15:17.754 INFO [0] received reconcile message
51206 Sep 22 23:15:17.754 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(726), op: ExtentRepair { repair_id: ReconciliationId(726), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51207 Sep 22 23:15:17.754 INFO [0] client ExtentRepair { repair_id: ReconciliationId(726), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51208 Sep 22 23:15:17.754 INFO [0] Sending repair request ReconciliationId(726)
51209 Sep 22 23:15:17.754 INFO [1] received reconcile message
51210 Sep 22 23:15:17.754 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(726), op: ExtentRepair { repair_id: ReconciliationId(726), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51211 Sep 22 23:15:17.754 INFO [1] client ExtentRepair { repair_id: ReconciliationId(726), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51212 Sep 22 23:15:17.754 INFO [1] No action required ReconciliationId(726)
51213 Sep 22 23:15:17.754 INFO [2] received reconcile message
51214 Sep 22 23:15:17.754 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(726), op: ExtentRepair { repair_id: ReconciliationId(726), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51215 Sep 22 23:15:17.754 INFO [2] client ExtentRepair { repair_id: ReconciliationId(726), extent_id: 108, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51216 Sep 22 23:15:17.754 INFO [2] No action required ReconciliationId(726)
51217 Sep 22 23:15:17.754 DEBG 726 Repair extent 108 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
51218 Sep 22 23:15:17.754 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/06C.copy"
51219 Sep 22 23:15:17.817 INFO accepted connection, remote_addr: 127.0.0.1:60392, local_addr: 127.0.0.1:46213, task: repair
51220 Sep 22 23:15:17.817 TRCE incoming request, uri: /extent/108/files, method: GET, req_id: 90e0864b-9dea-4701-b38f-ca91371dc188, remote_addr: 127.0.0.1:60392, local_addr: 127.0.0.1:46213, task: repair
51221 Sep 22 23:15:17.817 INFO request completed, latency_us: 191, response_code: 200, uri: /extent/108/files, method: GET, req_id: 90e0864b-9dea-4701-b38f-ca91371dc188, remote_addr: 127.0.0.1:60392, local_addr: 127.0.0.1:46213, task: repair
51222 Sep 22 23:15:17.817 INFO eid:108 Found repair files: ["06C", "06C.db"]
51223 Sep 22 23:15:17.818 TRCE incoming request, uri: /newextent/108/data, method: GET, req_id: c6b0710d-19cb-494e-a0b5-0f0b7808191b, remote_addr: 127.0.0.1:60392, local_addr: 127.0.0.1:46213, task: repair
51224 Sep 22 23:15:17.818 INFO request completed, latency_us: 340, response_code: 200, uri: /newextent/108/data, method: GET, req_id: c6b0710d-19cb-494e-a0b5-0f0b7808191b, remote_addr: 127.0.0.1:60392, local_addr: 127.0.0.1:46213, task: repair
51225 Sep 22 23:15:17.823 TRCE incoming request, uri: /newextent/108/db, method: GET, req_id: 2dee5474-5181-45cf-b510-030116f445c2, remote_addr: 127.0.0.1:60392, local_addr: 127.0.0.1:46213, task: repair
51226 Sep 22 23:15:17.823 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/108/db, method: GET, req_id: 2dee5474-5181-45cf-b510-030116f445c2, remote_addr: 127.0.0.1:60392, local_addr: 127.0.0.1:46213, task: repair
51227 Sep 22 23:15:17.825 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/06C.copy" to "/tmp/downstairs-vrx8aK6L/00/000/06C.replace"
51228 Sep 22 23:15:17.825 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51229 Sep 22 23:15:17.825 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/06C.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
51230 Sep 22 23:15:17.826 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/06C"
51231 Sep 22 23:15:17.826 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/06C.db"
51232 Sep 22 23:15:17.826 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51233 Sep 22 23:15:17.826 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/06C.replace" to "/tmp/downstairs-vrx8aK6L/00/000/06C.completed"
51234 Sep 22 23:15:17.826 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51235 Sep 22 23:15:17.826 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51236 Sep 22 23:15:17.826 DEBG [0] It's time to notify for 726
51237 Sep 22 23:15:17.826 INFO Completion from [0] id:726 status:true
51238 Sep 22 23:15:17.826 INFO [727/752] Repair commands completed
51239 Sep 22 23:15:17.826 INFO Pop front: ReconcileIO { id: ReconciliationId(727), op: ExtentReopen { repair_id: ReconciliationId(727), extent_id: 108 }, state: ClientData([New, New, New]) }
51240 Sep 22 23:15:17.826 INFO Sent repair work, now wait for resp
51241 Sep 22 23:15:17.826 INFO [0] received reconcile message
51242 Sep 22 23:15:17.826 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(727), op: ExtentReopen { repair_id: ReconciliationId(727), extent_id: 108 }, state: ClientData([InProgress, New, New]) }, : downstairs
51243 Sep 22 23:15:17.826 INFO [0] client ExtentReopen { repair_id: ReconciliationId(727), extent_id: 108 }
51244 Sep 22 23:15:17.826 INFO [1] received reconcile message
51245 Sep 22 23:15:17.826 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(727), op: ExtentReopen { repair_id: ReconciliationId(727), extent_id: 108 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51246 Sep 22 23:15:17.826 INFO [1] client ExtentReopen { repair_id: ReconciliationId(727), extent_id: 108 }
51247 Sep 22 23:15:17.826 INFO [2] received reconcile message
51248 Sep 22 23:15:17.826 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(727), op: ExtentReopen { repair_id: ReconciliationId(727), extent_id: 108 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51249 Sep 22 23:15:17.826 INFO [2] client ExtentReopen { repair_id: ReconciliationId(727), extent_id: 108 }
51250 Sep 22 23:15:17.827 DEBG 727 Reopen extent 108
51251 Sep 22 23:15:17.827 DEBG 727 Reopen extent 108
51252 Sep 22 23:15:17.828 DEBG 727 Reopen extent 108
51253 Sep 22 23:15:17.828 DEBG [2] It's time to notify for 727
51254 Sep 22 23:15:17.828 INFO Completion from [2] id:727 status:true
51255 Sep 22 23:15:17.828 INFO [728/752] Repair commands completed
51256 Sep 22 23:15:17.828 INFO Pop front: ReconcileIO { id: ReconciliationId(728), op: ExtentFlush { repair_id: ReconciliationId(728), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51257 Sep 22 23:15:17.828 INFO Sent repair work, now wait for resp
51258 Sep 22 23:15:17.828 INFO [0] received reconcile message
51259 Sep 22 23:15:17.828 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(728), op: ExtentFlush { repair_id: ReconciliationId(728), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51260 Sep 22 23:15:17.828 INFO [0] client ExtentFlush { repair_id: ReconciliationId(728), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51261 Sep 22 23:15:17.828 INFO [1] received reconcile message
51262 Sep 22 23:15:17.828 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(728), op: ExtentFlush { repair_id: ReconciliationId(728), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51263 Sep 22 23:15:17.828 INFO [1] client ExtentFlush { repair_id: ReconciliationId(728), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51264 Sep 22 23:15:17.829 INFO [2] received reconcile message
51265 Sep 22 23:15:17.829 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(728), op: ExtentFlush { repair_id: ReconciliationId(728), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51266 Sep 22 23:15:17.829 INFO [2] client ExtentFlush { repair_id: ReconciliationId(728), extent_id: 109, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51267 Sep 22 23:15:17.829 DEBG 728 Flush extent 109 with f:2 g:2
51268 Sep 22 23:15:17.829 DEBG Flush just extent 109 with f:2 and g:2
51269 Sep 22 23:15:17.829 DEBG [1] It's time to notify for 728
51270 Sep 22 23:15:17.829 INFO Completion from [1] id:728 status:true
51271 Sep 22 23:15:17.829 INFO [729/752] Repair commands completed
51272 Sep 22 23:15:17.829 INFO Pop front: ReconcileIO { id: ReconciliationId(729), op: ExtentClose { repair_id: ReconciliationId(729), extent_id: 109 }, state: ClientData([New, New, New]) }
51273 Sep 22 23:15:17.829 INFO Sent repair work, now wait for resp
51274 Sep 22 23:15:17.829 INFO [0] received reconcile message
51275 Sep 22 23:15:17.829 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(729), op: ExtentClose { repair_id: ReconciliationId(729), extent_id: 109 }, state: ClientData([InProgress, New, New]) }, : downstairs
51276 Sep 22 23:15:17.829 INFO [0] client ExtentClose { repair_id: ReconciliationId(729), extent_id: 109 }
51277 Sep 22 23:15:17.829 INFO [1] received reconcile message
51278 Sep 22 23:15:17.829 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(729), op: ExtentClose { repair_id: ReconciliationId(729), extent_id: 109 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51279 Sep 22 23:15:17.829 INFO [1] client ExtentClose { repair_id: ReconciliationId(729), extent_id: 109 }
51280 Sep 22 23:15:17.829 INFO [2] received reconcile message
51281 Sep 22 23:15:17.829 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(729), op: ExtentClose { repair_id: ReconciliationId(729), extent_id: 109 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51282 Sep 22 23:15:17.829 INFO [2] client ExtentClose { repair_id: ReconciliationId(729), extent_id: 109 }
51283 Sep 22 23:15:17.829 DEBG 729 Close extent 109
51284 Sep 22 23:15:17.829 DEBG 729 Close extent 109
51285 Sep 22 23:15:17.830 DEBG 729 Close extent 109
51286 Sep 22 23:15:17.830 DEBG [2] It's time to notify for 729
51287 Sep 22 23:15:17.830 INFO Completion from [2] id:729 status:true
51288 Sep 22 23:15:17.830 INFO [730/752] Repair commands completed
51289 Sep 22 23:15:17.830 INFO Pop front: ReconcileIO { id: ReconciliationId(730), op: ExtentRepair { repair_id: ReconciliationId(730), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51290 Sep 22 23:15:17.830 INFO Sent repair work, now wait for resp
51291 Sep 22 23:15:17.830 INFO [0] received reconcile message
51292 Sep 22 23:15:17.830 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(730), op: ExtentRepair { repair_id: ReconciliationId(730), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51293 Sep 22 23:15:17.830 INFO [0] client ExtentRepair { repair_id: ReconciliationId(730), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51294 Sep 22 23:15:17.830 INFO [0] Sending repair request ReconciliationId(730)
51295 Sep 22 23:15:17.830 INFO [1] received reconcile message
51296 Sep 22 23:15:17.830 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(730), op: ExtentRepair { repair_id: ReconciliationId(730), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51297 Sep 22 23:15:17.830 INFO [1] client ExtentRepair { repair_id: ReconciliationId(730), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51298 Sep 22 23:15:17.831 INFO [1] No action required ReconciliationId(730)
51299 Sep 22 23:15:17.831 INFO [2] received reconcile message
51300 Sep 22 23:15:17.831 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(730), op: ExtentRepair { repair_id: ReconciliationId(730), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51301 Sep 22 23:15:17.831 INFO [2] client ExtentRepair { repair_id: ReconciliationId(730), extent_id: 109, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51302 Sep 22 23:15:17.831 INFO [2] No action required ReconciliationId(730)
51303 Sep 22 23:15:17.831 DEBG 730 Repair extent 109 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
51304 Sep 22 23:15:17.831 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/06D.copy"
51305 Sep 22 23:15:17.894 INFO accepted connection, remote_addr: 127.0.0.1:62743, local_addr: 127.0.0.1:46213, task: repair
51306 Sep 22 23:15:17.894 TRCE incoming request, uri: /extent/109/files, method: GET, req_id: ae0ac088-e3ba-4d0f-8c98-820b95913887, remote_addr: 127.0.0.1:62743, local_addr: 127.0.0.1:46213, task: repair
51307 Sep 22 23:15:17.895 INFO request completed, latency_us: 193, response_code: 200, uri: /extent/109/files, method: GET, req_id: ae0ac088-e3ba-4d0f-8c98-820b95913887, remote_addr: 127.0.0.1:62743, local_addr: 127.0.0.1:46213, task: repair
51308 Sep 22 23:15:17.895 INFO eid:109 Found repair files: ["06D", "06D.db"]
51309 Sep 22 23:15:17.895 TRCE incoming request, uri: /newextent/109/data, method: GET, req_id: ed958c89-ec9c-4ebe-bf0c-82f6f455cbff, remote_addr: 127.0.0.1:62743, local_addr: 127.0.0.1:46213, task: repair
51310 Sep 22 23:15:17.895 INFO request completed, latency_us: 301, response_code: 200, uri: /newextent/109/data, method: GET, req_id: ed958c89-ec9c-4ebe-bf0c-82f6f455cbff, remote_addr: 127.0.0.1:62743, local_addr: 127.0.0.1:46213, task: repair
51311 Sep 22 23:15:17.900 TRCE incoming request, uri: /newextent/109/db, method: GET, req_id: 0f231f08-3577-45af-84e1-f6a62d5a334b, remote_addr: 127.0.0.1:62743, local_addr: 127.0.0.1:46213, task: repair
51312 Sep 22 23:15:17.901 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/109/db, method: GET, req_id: 0f231f08-3577-45af-84e1-f6a62d5a334b, remote_addr: 127.0.0.1:62743, local_addr: 127.0.0.1:46213, task: repair
51313 Sep 22 23:15:17.902 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/06D.copy" to "/tmp/downstairs-vrx8aK6L/00/000/06D.replace"
51314 Sep 22 23:15:17.902 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51315 Sep 22 23:15:17.903 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/06D.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
51316 Sep 22 23:15:17.903 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/06D"
51317 Sep 22 23:15:17.903 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/06D.db"
51318 Sep 22 23:15:17.903 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51319 Sep 22 23:15:17.903 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/06D.replace" to "/tmp/downstairs-vrx8aK6L/00/000/06D.completed"
51320 Sep 22 23:15:17.903 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51321 Sep 22 23:15:17.903 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51322 Sep 22 23:15:17.903 DEBG [0] It's time to notify for 730
51323 Sep 22 23:15:17.903 INFO Completion from [0] id:730 status:true
51324 Sep 22 23:15:17.903 INFO [731/752] Repair commands completed
51325 Sep 22 23:15:17.903 INFO Pop front: ReconcileIO { id: ReconciliationId(731), op: ExtentReopen { repair_id: ReconciliationId(731), extent_id: 109 }, state: ClientData([New, New, New]) }
51326 Sep 22 23:15:17.903 INFO Sent repair work, now wait for resp
51327 Sep 22 23:15:17.903 INFO [0] received reconcile message
51328 Sep 22 23:15:17.903 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(731), op: ExtentReopen { repair_id: ReconciliationId(731), extent_id: 109 }, state: ClientData([InProgress, New, New]) }, : downstairs
51329 Sep 22 23:15:17.903 INFO [0] client ExtentReopen { repair_id: ReconciliationId(731), extent_id: 109 }
51330 Sep 22 23:15:17.904 INFO [1] received reconcile message
51331 Sep 22 23:15:17.904 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(731), op: ExtentReopen { repair_id: ReconciliationId(731), extent_id: 109 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51332 Sep 22 23:15:17.904 INFO [1] client ExtentReopen { repair_id: ReconciliationId(731), extent_id: 109 }
51333 Sep 22 23:15:17.904 INFO [2] received reconcile message
51334 Sep 22 23:15:17.904 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(731), op: ExtentReopen { repair_id: ReconciliationId(731), extent_id: 109 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51335 Sep 22 23:15:17.904 INFO [2] client ExtentReopen { repair_id: ReconciliationId(731), extent_id: 109 }
51336 Sep 22 23:15:17.904 DEBG 731 Reopen extent 109
51337 Sep 22 23:15:17.904 DEBG 731 Reopen extent 109
51338 Sep 22 23:15:17.905 DEBG 731 Reopen extent 109
51339 Sep 22 23:15:17.905 DEBG [2] It's time to notify for 731
51340 Sep 22 23:15:17.906 INFO Completion from [2] id:731 status:true
51341 Sep 22 23:15:17.906 INFO [732/752] Repair commands completed
51342 Sep 22 23:15:17.906 INFO Pop front: ReconcileIO { id: ReconciliationId(732), op: ExtentFlush { repair_id: ReconciliationId(732), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51343 Sep 22 23:15:17.906 INFO Sent repair work, now wait for resp
51344 Sep 22 23:15:17.906 INFO [0] received reconcile message
51345 Sep 22 23:15:17.906 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(732), op: ExtentFlush { repair_id: ReconciliationId(732), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51346 Sep 22 23:15:17.906 INFO [0] client ExtentFlush { repair_id: ReconciliationId(732), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51347 Sep 22 23:15:17.906 INFO [1] received reconcile message
51348 Sep 22 23:15:17.906 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(732), op: ExtentFlush { repair_id: ReconciliationId(732), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51349 Sep 22 23:15:17.906 INFO [1] client ExtentFlush { repair_id: ReconciliationId(732), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51350 Sep 22 23:15:17.906 INFO [2] received reconcile message
51351 Sep 22 23:15:17.906 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(732), op: ExtentFlush { repair_id: ReconciliationId(732), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51352 Sep 22 23:15:17.906 INFO [2] client ExtentFlush { repair_id: ReconciliationId(732), extent_id: 104, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51353 Sep 22 23:15:17.906 DEBG 732 Flush extent 104 with f:2 g:2
51354 Sep 22 23:15:17.906 DEBG Flush just extent 104 with f:2 and g:2
51355 Sep 22 23:15:17.906 DEBG [1] It's time to notify for 732
51356 Sep 22 23:15:17.906 INFO Completion from [1] id:732 status:true
51357 Sep 22 23:15:17.906 INFO [733/752] Repair commands completed
51358 Sep 22 23:15:17.906 INFO Pop front: ReconcileIO { id: ReconciliationId(733), op: ExtentClose { repair_id: ReconciliationId(733), extent_id: 104 }, state: ClientData([New, New, New]) }
51359 Sep 22 23:15:17.906 INFO Sent repair work, now wait for resp
51360 Sep 22 23:15:17.906 INFO [0] received reconcile message
51361 Sep 22 23:15:17.906 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(733), op: ExtentClose { repair_id: ReconciliationId(733), extent_id: 104 }, state: ClientData([InProgress, New, New]) }, : downstairs
51362 Sep 22 23:15:17.906 INFO [0] client ExtentClose { repair_id: ReconciliationId(733), extent_id: 104 }
51363 Sep 22 23:15:17.906 INFO [1] received reconcile message
51364 Sep 22 23:15:17.906 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(733), op: ExtentClose { repair_id: ReconciliationId(733), extent_id: 104 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51365 Sep 22 23:15:17.906 INFO [1] client ExtentClose { repair_id: ReconciliationId(733), extent_id: 104 }
51366 Sep 22 23:15:17.906 INFO [2] received reconcile message
51367 Sep 22 23:15:17.906 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(733), op: ExtentClose { repair_id: ReconciliationId(733), extent_id: 104 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51368 Sep 22 23:15:17.906 INFO [2] client ExtentClose { repair_id: ReconciliationId(733), extent_id: 104 }
51369 Sep 22 23:15:17.907 DEBG 733 Close extent 104
51370 Sep 22 23:15:17.907 DEBG 733 Close extent 104
51371 Sep 22 23:15:17.907 DEBG 733 Close extent 104
51372 Sep 22 23:15:17.907 DEBG [2] It's time to notify for 733
51373 Sep 22 23:15:17.908 INFO Completion from [2] id:733 status:true
51374 Sep 22 23:15:17.908 INFO [734/752] Repair commands completed
51375 Sep 22 23:15:17.908 INFO Pop front: ReconcileIO { id: ReconciliationId(734), op: ExtentRepair { repair_id: ReconciliationId(734), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51376 Sep 22 23:15:17.908 INFO Sent repair work, now wait for resp
51377 Sep 22 23:15:17.908 INFO [0] received reconcile message
51378 Sep 22 23:15:17.908 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(734), op: ExtentRepair { repair_id: ReconciliationId(734), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51379 Sep 22 23:15:17.908 INFO [0] client ExtentRepair { repair_id: ReconciliationId(734), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51380 Sep 22 23:15:17.908 INFO [0] Sending repair request ReconciliationId(734)
51381 Sep 22 23:15:17.908 INFO [1] received reconcile message
51382 Sep 22 23:15:17.908 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(734), op: ExtentRepair { repair_id: ReconciliationId(734), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51383 Sep 22 23:15:17.908 INFO [1] client ExtentRepair { repair_id: ReconciliationId(734), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51384 Sep 22 23:15:17.908 INFO [1] No action required ReconciliationId(734)
51385 Sep 22 23:15:17.908 INFO [2] received reconcile message
51386 Sep 22 23:15:17.908 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(734), op: ExtentRepair { repair_id: ReconciliationId(734), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51387 Sep 22 23:15:17.908 INFO [2] client ExtentRepair { repair_id: ReconciliationId(734), extent_id: 104, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51388 Sep 22 23:15:17.908 INFO [2] No action required ReconciliationId(734)
51389 Sep 22 23:15:17.908 DEBG 734 Repair extent 104 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
51390 Sep 22 23:15:17.908 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/068.copy"
51391 Sep 22 23:15:17.961 DEBG [1] Read AckReady 1092, : downstairs
51392 Sep 22 23:15:17.962 DEBG up_ds_listen was notified
51393 Sep 22 23:15:17.963 DEBG up_ds_listen process 1092
51394 Sep 22 23:15:17.963 DEBG [A] ack job 1092:93, : downstairs
51395 Sep 22 23:15:17.972 INFO accepted connection, remote_addr: 127.0.0.1:49958, local_addr: 127.0.0.1:46213, task: repair
51396 Sep 22 23:15:17.972 TRCE incoming request, uri: /extent/104/files, method: GET, req_id: 79b6d2a3-d936-4eed-a693-b4eee9fedc57, remote_addr: 127.0.0.1:49958, local_addr: 127.0.0.1:46213, task: repair
51397 Sep 22 23:15:17.972 INFO request completed, latency_us: 202, response_code: 200, uri: /extent/104/files, method: GET, req_id: 79b6d2a3-d936-4eed-a693-b4eee9fedc57, remote_addr: 127.0.0.1:49958, local_addr: 127.0.0.1:46213, task: repair
51398 Sep 22 23:15:17.972 INFO eid:104 Found repair files: ["068", "068.db"]
51399 Sep 22 23:15:17.973 TRCE incoming request, uri: /newextent/104/data, method: GET, req_id: 71ea988e-ecf0-4760-b633-fee5a72f2bdc, remote_addr: 127.0.0.1:49958, local_addr: 127.0.0.1:46213, task: repair
51400 Sep 22 23:15:17.973 INFO request completed, latency_us: 325, response_code: 200, uri: /newextent/104/data, method: GET, req_id: 71ea988e-ecf0-4760-b633-fee5a72f2bdc, remote_addr: 127.0.0.1:49958, local_addr: 127.0.0.1:46213, task: repair
51401 Sep 22 23:15:17.978 TRCE incoming request, uri: /newextent/104/db, method: GET, req_id: 81dac8df-1e7b-4bf3-b0e5-a754a5ba78ec, remote_addr: 127.0.0.1:49958, local_addr: 127.0.0.1:46213, task: repair
51402 Sep 22 23:15:17.978 INFO request completed, latency_us: 294, response_code: 200, uri: /newextent/104/db, method: GET, req_id: 81dac8df-1e7b-4bf3-b0e5-a754a5ba78ec, remote_addr: 127.0.0.1:49958, local_addr: 127.0.0.1:46213, task: repair
51403 Sep 22 23:15:17.979 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/068.copy" to "/tmp/downstairs-vrx8aK6L/00/000/068.replace"
51404 Sep 22 23:15:17.979 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51405 Sep 22 23:15:17.980 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/068.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
51406 Sep 22 23:15:17.980 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/068"
51407 Sep 22 23:15:17.981 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/068.db"
51408 Sep 22 23:15:17.981 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51409 Sep 22 23:15:17.981 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/068.replace" to "/tmp/downstairs-vrx8aK6L/00/000/068.completed"
51410 Sep 22 23:15:17.981 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51411 Sep 22 23:15:17.981 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51412 Sep 22 23:15:17.981 DEBG [0] It's time to notify for 734
51413 Sep 22 23:15:17.981 INFO Completion from [0] id:734 status:true
51414 Sep 22 23:15:17.981 INFO [735/752] Repair commands completed
51415 Sep 22 23:15:17.981 INFO Pop front: ReconcileIO { id: ReconciliationId(735), op: ExtentReopen { repair_id: ReconciliationId(735), extent_id: 104 }, state: ClientData([New, New, New]) }
51416 Sep 22 23:15:17.981 INFO Sent repair work, now wait for resp
51417 Sep 22 23:15:17.981 INFO [0] received reconcile message
51418 Sep 22 23:15:17.981 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(735), op: ExtentReopen { repair_id: ReconciliationId(735), extent_id: 104 }, state: ClientData([InProgress, New, New]) }, : downstairs
51419 Sep 22 23:15:17.981 INFO [0] client ExtentReopen { repair_id: ReconciliationId(735), extent_id: 104 }
51420 Sep 22 23:15:17.981 INFO [1] received reconcile message
51421 Sep 22 23:15:17.981 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(735), op: ExtentReopen { repair_id: ReconciliationId(735), extent_id: 104 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51422 Sep 22 23:15:17.981 INFO [1] client ExtentReopen { repair_id: ReconciliationId(735), extent_id: 104 }
51423 Sep 22 23:15:17.981 INFO [2] received reconcile message
51424 Sep 22 23:15:17.981 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(735), op: ExtentReopen { repair_id: ReconciliationId(735), extent_id: 104 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51425 Sep 22 23:15:17.981 INFO [2] client ExtentReopen { repair_id: ReconciliationId(735), extent_id: 104 }
51426 Sep 22 23:15:17.981 DEBG 735 Reopen extent 104
51427 Sep 22 23:15:17.982 DEBG 735 Reopen extent 104
51428 Sep 22 23:15:17.983 DEBG 735 Reopen extent 104
51429 Sep 22 23:15:17.983 DEBG [2] It's time to notify for 735
51430 Sep 22 23:15:17.983 INFO Completion from [2] id:735 status:true
51431 Sep 22 23:15:17.983 INFO [736/752] Repair commands completed
51432 Sep 22 23:15:17.983 INFO Pop front: ReconcileIO { id: ReconciliationId(736), op: ExtentFlush { repair_id: ReconciliationId(736), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51433 Sep 22 23:15:17.983 INFO Sent repair work, now wait for resp
51434 Sep 22 23:15:17.983 INFO [0] received reconcile message
51435 Sep 22 23:15:17.983 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(736), op: ExtentFlush { repair_id: ReconciliationId(736), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51436 Sep 22 23:15:17.983 INFO [0] client ExtentFlush { repair_id: ReconciliationId(736), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51437 Sep 22 23:15:17.983 INFO [1] received reconcile message
51438 Sep 22 23:15:17.983 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(736), op: ExtentFlush { repair_id: ReconciliationId(736), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51439 Sep 22 23:15:17.983 INFO [1] client ExtentFlush { repair_id: ReconciliationId(736), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51440 Sep 22 23:15:17.983 INFO [2] received reconcile message
51441 Sep 22 23:15:17.983 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(736), op: ExtentFlush { repair_id: ReconciliationId(736), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51442 Sep 22 23:15:17.984 INFO [2] client ExtentFlush { repair_id: ReconciliationId(736), extent_id: 101, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51443 Sep 22 23:15:17.984 DEBG 736 Flush extent 101 with f:2 g:2
51444 Sep 22 23:15:17.984 DEBG Flush just extent 101 with f:2 and g:2
51445 Sep 22 23:15:17.984 DEBG [1] It's time to notify for 736
51446 Sep 22 23:15:17.984 INFO Completion from [1] id:736 status:true
51447 Sep 22 23:15:17.984 INFO [737/752] Repair commands completed
51448 Sep 22 23:15:17.984 INFO Pop front: ReconcileIO { id: ReconciliationId(737), op: ExtentClose { repair_id: ReconciliationId(737), extent_id: 101 }, state: ClientData([New, New, New]) }
51449 Sep 22 23:15:17.984 INFO Sent repair work, now wait for resp
51450 Sep 22 23:15:17.984 INFO [0] received reconcile message
51451 Sep 22 23:15:17.984 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(737), op: ExtentClose { repair_id: ReconciliationId(737), extent_id: 101 }, state: ClientData([InProgress, New, New]) }, : downstairs
51452 Sep 22 23:15:17.984 INFO [0] client ExtentClose { repair_id: ReconciliationId(737), extent_id: 101 }
51453 Sep 22 23:15:17.984 INFO [1] received reconcile message
51454 Sep 22 23:15:17.984 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(737), op: ExtentClose { repair_id: ReconciliationId(737), extent_id: 101 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51455 Sep 22 23:15:17.984 INFO [1] client ExtentClose { repair_id: ReconciliationId(737), extent_id: 101 }
51456 Sep 22 23:15:17.984 INFO [2] received reconcile message
51457 Sep 22 23:15:17.984 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(737), op: ExtentClose { repair_id: ReconciliationId(737), extent_id: 101 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51458 Sep 22 23:15:17.984 INFO [2] client ExtentClose { repair_id: ReconciliationId(737), extent_id: 101 }
51459 Sep 22 23:15:17.984 DEBG 737 Close extent 101
51460 Sep 22 23:15:17.984 DEBG 737 Close extent 101
51461 Sep 22 23:15:17.985 DEBG 737 Close extent 101
51462 Sep 22 23:15:17.985 DEBG [2] It's time to notify for 737
51463 Sep 22 23:15:17.985 INFO Completion from [2] id:737 status:true
51464 Sep 22 23:15:17.985 INFO [738/752] Repair commands completed
51465 Sep 22 23:15:17.985 INFO Pop front: ReconcileIO { id: ReconciliationId(738), op: ExtentRepair { repair_id: ReconciliationId(738), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51466 Sep 22 23:15:17.985 INFO Sent repair work, now wait for resp
51467 Sep 22 23:15:17.985 INFO [0] received reconcile message
51468 Sep 22 23:15:17.985 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(738), op: ExtentRepair { repair_id: ReconciliationId(738), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51469 Sep 22 23:15:17.985 INFO [0] client ExtentRepair { repair_id: ReconciliationId(738), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51470 Sep 22 23:15:17.985 INFO [0] Sending repair request ReconciliationId(738)
51471 Sep 22 23:15:17.985 INFO [1] received reconcile message
51472 Sep 22 23:15:17.985 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(738), op: ExtentRepair { repair_id: ReconciliationId(738), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51473 Sep 22 23:15:17.985 INFO [1] client ExtentRepair { repair_id: ReconciliationId(738), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51474 Sep 22 23:15:17.985 INFO [1] No action required ReconciliationId(738)
51475 Sep 22 23:15:17.986 INFO [2] received reconcile message
51476 Sep 22 23:15:17.986 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(738), op: ExtentRepair { repair_id: ReconciliationId(738), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51477 Sep 22 23:15:17.986 INFO [2] client ExtentRepair { repair_id: ReconciliationId(738), extent_id: 101, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51478 Sep 22 23:15:17.986 INFO [2] No action required ReconciliationId(738)
51479 Sep 22 23:15:17.986 DEBG 738 Repair extent 101 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
51480 Sep 22 23:15:17.986 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/065.copy"
51481 Sep 22 23:15:18.016 DEBG up_ds_listen checked 1 jobs, back to waiting
51482 Sep 22 23:15:18.019 DEBG Flush :1093 extent_limit None deps:[JobId(1092)] res:true f:36 g:1
51483 Sep 22 23:15:18.019 INFO [lossy] sleeping 1 second
51484 Sep 22 23:15:18.025 DEBG Read :1092 deps:[JobId(1091)] res:true
51485 Sep 22 23:15:18.049 INFO accepted connection, remote_addr: 127.0.0.1:39584, local_addr: 127.0.0.1:46213, task: repair
51486 Sep 22 23:15:18.050 TRCE incoming request, uri: /extent/101/files, method: GET, req_id: b42ac015-5ece-42e9-9c01-471c121351f3, remote_addr: 127.0.0.1:39584, local_addr: 127.0.0.1:46213, task: repair
51487 Sep 22 23:15:18.050 INFO request completed, latency_us: 236, response_code: 200, uri: /extent/101/files, method: GET, req_id: b42ac015-5ece-42e9-9c01-471c121351f3, remote_addr: 127.0.0.1:39584, local_addr: 127.0.0.1:46213, task: repair
51488 Sep 22 23:15:18.050 INFO eid:101 Found repair files: ["065", "065.db"]
51489 Sep 22 23:15:18.050 TRCE incoming request, uri: /newextent/101/data, method: GET, req_id: fa2a560b-2358-4083-9284-b12aac1957ce, remote_addr: 127.0.0.1:39584, local_addr: 127.0.0.1:46213, task: repair
51490 Sep 22 23:15:18.051 INFO request completed, latency_us: 346, response_code: 200, uri: /newextent/101/data, method: GET, req_id: fa2a560b-2358-4083-9284-b12aac1957ce, remote_addr: 127.0.0.1:39584, local_addr: 127.0.0.1:46213, task: repair
51491 Sep 22 23:15:18.056 TRCE incoming request, uri: /newextent/101/db, method: GET, req_id: fd527bda-8071-4e73-bc9e-67997877c3d5, remote_addr: 127.0.0.1:39584, local_addr: 127.0.0.1:46213, task: repair
51492 Sep 22 23:15:18.056 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/101/db, method: GET, req_id: fd527bda-8071-4e73-bc9e-67997877c3d5, remote_addr: 127.0.0.1:39584, local_addr: 127.0.0.1:46213, task: repair
51493 Sep 22 23:15:18.057 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/065.copy" to "/tmp/downstairs-vrx8aK6L/00/000/065.replace"
51494 Sep 22 23:15:18.057 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51495 Sep 22 23:15:18.059 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/065.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
51496 Sep 22 23:15:18.059 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/065"
51497 Sep 22 23:15:18.059 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/065.db"
51498 Sep 22 23:15:18.059 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51499 Sep 22 23:15:18.059 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/065.replace" to "/tmp/downstairs-vrx8aK6L/00/000/065.completed"
51500 Sep 22 23:15:18.059 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51501 Sep 22 23:15:18.059 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51502 Sep 22 23:15:18.059 DEBG [0] It's time to notify for 738
51503 Sep 22 23:15:18.059 INFO Completion from [0] id:738 status:true
51504 Sep 22 23:15:18.059 INFO [739/752] Repair commands completed
51505 Sep 22 23:15:18.059 INFO Pop front: ReconcileIO { id: ReconciliationId(739), op: ExtentReopen { repair_id: ReconciliationId(739), extent_id: 101 }, state: ClientData([New, New, New]) }
51506 Sep 22 23:15:18.060 INFO Sent repair work, now wait for resp
51507 Sep 22 23:15:18.060 INFO [0] received reconcile message
51508 Sep 22 23:15:18.060 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(739), op: ExtentReopen { repair_id: ReconciliationId(739), extent_id: 101 }, state: ClientData([InProgress, New, New]) }, : downstairs
51509 Sep 22 23:15:18.060 INFO [0] client ExtentReopen { repair_id: ReconciliationId(739), extent_id: 101 }
51510 Sep 22 23:15:18.060 INFO [1] received reconcile message
51511 Sep 22 23:15:18.060 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(739), op: ExtentReopen { repair_id: ReconciliationId(739), extent_id: 101 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51512 Sep 22 23:15:18.060 INFO [1] client ExtentReopen { repair_id: ReconciliationId(739), extent_id: 101 }
51513 Sep 22 23:15:18.060 INFO [2] received reconcile message
51514 Sep 22 23:15:18.060 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(739), op: ExtentReopen { repair_id: ReconciliationId(739), extent_id: 101 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51515 Sep 22 23:15:18.060 INFO [2] client ExtentReopen { repair_id: ReconciliationId(739), extent_id: 101 }
51516 Sep 22 23:15:18.060 DEBG 739 Reopen extent 101
51517 Sep 22 23:15:18.061 DEBG 739 Reopen extent 101
51518 Sep 22 23:15:18.061 DEBG 739 Reopen extent 101
51519 Sep 22 23:15:18.062 DEBG [2] It's time to notify for 739
51520 Sep 22 23:15:18.062 INFO Completion from [2] id:739 status:true
51521 Sep 22 23:15:18.062 INFO [740/752] Repair commands completed
51522 Sep 22 23:15:18.062 INFO Pop front: ReconcileIO { id: ReconciliationId(740), op: ExtentFlush { repair_id: ReconciliationId(740), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51523 Sep 22 23:15:18.062 INFO Sent repair work, now wait for resp
51524 Sep 22 23:15:18.062 INFO [0] received reconcile message
51525 Sep 22 23:15:18.062 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(740), op: ExtentFlush { repair_id: ReconciliationId(740), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51526 Sep 22 23:15:18.062 INFO [0] client ExtentFlush { repair_id: ReconciliationId(740), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51527 Sep 22 23:15:18.062 INFO [1] received reconcile message
51528 Sep 22 23:15:18.062 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(740), op: ExtentFlush { repair_id: ReconciliationId(740), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51529 Sep 22 23:15:18.062 INFO [1] client ExtentFlush { repair_id: ReconciliationId(740), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51530 Sep 22 23:15:18.062 INFO [2] received reconcile message
51531 Sep 22 23:15:18.062 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(740), op: ExtentFlush { repair_id: ReconciliationId(740), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51532 Sep 22 23:15:18.062 INFO [2] client ExtentFlush { repair_id: ReconciliationId(740), extent_id: 74, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51533 Sep 22 23:15:18.062 DEBG 740 Flush extent 74 with f:2 g:2
51534 Sep 22 23:15:18.062 DEBG Flush just extent 74 with f:2 and g:2
51535 Sep 22 23:15:18.062 DEBG [1] It's time to notify for 740
51536 Sep 22 23:15:18.062 INFO Completion from [1] id:740 status:true
51537 Sep 22 23:15:18.062 INFO [741/752] Repair commands completed
51538 Sep 22 23:15:18.062 INFO Pop front: ReconcileIO { id: ReconciliationId(741), op: ExtentClose { repair_id: ReconciliationId(741), extent_id: 74 }, state: ClientData([New, New, New]) }
51539 Sep 22 23:15:18.062 INFO Sent repair work, now wait for resp
51540 Sep 22 23:15:18.062 INFO [0] received reconcile message
51541 Sep 22 23:15:18.062 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(741), op: ExtentClose { repair_id: ReconciliationId(741), extent_id: 74 }, state: ClientData([InProgress, New, New]) }, : downstairs
51542 Sep 22 23:15:18.063 INFO [0] client ExtentClose { repair_id: ReconciliationId(741), extent_id: 74 }
51543 Sep 22 23:15:18.063 INFO [1] received reconcile message
51544 Sep 22 23:15:18.063 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(741), op: ExtentClose { repair_id: ReconciliationId(741), extent_id: 74 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51545 Sep 22 23:15:18.063 INFO [1] client ExtentClose { repair_id: ReconciliationId(741), extent_id: 74 }
51546 Sep 22 23:15:18.063 INFO [2] received reconcile message
51547 Sep 22 23:15:18.063 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(741), op: ExtentClose { repair_id: ReconciliationId(741), extent_id: 74 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51548 Sep 22 23:15:18.063 INFO [2] client ExtentClose { repair_id: ReconciliationId(741), extent_id: 74 }
51549 Sep 22 23:15:18.063 DEBG 741 Close extent 74
51550 Sep 22 23:15:18.063 DEBG 741 Close extent 74
51551 Sep 22 23:15:18.063 DEBG 741 Close extent 74
51552 Sep 22 23:15:18.064 DEBG [2] It's time to notify for 741
51553 Sep 22 23:15:18.064 INFO Completion from [2] id:741 status:true
51554 Sep 22 23:15:18.064 INFO [742/752] Repair commands completed
51555 Sep 22 23:15:18.064 INFO Pop front: ReconcileIO { id: ReconciliationId(742), op: ExtentRepair { repair_id: ReconciliationId(742), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51556 Sep 22 23:15:18.064 INFO Sent repair work, now wait for resp
51557 Sep 22 23:15:18.064 INFO [0] received reconcile message
51558 Sep 22 23:15:18.064 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(742), op: ExtentRepair { repair_id: ReconciliationId(742), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51559 Sep 22 23:15:18.064 INFO [0] client ExtentRepair { repair_id: ReconciliationId(742), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51560 Sep 22 23:15:18.064 INFO [0] Sending repair request ReconciliationId(742)
51561 Sep 22 23:15:18.064 INFO [1] received reconcile message
51562 Sep 22 23:15:18.064 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(742), op: ExtentRepair { repair_id: ReconciliationId(742), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51563 Sep 22 23:15:18.064 INFO [1] client ExtentRepair { repair_id: ReconciliationId(742), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51564 Sep 22 23:15:18.064 INFO [1] No action required ReconciliationId(742)
51565 Sep 22 23:15:18.064 INFO [2] received reconcile message
51566 Sep 22 23:15:18.064 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(742), op: ExtentRepair { repair_id: ReconciliationId(742), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51567 Sep 22 23:15:18.064 INFO [2] client ExtentRepair { repair_id: ReconciliationId(742), extent_id: 74, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51568 Sep 22 23:15:18.064 INFO [2] No action required ReconciliationId(742)
51569 Sep 22 23:15:18.064 DEBG 742 Repair extent 74 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
51570 Sep 22 23:15:18.064 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/04A.copy"
51571 Sep 22 23:15:18.095 DEBG IO Read 1094 has deps [JobId(1093)]
51572 Sep 22 23:15:18.128 INFO accepted connection, remote_addr: 127.0.0.1:53810, local_addr: 127.0.0.1:46213, task: repair
51573 Sep 22 23:15:18.129 TRCE incoming request, uri: /extent/74/files, method: GET, req_id: 21400e2a-efd3-44d8-9bb3-1729a2814135, remote_addr: 127.0.0.1:53810, local_addr: 127.0.0.1:46213, task: repair
51574 Sep 22 23:15:18.129 INFO request completed, latency_us: 203, response_code: 200, uri: /extent/74/files, method: GET, req_id: 21400e2a-efd3-44d8-9bb3-1729a2814135, remote_addr: 127.0.0.1:53810, local_addr: 127.0.0.1:46213, task: repair
51575 Sep 22 23:15:18.129 INFO eid:74 Found repair files: ["04A", "04A.db"]
51576 Sep 22 23:15:18.129 TRCE incoming request, uri: /newextent/74/data, method: GET, req_id: 56b803b4-cffb-4623-9b1c-0d1585917eb0, remote_addr: 127.0.0.1:53810, local_addr: 127.0.0.1:46213, task: repair
51577 Sep 22 23:15:18.130 INFO request completed, latency_us: 255, response_code: 200, uri: /newextent/74/data, method: GET, req_id: 56b803b4-cffb-4623-9b1c-0d1585917eb0, remote_addr: 127.0.0.1:53810, local_addr: 127.0.0.1:46213, task: repair
51578 Sep 22 23:15:18.135 TRCE incoming request, uri: /newextent/74/db, method: GET, req_id: cd138ed0-99c5-4e8f-b2b2-561e7538d650, remote_addr: 127.0.0.1:53810, local_addr: 127.0.0.1:46213, task: repair
51579 Sep 22 23:15:18.135 INFO request completed, latency_us: 296, response_code: 200, uri: /newextent/74/db, method: GET, req_id: cd138ed0-99c5-4e8f-b2b2-561e7538d650, remote_addr: 127.0.0.1:53810, local_addr: 127.0.0.1:46213, task: repair
51580 Sep 22 23:15:18.136 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/04A.copy" to "/tmp/downstairs-vrx8aK6L/00/000/04A.replace"
51581 Sep 22 23:15:18.136 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51582 Sep 22 23:15:18.137 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/04A.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
51583 Sep 22 23:15:18.138 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/04A"
51584 Sep 22 23:15:18.138 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/04A.db"
51585 Sep 22 23:15:18.138 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51586 Sep 22 23:15:18.138 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/04A.replace" to "/tmp/downstairs-vrx8aK6L/00/000/04A.completed"
51587 Sep 22 23:15:18.138 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51588 Sep 22 23:15:18.138 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51589 Sep 22 23:15:18.138 DEBG [0] It's time to notify for 742
51590 Sep 22 23:15:18.138 INFO Completion from [0] id:742 status:true
51591 Sep 22 23:15:18.138 INFO [743/752] Repair commands completed
51592 Sep 22 23:15:18.138 INFO Pop front: ReconcileIO { id: ReconciliationId(743), op: ExtentReopen { repair_id: ReconciliationId(743), extent_id: 74 }, state: ClientData([New, New, New]) }
51593 Sep 22 23:15:18.138 INFO Sent repair work, now wait for resp
51594 Sep 22 23:15:18.138 INFO [0] received reconcile message
51595 Sep 22 23:15:18.138 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(743), op: ExtentReopen { repair_id: ReconciliationId(743), extent_id: 74 }, state: ClientData([InProgress, New, New]) }, : downstairs
51596 Sep 22 23:15:18.138 INFO [0] client ExtentReopen { repair_id: ReconciliationId(743), extent_id: 74 }
51597 Sep 22 23:15:18.138 INFO [1] received reconcile message
51598 Sep 22 23:15:18.138 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(743), op: ExtentReopen { repair_id: ReconciliationId(743), extent_id: 74 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51599 Sep 22 23:15:18.138 INFO [1] client ExtentReopen { repair_id: ReconciliationId(743), extent_id: 74 }
51600 Sep 22 23:15:18.138 INFO [2] received reconcile message
51601 Sep 22 23:15:18.138 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(743), op: ExtentReopen { repair_id: ReconciliationId(743), extent_id: 74 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51602 Sep 22 23:15:18.138 INFO [2] client ExtentReopen { repair_id: ReconciliationId(743), extent_id: 74 }
51603 Sep 22 23:15:18.139 DEBG 743 Reopen extent 74
51604 Sep 22 23:15:18.139 DEBG 743 Reopen extent 74
51605 Sep 22 23:15:18.140 DEBG 743 Reopen extent 74
51606 Sep 22 23:15:18.140 DEBG [2] It's time to notify for 743
51607 Sep 22 23:15:18.140 INFO Completion from [2] id:743 status:true
51608 Sep 22 23:15:18.140 INFO [744/752] Repair commands completed
51609 Sep 22 23:15:18.140 INFO Pop front: ReconcileIO { id: ReconciliationId(744), op: ExtentFlush { repair_id: ReconciliationId(744), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51610 Sep 22 23:15:18.140 INFO Sent repair work, now wait for resp
51611 Sep 22 23:15:18.140 INFO [0] received reconcile message
51612 Sep 22 23:15:18.140 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(744), op: ExtentFlush { repair_id: ReconciliationId(744), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51613 Sep 22 23:15:18.140 INFO [0] client ExtentFlush { repair_id: ReconciliationId(744), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51614 Sep 22 23:15:18.140 INFO [1] received reconcile message
51615 Sep 22 23:15:18.140 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(744), op: ExtentFlush { repair_id: ReconciliationId(744), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51616 Sep 22 23:15:18.141 INFO [1] client ExtentFlush { repair_id: ReconciliationId(744), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51617 Sep 22 23:15:18.141 INFO [2] received reconcile message
51618 Sep 22 23:15:18.141 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(744), op: ExtentFlush { repair_id: ReconciliationId(744), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51619 Sep 22 23:15:18.141 INFO [2] client ExtentFlush { repair_id: ReconciliationId(744), extent_id: 83, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51620 Sep 22 23:15:18.141 DEBG 744 Flush extent 83 with f:2 g:2
51621 Sep 22 23:15:18.141 DEBG Flush just extent 83 with f:2 and g:2
51622 Sep 22 23:15:18.141 DEBG [1] It's time to notify for 744
51623 Sep 22 23:15:18.141 INFO Completion from [1] id:744 status:true
51624 Sep 22 23:15:18.141 INFO [745/752] Repair commands completed
51625 Sep 22 23:15:18.141 INFO Pop front: ReconcileIO { id: ReconciliationId(745), op: ExtentClose { repair_id: ReconciliationId(745), extent_id: 83 }, state: ClientData([New, New, New]) }
51626 Sep 22 23:15:18.141 INFO Sent repair work, now wait for resp
51627 Sep 22 23:15:18.141 INFO [0] received reconcile message
51628 Sep 22 23:15:18.141 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(745), op: ExtentClose { repair_id: ReconciliationId(745), extent_id: 83 }, state: ClientData([InProgress, New, New]) }, : downstairs
51629 Sep 22 23:15:18.141 INFO [0] client ExtentClose { repair_id: ReconciliationId(745), extent_id: 83 }
51630 Sep 22 23:15:18.141 INFO [1] received reconcile message
51631 Sep 22 23:15:18.141 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(745), op: ExtentClose { repair_id: ReconciliationId(745), extent_id: 83 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51632 Sep 22 23:15:18.141 INFO [1] client ExtentClose { repair_id: ReconciliationId(745), extent_id: 83 }
51633 Sep 22 23:15:18.141 INFO [2] received reconcile message
51634 Sep 22 23:15:18.141 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(745), op: ExtentClose { repair_id: ReconciliationId(745), extent_id: 83 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51635 Sep 22 23:15:18.141 INFO [2] client ExtentClose { repair_id: ReconciliationId(745), extent_id: 83 }
51636 Sep 22 23:15:18.141 DEBG 745 Close extent 83
51637 Sep 22 23:15:18.142 DEBG 745 Close extent 83
51638 Sep 22 23:15:18.142 DEBG 745 Close extent 83
51639 Sep 22 23:15:18.142 DEBG [2] It's time to notify for 745
51640 Sep 22 23:15:18.142 INFO Completion from [2] id:745 status:true
51641 Sep 22 23:15:18.142 INFO [746/752] Repair commands completed
51642 Sep 22 23:15:18.142 INFO Pop front: ReconcileIO { id: ReconciliationId(746), op: ExtentRepair { repair_id: ReconciliationId(746), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51643 Sep 22 23:15:18.142 INFO Sent repair work, now wait for resp
51644 Sep 22 23:15:18.142 INFO [0] received reconcile message
51645 Sep 22 23:15:18.142 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(746), op: ExtentRepair { repair_id: ReconciliationId(746), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51646 Sep 22 23:15:18.142 INFO [0] client ExtentRepair { repair_id: ReconciliationId(746), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51647 Sep 22 23:15:18.142 INFO [0] Sending repair request ReconciliationId(746)
51648 Sep 22 23:15:18.143 INFO [1] received reconcile message
51649 Sep 22 23:15:18.143 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(746), op: ExtentRepair { repair_id: ReconciliationId(746), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51650 Sep 22 23:15:18.143 INFO [1] client ExtentRepair { repair_id: ReconciliationId(746), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51651 Sep 22 23:15:18.143 INFO [1] No action required ReconciliationId(746)
51652 Sep 22 23:15:18.143 INFO [2] received reconcile message
51653 Sep 22 23:15:18.143 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(746), op: ExtentRepair { repair_id: ReconciliationId(746), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51654 Sep 22 23:15:18.143 INFO [2] client ExtentRepair { repair_id: ReconciliationId(746), extent_id: 83, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51655 Sep 22 23:15:18.143 INFO [2] No action required ReconciliationId(746)
51656 Sep 22 23:15:18.143 DEBG 746 Repair extent 83 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
51657 Sep 22 23:15:18.143 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/053.copy"
51658 Sep 22 23:15:18.204 INFO accepted connection, remote_addr: 127.0.0.1:41622, local_addr: 127.0.0.1:46213, task: repair
51659 Sep 22 23:15:18.204 TRCE incoming request, uri: /extent/83/files, method: GET, req_id: a7b795f1-964c-4238-b9d0-33b73a84f4ae, remote_addr: 127.0.0.1:41622, local_addr: 127.0.0.1:46213, task: repair
51660 Sep 22 23:15:18.204 INFO request completed, latency_us: 198, response_code: 200, uri: /extent/83/files, method: GET, req_id: a7b795f1-964c-4238-b9d0-33b73a84f4ae, remote_addr: 127.0.0.1:41622, local_addr: 127.0.0.1:46213, task: repair
51661 Sep 22 23:15:18.204 INFO eid:83 Found repair files: ["053", "053.db"]
51662 Sep 22 23:15:18.204 TRCE incoming request, uri: /newextent/83/data, method: GET, req_id: 251d9f17-bc9b-4f2a-87db-66c05c6787e5, remote_addr: 127.0.0.1:41622, local_addr: 127.0.0.1:46213, task: repair
51663 Sep 22 23:15:18.205 INFO request completed, latency_us: 251, response_code: 200, uri: /newextent/83/data, method: GET, req_id: 251d9f17-bc9b-4f2a-87db-66c05c6787e5, remote_addr: 127.0.0.1:41622, local_addr: 127.0.0.1:46213, task: repair
51664 Sep 22 23:15:18.210 TRCE incoming request, uri: /newextent/83/db, method: GET, req_id: 797bb5db-ea30-4d14-a905-45f53c265bc8, remote_addr: 127.0.0.1:41622, local_addr: 127.0.0.1:46213, task: repair
51665 Sep 22 23:15:18.210 INFO request completed, latency_us: 293, response_code: 200, uri: /newextent/83/db, method: GET, req_id: 797bb5db-ea30-4d14-a905-45f53c265bc8, remote_addr: 127.0.0.1:41622, local_addr: 127.0.0.1:46213, task: repair
51666 Sep 22 23:15:18.211 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/053.copy" to "/tmp/downstairs-vrx8aK6L/00/000/053.replace"
51667 Sep 22 23:15:18.211 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51668 Sep 22 23:15:18.212 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/053.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
51669 Sep 22 23:15:18.212 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/053"
51670 Sep 22 23:15:18.212 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/053.db"
51671 Sep 22 23:15:18.212 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51672 Sep 22 23:15:18.212 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/053.replace" to "/tmp/downstairs-vrx8aK6L/00/000/053.completed"
51673 Sep 22 23:15:18.212 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51674 Sep 22 23:15:18.212 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51675 Sep 22 23:15:18.213 DEBG [0] It's time to notify for 746
51676 Sep 22 23:15:18.213 INFO Completion from [0] id:746 status:true
51677 Sep 22 23:15:18.213 INFO [747/752] Repair commands completed
51678 Sep 22 23:15:18.213 INFO Pop front: ReconcileIO { id: ReconciliationId(747), op: ExtentReopen { repair_id: ReconciliationId(747), extent_id: 83 }, state: ClientData([New, New, New]) }
51679 Sep 22 23:15:18.213 INFO Sent repair work, now wait for resp
51680 Sep 22 23:15:18.213 INFO [0] received reconcile message
51681 Sep 22 23:15:18.213 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(747), op: ExtentReopen { repair_id: ReconciliationId(747), extent_id: 83 }, state: ClientData([InProgress, New, New]) }, : downstairs
51682 Sep 22 23:15:18.213 INFO [0] client ExtentReopen { repair_id: ReconciliationId(747), extent_id: 83 }
51683 Sep 22 23:15:18.213 INFO [1] received reconcile message
51684 Sep 22 23:15:18.213 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(747), op: ExtentReopen { repair_id: ReconciliationId(747), extent_id: 83 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51685 Sep 22 23:15:18.213 INFO [1] client ExtentReopen { repair_id: ReconciliationId(747), extent_id: 83 }
51686 Sep 22 23:15:18.213 INFO [2] received reconcile message
51687 Sep 22 23:15:18.213 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(747), op: ExtentReopen { repair_id: ReconciliationId(747), extent_id: 83 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51688 Sep 22 23:15:18.213 INFO [2] client ExtentReopen { repair_id: ReconciliationId(747), extent_id: 83 }
51689 Sep 22 23:15:18.213 DEBG 747 Reopen extent 83
51690 Sep 22 23:15:18.214 DEBG 747 Reopen extent 83
51691 Sep 22 23:15:18.214 DEBG 747 Reopen extent 83
51692 Sep 22 23:15:18.215 DEBG [2] It's time to notify for 747
51693 Sep 22 23:15:18.215 INFO Completion from [2] id:747 status:true
51694 Sep 22 23:15:18.215 INFO [748/752] Repair commands completed
51695 Sep 22 23:15:18.215 INFO Pop front: ReconcileIO { id: ReconciliationId(748), op: ExtentFlush { repair_id: ReconciliationId(748), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([New, New, New]) }
51696 Sep 22 23:15:18.215 INFO Sent repair work, now wait for resp
51697 Sep 22 23:15:18.215 INFO [0] received reconcile message
51698 Sep 22 23:15:18.215 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(748), op: ExtentFlush { repair_id: ReconciliationId(748), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([InProgress, New, New]) }, : downstairs
51699 Sep 22 23:15:18.215 INFO [0] client ExtentFlush { repair_id: ReconciliationId(748), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51700 Sep 22 23:15:18.215 INFO [1] received reconcile message
51701 Sep 22 23:15:18.215 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(748), op: ExtentFlush { repair_id: ReconciliationId(748), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, New]) }, : downstairs
51702 Sep 22 23:15:18.215 INFO [1] client ExtentFlush { repair_id: ReconciliationId(748), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51703 Sep 22 23:15:18.215 INFO [2] received reconcile message
51704 Sep 22 23:15:18.215 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(748), op: ExtentFlush { repair_id: ReconciliationId(748), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }, state: ClientData([Done, InProgress, InProgress]) }, : downstairs
51705 Sep 22 23:15:18.215 INFO [2] client ExtentFlush { repair_id: ReconciliationId(748), extent_id: 119, client_id: ClientId(1), flush_number: 2, gen_number: 2 }
51706 Sep 22 23:15:18.215 DEBG 748 Flush extent 119 with f:2 g:2
51707 Sep 22 23:15:18.215 DEBG Flush just extent 119 with f:2 and g:2
51708 Sep 22 23:15:18.215 DEBG [1] It's time to notify for 748
51709 Sep 22 23:15:18.215 INFO Completion from [1] id:748 status:true
51710 Sep 22 23:15:18.215 INFO [749/752] Repair commands completed
51711 Sep 22 23:15:18.216 INFO Pop front: ReconcileIO { id: ReconciliationId(749), op: ExtentClose { repair_id: ReconciliationId(749), extent_id: 119 }, state: ClientData([New, New, New]) }
51712 Sep 22 23:15:18.216 INFO Sent repair work, now wait for resp
51713 Sep 22 23:15:18.216 INFO [0] received reconcile message
51714 Sep 22 23:15:18.216 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(749), op: ExtentClose { repair_id: ReconciliationId(749), extent_id: 119 }, state: ClientData([InProgress, New, New]) }, : downstairs
51715 Sep 22 23:15:18.216 INFO [0] client ExtentClose { repair_id: ReconciliationId(749), extent_id: 119 }
51716 Sep 22 23:15:18.216 INFO [1] received reconcile message
51717 Sep 22 23:15:18.216 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(749), op: ExtentClose { repair_id: ReconciliationId(749), extent_id: 119 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51718 Sep 22 23:15:18.216 INFO [1] client ExtentClose { repair_id: ReconciliationId(749), extent_id: 119 }
51719 Sep 22 23:15:18.216 INFO [2] received reconcile message
51720 Sep 22 23:15:18.216 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(749), op: ExtentClose { repair_id: ReconciliationId(749), extent_id: 119 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51721 Sep 22 23:15:18.216 INFO [2] client ExtentClose { repair_id: ReconciliationId(749), extent_id: 119 }
51722 Sep 22 23:15:18.216 DEBG 749 Close extent 119
51723 Sep 22 23:15:18.216 DEBG 749 Close extent 119
51724 Sep 22 23:15:18.216 DEBG 749 Close extent 119
51725 Sep 22 23:15:18.217 DEBG [2] It's time to notify for 749
51726 Sep 22 23:15:18.217 INFO Completion from [2] id:749 status:true
51727 Sep 22 23:15:18.217 INFO [750/752] Repair commands completed
51728 Sep 22 23:15:18.217 INFO Pop front: ReconcileIO { id: ReconciliationId(750), op: ExtentRepair { repair_id: ReconciliationId(750), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([New, New, New]) }
51729 Sep 22 23:15:18.217 INFO Sent repair work, now wait for resp
51730 Sep 22 23:15:18.217 INFO [0] received reconcile message
51731 Sep 22 23:15:18.217 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(750), op: ExtentRepair { repair_id: ReconciliationId(750), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, New, New]) }, : downstairs
51732 Sep 22 23:15:18.217 INFO [0] client ExtentRepair { repair_id: ReconciliationId(750), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51733 Sep 22 23:15:18.217 INFO [0] Sending repair request ReconciliationId(750)
51734 Sep 22 23:15:18.217 INFO [1] received reconcile message
51735 Sep 22 23:15:18.217 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(750), op: ExtentRepair { repair_id: ReconciliationId(750), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51736 Sep 22 23:15:18.217 INFO [1] client ExtentRepair { repair_id: ReconciliationId(750), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51737 Sep 22 23:15:18.217 INFO [1] No action required ReconciliationId(750)
51738 Sep 22 23:15:18.217 INFO [2] received reconcile message
51739 Sep 22 23:15:18.217 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(750), op: ExtentRepair { repair_id: ReconciliationId(750), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }, state: ClientData([InProgress, Done, InProgress]) }, : downstairs
51740 Sep 22 23:15:18.217 INFO [2] client ExtentRepair { repair_id: ReconciliationId(750), extent_id: 119, source_client_id: ClientId(1), source_repair_address: 127.0.0.1:46213, dest_clients: [ClientId(0)] }
51741 Sep 22 23:15:18.217 INFO [2] No action required ReconciliationId(750)
51742 Sep 22 23:15:18.217 DEBG 750 Repair extent 119 source:[1] 127.0.0.1:46213 dest:[ClientId(0)]
51743 Sep 22 23:15:18.217 INFO Created copy dir "/tmp/downstairs-vrx8aK6L/00/000/077.copy"
51744 Sep 22 23:15:18.280 INFO accepted connection, remote_addr: 127.0.0.1:44274, local_addr: 127.0.0.1:46213, task: repair
51745 Sep 22 23:15:18.280 TRCE incoming request, uri: /extent/119/files, method: GET, req_id: 62beac8e-278f-4ac3-b123-b86e144a2a78, remote_addr: 127.0.0.1:44274, local_addr: 127.0.0.1:46213, task: repair
51746 Sep 22 23:15:18.281 INFO request completed, latency_us: 190, response_code: 200, uri: /extent/119/files, method: GET, req_id: 62beac8e-278f-4ac3-b123-b86e144a2a78, remote_addr: 127.0.0.1:44274, local_addr: 127.0.0.1:46213, task: repair
51747 Sep 22 23:15:18.281 INFO eid:119 Found repair files: ["077", "077.db"]
51748 Sep 22 23:15:18.281 TRCE incoming request, uri: /newextent/119/data, method: GET, req_id: 52a95044-6a92-4278-b72e-80ab157b9d18, remote_addr: 127.0.0.1:44274, local_addr: 127.0.0.1:46213, task: repair
51749 Sep 22 23:15:18.281 INFO request completed, latency_us: 299, response_code: 200, uri: /newextent/119/data, method: GET, req_id: 52a95044-6a92-4278-b72e-80ab157b9d18, remote_addr: 127.0.0.1:44274, local_addr: 127.0.0.1:46213, task: repair
51750 Sep 22 23:15:18.286 TRCE incoming request, uri: /newextent/119/db, method: GET, req_id: 4bae20a0-7f94-442c-be90-91fe1ddbc59b, remote_addr: 127.0.0.1:44274, local_addr: 127.0.0.1:46213, task: repair
51751 Sep 22 23:15:18.287 INFO request completed, latency_us: 291, response_code: 200, uri: /newextent/119/db, method: GET, req_id: 4bae20a0-7f94-442c-be90-91fe1ddbc59b, remote_addr: 127.0.0.1:44274, local_addr: 127.0.0.1:46213, task: repair
51752 Sep 22 23:15:18.288 INFO Repair files downloaded, move directory "/tmp/downstairs-vrx8aK6L/00/000/077.copy" to "/tmp/downstairs-vrx8aK6L/00/000/077.replace"
51753 Sep 22 23:15:18.288 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51754 Sep 22 23:15:18.289 INFO Copy files from "/tmp/downstairs-vrx8aK6L/00/000/077.replace" in "/tmp/downstairs-vrx8aK6L/00/000"
51755 Sep 22 23:15:18.289 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/077"
51756 Sep 22 23:15:18.289 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000/077.db"
51757 Sep 22 23:15:18.289 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51758 Sep 22 23:15:18.289 INFO Move directory "/tmp/downstairs-vrx8aK6L/00/000/077.replace" to "/tmp/downstairs-vrx8aK6L/00/000/077.completed"
51759 Sep 22 23:15:18.289 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51760 Sep 22 23:15:18.289 DEBG fsync completed for: "/tmp/downstairs-vrx8aK6L/00/000"
51761 Sep 22 23:15:18.289 DEBG [0] It's time to notify for 750
51762 Sep 22 23:15:18.290 INFO Completion from [0] id:750 status:true
51763 Sep 22 23:15:18.290 INFO [751/752] Repair commands completed
51764 Sep 22 23:15:18.290 INFO Pop front: ReconcileIO { id: ReconciliationId(751), op: ExtentReopen { repair_id: ReconciliationId(751), extent_id: 119 }, state: ClientData([New, New, New]) }
51765 Sep 22 23:15:18.290 INFO Sent repair work, now wait for resp
51766 Sep 22 23:15:18.290 INFO [0] received reconcile message
51767 Sep 22 23:15:18.290 INFO [0] rep_in_progress: return ReconcileIO { id: ReconciliationId(751), op: ExtentReopen { repair_id: ReconciliationId(751), extent_id: 119 }, state: ClientData([InProgress, New, New]) }, : downstairs
51768 Sep 22 23:15:18.290 INFO [0] client ExtentReopen { repair_id: ReconciliationId(751), extent_id: 119 }
51769 Sep 22 23:15:18.290 INFO [1] received reconcile message
51770 Sep 22 23:15:18.290 INFO [1] rep_in_progress: return ReconcileIO { id: ReconciliationId(751), op: ExtentReopen { repair_id: ReconciliationId(751), extent_id: 119 }, state: ClientData([InProgress, InProgress, New]) }, : downstairs
51771 Sep 22 23:15:18.290 INFO [1] client ExtentReopen { repair_id: ReconciliationId(751), extent_id: 119 }
51772 Sep 22 23:15:18.290 INFO [2] received reconcile message
51773 Sep 22 23:15:18.290 INFO [2] rep_in_progress: return ReconcileIO { id: ReconciliationId(751), op: ExtentReopen { repair_id: ReconciliationId(751), extent_id: 119 }, state: ClientData([InProgress, InProgress, InProgress]) }, : downstairs
51774 Sep 22 23:15:18.290 INFO [2] client ExtentReopen { repair_id: ReconciliationId(751), extent_id: 119 }
51775 Sep 22 23:15:18.290 DEBG 751 Reopen extent 119
51776 Sep 22 23:15:18.291 DEBG 751 Reopen extent 119
51777 Sep 22 23:15:18.291 DEBG 751 Reopen extent 119
51778 Sep 22 23:15:18.292 DEBG [2] It's time to notify for 751
51779 Sep 22 23:15:18.292 INFO Completion from [2] id:751 status:true
51780 Sep 22 23:15:18.292 INFO [752/752] Repair commands completed
51781 Sep 22 23:15:18.292 INFO 188 extents repaired in 14.627 ave:0.0778
51782 Sep 22 23:15:18.292 INFO All required repair work is completed
51783 Sep 22 23:15:18.292 INFO Set Downstairs and Upstairs active after repairs
51784 Sep 22 23:15:18.292 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 is now active with session: 2cf90053-4dbd-4cc4-8468-9d35f085e47a
51785 Sep 22 23:15:18.292 INFO Notify all downstairs, region set compare is done.
51786 Sep 22 23:15:18.292 INFO Set check for repair
51787 Sep 22 23:15:18.292 INFO [1] 127.0.0.1:64149 task reports connection:true
51788 Sep 22 23:15:18.292 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 Active Active Active
51789 Sep 22 23:15:18.292 INFO Set check for repair
51790 Sep 22 23:15:18.292 INFO [2] 127.0.0.1:58182 task reports connection:true
51791 Sep 22 23:15:18.292 INFO 6e5c0f70-fd56-4280-9d20-71288e488216 Active Active Active
51792 Sep 22 23:15:18.292 INFO Set check for repair
51793 Sep 22 23:15:18.292 INFO [0] received reconcile message
51794 Sep 22 23:15:18.292 INFO [0] All repairs completed, exit
51795 Sep 22 23:15:18.292 INFO [0] Starts cmd_loop
51796 Sep 22 23:15:18.292 INFO [1] received reconcile message
51797 Sep 22 23:15:18.292 INFO [1] All repairs completed, exit
51798 Sep 22 23:15:18.292 INFO [1] Starts cmd_loop
51799 Sep 22 23:15:18.292 INFO [2] received reconcile message
51800 Sep 22 23:15:18.292 INFO [2] All repairs completed, exit
51801 Sep 22 23:15:18.292 INFO [2] Starts cmd_loop
51802 The guest has finished waiting for activation
51803 Sep 22 23:15:18.311 DEBG IO Read 1000 has deps []
51804 Sep 22 23:15:18.433 DEBG up_ds_listen was notified
51805 Sep 22 23:15:18.433 DEBG up_ds_listen process 1093
51806 Sep 22 23:15:18.433 DEBG [A] ack job 1093:94, : downstairs
51807 Sep 22 23:15:18.433 DEBG up_ds_listen checked 1 jobs, back to waiting
51808 Sep 22 23:15:18.440 DEBG IO Flush 1095 has deps [JobId(1094), JobId(1093)]
51809 Sep 22 23:15:18.443 DEBG Flush :1093 extent_limit None deps:[JobId(1092)] res:true f:36 g:1
51810 Sep 22 23:15:18.449 DEBG Read :1094 deps:[JobId(1093)] res:true
51811 Sep 22 23:15:18.850 DEBG [rc] retire 1093 clears [JobId(1092), JobId(1093)], : downstairs
51812 Sep 22 23:15:18.857 DEBG Read :1094 deps:[JobId(1093)] res:true
51813 Sep 22 23:15:18.881 DEBG Flush :1095 extent_limit None deps:[JobId(1094), JobId(1093)] res:true f:37 g:1
51814 Sep 22 23:15:18.881 INFO [lossy] sleeping 1 second
51815 Sep 22 23:15:19.263 DEBG [2] Read AckReady 1094, : downstairs
51816 Sep 22 23:15:19.264 DEBG up_ds_listen was notified
51817 Sep 22 23:15:19.264 DEBG up_ds_listen process 1094
51818 Sep 22 23:15:19.264 DEBG [A] ack job 1094:95, : downstairs
51819 Sep 22 23:15:19.318 DEBG up_ds_listen checked 1 jobs, back to waiting
51820 Sep 22 23:15:19.320 WARN returning error on flush!
51821 Sep 22 23:15:19.320 DEBG Flush :1095 extent_limit None deps:[JobId(1094), JobId(1093)] res:false f:37 g:1
51822 Sep 22 23:15:19.320 INFO [lossy] skipping 1095
51823 Sep 22 23:15:19.320 DEBG Flush :1095 extent_limit None deps:[JobId(1094), JobId(1093)] res:true f:37 g:1
51824 Sep 22 23:15:19.320 INFO [lossy] sleeping 1 second
51825 Sep 22 23:15:19.340 DEBG Read :1094 deps:[JobId(1093)] res:true
51826 Sep 22 23:15:19.507 DEBG IO Read 1096 has deps [JobId(1095)]
51827 Sep 22 23:15:19.877 ERRO [1] job id 1095 saw error GenericError("test error")
51828 Sep 22 23:15:19.882 DEBG up_ds_listen was notified
51829 Sep 22 23:15:19.882 DEBG up_ds_listen process 1095
51830 Sep 22 23:15:19.882 DEBG [A] ack job 1095:96, : downstairs
51831 Sep 22 23:15:19.882 DEBG up_ds_listen checked 1 jobs, back to waiting
51832 Sep 22 23:15:19.889 DEBG IO Flush 1097 has deps [JobId(1096), JobId(1095)]
51833 Sep 22 23:15:19.889 INFO [lossy] sleeping 1 second
51834 Sep 22 23:15:19.892 INFO [lossy] skipping 1095
51835 Sep 22 23:15:19.892 DEBG Flush :1095 extent_limit None deps:[JobId(1094), JobId(1093)] res:true f:37 g:1
51836 Sep 22 23:15:19.910 DEBG Read :1096 deps:[JobId(1095)] res:true
51837 Sep 22 23:15:20.450 DEBG [rc] retire 1095 clears [JobId(1094), JobId(1095)], : downstairs
51838 Sep 22 23:15:20.451 WARN returning error on read!
51839 Sep 22 23:15:20.451 DEBG Read :1096 deps:[JobId(1095)] res:false
51840 Sep 22 23:15:20.457 DEBG Read :1096 deps:[JobId(1095)] res:true
51841 Sep 22 23:15:20.479 ERRO [1] job id 1096 saw error GenericError("test error")
51842 Sep 22 23:15:20.481 INFO [lossy] skipping 1097
51843 Sep 22 23:15:20.481 INFO [lossy] skipping 1097
51844 Sep 22 23:15:20.481 INFO [lossy] skipping 1097
51845 Sep 22 23:15:20.481 INFO [lossy] skipping 1097
51846 Sep 22 23:15:20.481 DEBG Flush :1097 extent_limit None deps:[JobId(1096), JobId(1095)] res:true f:38 g:1
51847 Sep 22 23:15:20.481 INFO [lossy] sleeping 1 second
51848 Sep 22 23:15:21.054 DEBG Read :1000 deps:[] res:true
51849 Sep 22 23:15:21.055 DEBG [0] Read AckReady 1096, : downstairs
51850 Sep 22 23:15:21.056 DEBG up_ds_listen was notified
51851 Sep 22 23:15:21.056 DEBG up_ds_listen process 1096
51852 Sep 22 23:15:21.056 DEBG [A] ack job 1096:97, : downstairs
51853 Sep 22 23:15:21.109 DEBG up_ds_listen checked 1 jobs, back to waiting
51854 Sep 22 23:15:21.111 DEBG Flush :1097 extent_limit None deps:[JobId(1096), JobId(1095)] res:true f:38 g:1
51855 Sep 22 23:15:21.111 INFO [lossy] sleeping 1 second
51856 Sep 22 23:15:21.118 DEBG Read :1096 deps:[JobId(1095)] res:true
51857 Sep 22 23:15:21.188 DEBG IO Read 1098 has deps [JobId(1097)]
51858 Sep 22 23:15:21.525 DEBG up_ds_listen was notified
51859 Sep 22 23:15:21.525 DEBG up_ds_listen process 1097
51860 Sep 22 23:15:21.525 DEBG [A] ack job 1097:98, : downstairs
51861 Sep 22 23:15:21.525 DEBG up_ds_listen checked 1 jobs, back to waiting
51862 Sep 22 23:15:21.532 INFO [lossy] sleeping 1 second
51863 Sep 22 23:15:21.534 WARN returning error on flush!
51864 Sep 22 23:15:21.534 DEBG Flush :1097 extent_limit None deps:[JobId(1096), JobId(1095)] res:false f:38 g:1
51865 Sep 22 23:15:21.534 WARN returning error on flush!
51866 Sep 22 23:15:21.534 DEBG Flush :1097 extent_limit None deps:[JobId(1096), JobId(1095)] res:false f:38 g:1
51867 Sep 22 23:15:21.534 INFO [lossy] skipping 1097
51868 Sep 22 23:15:21.534 INFO [lossy] skipping 1097
51869 Sep 22 23:15:21.534 INFO [lossy] skipping 1097
51870 Sep 22 23:15:21.534 WARN returning error on flush!
51871 Sep 22 23:15:21.534 DEBG Flush :1097 extent_limit None deps:[JobId(1096), JobId(1095)] res:false f:38 g:1
51872 Sep 22 23:15:21.534 WARN returning error on flush!
51873 Sep 22 23:15:21.534 DEBG Flush :1097 extent_limit None deps:[JobId(1096), JobId(1095)] res:false f:38 g:1
51874 Sep 22 23:15:21.534 WARN returning error on flush!
51875 Sep 22 23:15:21.534 DEBG Flush :1097 extent_limit None deps:[JobId(1096), JobId(1095)] res:false f:38 g:1
51876 Sep 22 23:15:21.534 DEBG Flush :1097 extent_limit None deps:[JobId(1096), JobId(1095)] res:true f:38 g:1
51877 Sep 22 23:15:21.535 WARN returning error on read!
51878 Sep 22 23:15:21.535 DEBG Read :1098 deps:[JobId(1097)] res:false
51879 Sep 22 23:15:21.541 DEBG Read :1098 deps:[JobId(1097)] res:true
51880 Sep 22 23:15:21.943 ERRO [2] job id 1097 saw error GenericError("test error")
51881 Sep 22 23:15:21.943 ERRO [2] job id 1097 saw error GenericError("test error")
51882 Sep 22 23:15:21.943 ERRO [2] job id 1097 saw error GenericError("test error")
51883 Sep 22 23:15:21.943 ERRO [2] job id 1097 saw error GenericError("test error")
51884 Sep 22 23:15:21.943 ERRO [2] job id 1097 saw error GenericError("test error")
51885 Sep 22 23:15:21.943 DEBG [rc] retire 1097 clears [JobId(1096), JobId(1097)], : downstairs
51886 Sep 22 23:15:21.943 ERRO [2] job id 1098 saw error GenericError("test error")
51887 Sep 22 23:15:21.943 DEBG IO Flush 1099 has deps [JobId(1098)]
51888 Sep 22 23:15:21.946 WARN returning error on flush!
51889 Sep 22 23:15:21.946 DEBG Flush :1099 extent_limit None deps:[JobId(1098)] res:false f:39 g:1
51890 Sep 22 23:15:21.946 DEBG Flush :1099 extent_limit None deps:[JobId(1098)] res:true f:39 g:1
51891 Sep 22 23:15:21.946 INFO [lossy] sleeping 1 second
51892 Sep 22 23:15:22.322 DEBG [2] Read AckReady 1098, : downstairs
51893 Sep 22 23:15:22.323 ERRO [2] job id 1099 saw error GenericError("test error")
51894 Sep 22 23:15:22.323 DEBG up_ds_listen was notified
51895 Sep 22 23:15:22.324 DEBG up_ds_listen process 1098
51896 Sep 22 23:15:22.324 DEBG [A] ack job 1098:99, : downstairs
51897 Sep 22 23:15:22.376 DEBG up_ds_listen checked 1 jobs, back to waiting
51898 Sep 22 23:15:22.423 DEBG Read :1098 deps:[JobId(1097)] res:true
51899 Sep 22 23:15:22.445 DEBG IO Read 1100 has deps [JobId(1099)]
51900 Sep 22 23:15:22.445 DEBG IO Flush 1101 has deps [JobId(1100), JobId(1099)]
51901 Sep 22 23:15:22.459 DEBG Flush :1099 extent_limit None deps:[JobId(1098)] res:true f:39 g:1
51902 Sep 22 23:15:22.459 INFO [lossy] skipping 1100
51903 Sep 22 23:15:22.459 INFO [lossy] skipping 1100
51904 Sep 22 23:15:22.480 DEBG Read :1100 deps:[JobId(1099)] res:true
51905 Sep 22 23:15:22.639 DEBG Read :1000 deps:[] res:true
51906 Sep 22 23:15:23.017 DEBG up_ds_listen was notified
51907 Sep 22 23:15:23.017 DEBG up_ds_listen process 1099
51908 Sep 22 23:15:23.018 DEBG [A] ack job 1099:100, : downstairs
51909 Sep 22 23:15:23.018 DEBG up_ds_listen checked 1 jobs, back to waiting
51910 Sep 22 23:15:23.024 DEBG Read :1098 deps:[JobId(1097)] res:true
51911 Sep 22 23:15:23.045 INFO [lossy] skipping 1100
51912 Sep 22 23:15:23.045 INFO [lossy] skipping 1101
51913 Sep 22 23:15:23.051 DEBG Read :1100 deps:[JobId(1099)] res:true
51914 Sep 22 23:15:23.075 INFO [lossy] skipping 1101
51915 Sep 22 23:15:23.075 WARN returning error on flush!
51916 Sep 22 23:15:23.075 DEBG Flush :1101 extent_limit None deps:[JobId(1100), JobId(1099)] res:false f:40 g:1
51917 Sep 22 23:15:23.075 DEBG Flush :1101 extent_limit None deps:[JobId(1100), JobId(1099)] res:true f:40 g:1
51918 Sep 22 23:15:23.075 INFO [lossy] sleeping 1 second
51919 Sep 22 23:15:23.480 DEBG [1] Read AckReady 1100, : downstairs
51920 Sep 22 23:15:23.481 ERRO [1] job id 1101 saw error GenericError("test error")
51921 Sep 22 23:15:23.481 DEBG up_ds_listen was notified
51922 Sep 22 23:15:23.481 DEBG up_ds_listen process 1100
51923 Sep 22 23:15:23.481 DEBG [A] ack job 1100:101, : downstairs
51924 Sep 22 23:15:23.534 DEBG up_ds_listen checked 1 jobs, back to waiting
51925 Sep 22 23:15:23.536 WARN returning error on flush!
51926 Sep 22 23:15:23.536 DEBG Flush :1101 extent_limit None deps:[JobId(1100), JobId(1099)] res:false f:40 g:1
51927 Sep 22 23:15:23.536 INFO [lossy] skipping 1101
51928 Sep 22 23:15:23.536 INFO [lossy] skipping 1101
51929 Sep 22 23:15:23.536 INFO [lossy] skipping 1101
51930 Sep 22 23:15:23.536 WARN returning error on flush!
51931 Sep 22 23:15:23.536 DEBG Flush :1101 extent_limit None deps:[JobId(1100), JobId(1099)] res:false f:40 g:1
51932 Sep 22 23:15:23.536 DEBG Flush :1101 extent_limit None deps:[JobId(1100), JobId(1099)] res:true f:40 g:1
51933 Sep 22 23:15:23.536 INFO [lossy] sleeping 1 second
51934 Sep 22 23:15:23.538 DEBG Flush :1099 extent_limit None deps:[JobId(1098)] res:true f:39 g:1
51935 Sep 22 23:15:23.538 INFO [lossy] skipping 1100
51936 Sep 22 23:15:23.544 DEBG Read :1100 deps:[JobId(1099)] res:true
51937 Sep 22 23:15:23.685 DEBG IO Read 1102 has deps [JobId(1101)]
51938 Sep 22 23:15:23.685 DEBG IO Flush 1103 has deps [JobId(1102), JobId(1101)]
51939 Sep 22 23:15:24.086 DEBG [rc] retire 1099 clears [JobId(1098), JobId(1099)], : downstairs
51940 Sep 22 23:15:24.088 DEBG Read :1000 deps:[] res:true
51941 Sep 22 23:15:24.416 ERRO [2] job id 1101 saw error GenericError("test error")
51942 Sep 22 23:15:24.416 ERRO [2] job id 1101 saw error GenericError("test error")
51943 Sep 22 23:15:24.422 DEBG up_ds_listen was notified
51944 Sep 22 23:15:24.422 DEBG up_ds_listen process 1101
51945 Sep 22 23:15:24.422 DEBG [A] ack job 1101:102, : downstairs
51946 Sep 22 23:15:24.422 DEBG up_ds_listen checked 1 jobs, back to waiting
51947 Sep 22 23:15:24.429 INFO [lossy] sleeping 1 second
51948 Sep 22 23:15:24.431 DEBG Flush :1101 extent_limit None deps:[JobId(1100), JobId(1099)] res:true f:40 g:1
51949 Sep 22 23:15:24.437 DEBG Read :1102 deps:[JobId(1101)] res:true
51950 Sep 22 23:15:24.838 DEBG [rc] retire 1101 clears [JobId(1100), JobId(1101)], : downstairs
51951 Sep 22 23:15:24.838 INFO [lossy] skipping 1102
51952 Sep 22 23:15:24.838 INFO [lossy] skipping 1102
51953 Sep 22 23:15:24.838 WARN returning error on read!
51954 Sep 22 23:15:24.838 DEBG Read :1102 deps:[JobId(1101)] res:false
51955 Sep 22 23:15:24.844 DEBG Read :1102 deps:[JobId(1101)] res:true
51956 Sep 22 23:15:24.866 ERRO [2] job id 1102 saw error GenericError("test error")
51957 Sep 22 23:15:24.868 WARN returning error on flush!
51958 Sep 22 23:15:24.868 DEBG Flush :1103 extent_limit None deps:[JobId(1102), JobId(1101)] res:false f:41 g:1
51959 Sep 22 23:15:24.868 WARN returning error on flush!
51960 Sep 22 23:15:24.868 DEBG Flush :1103 extent_limit None deps:[JobId(1102), JobId(1101)] res:false f:41 g:1
51961 Sep 22 23:15:24.868 DEBG Flush :1103 extent_limit None deps:[JobId(1102), JobId(1101)] res:true f:41 g:1
51962 Sep 22 23:15:24.868 INFO [lossy] sleeping 1 second
51963 Sep 22 23:15:24.869 WARN returning error on flush!
51964 Sep 22 23:15:24.869 DEBG Flush :1103 extent_limit None deps:[JobId(1102), JobId(1101)] res:false f:41 g:1
51965 Sep 22 23:15:24.869 DEBG Flush :1103 extent_limit None deps:[JobId(1102), JobId(1101)] res:true f:41 g:1
51966 Sep 22 23:15:24.869 INFO [lossy] sleeping 1 second
51967 Sep 22 23:15:25.294 DEBG [0] Read AckReady 1102, : downstairs
51968 Sep 22 23:15:25.294 ERRO [0] job id 1103 saw error GenericError("test error")
51969 Sep 22 23:15:25.343 DEBG IO Flush 1001 has deps [JobId(1000)]
51970 Sep 22 23:15:25.343 INFO Checking if live repair is needed
51971 Sep 22 23:15:25.343 INFO No Live Repair required at this time
51972 Sep 22 23:15:25.517 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:2 g:2
51973 Sep 22 23:15:25.579 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:2 g:2
51974 Sep 22 23:15:25.623 DEBG [2] Read already AckReady 1102, : downstairs
51975 Sep 22 23:15:25.626 ERRO [2] job id 1103 saw error GenericError("test error")
51976 Sep 22 23:15:25.626 ERRO [2] job id 1103 saw error GenericError("test error")
51977 Sep 22 23:15:25.626 DEBG up_ds_listen was notified
51978 Sep 22 23:15:25.626 DEBG up_ds_listen process 1102
51979 Sep 22 23:15:25.626 DEBG [A] ack job 1102:103, : downstairs
51980 Sep 22 23:15:25.669 DEBG Flush :1001 extent_limit None deps:[JobId(1000)] res:true f:2 g:2
51981 Sep 22 23:15:25.683 DEBG up_ds_listen process 1103
51982 Sep 22 23:15:25.683 DEBG [A] ack job 1103:104, : downstairs
51983 Sep 22 23:15:25.683 DEBG up_ds_listen checked 2 jobs, back to waiting
51984 Sep 22 23:15:25.683 DEBG up_ds_listen was notified
51985 Sep 22 23:15:25.683 DEBG up_ds_listen checked 0 jobs, back to waiting
51986 Sep 22 23:15:25.684 INFO [lossy] skipping 1102
51987 Sep 22 23:15:25.690 DEBG Read :1102 deps:[JobId(1101)] res:true
51988 Sep 22 23:15:25.711 DEBG IO Read 1104 has deps [JobId(1103)]
51989 Sep 22 23:15:25.725 DEBG Flush :1103 extent_limit None deps:[JobId(1102), JobId(1101)] res:true f:41 g:1
51990 Sep 22 23:15:25.731 DEBG Read :1104 deps:[JobId(1103)] res:true
51991 Sep 22 23:15:26.129 DEBG [rc] retire 1103 clears [JobId(1102), JobId(1103)], : downstairs
51992 Sep 22 23:15:26.129 INFO [lossy] skipping 1104
51993 Sep 22 23:15:26.129 WARN returning error on read!
51994 Sep 22 23:15:26.129 DEBG Read :1104 deps:[JobId(1103)] res:false
51995 Sep 22 23:15:26.129 WARN returning error on read!
51996 Sep 22 23:15:26.129 DEBG Read :1104 deps:[JobId(1103)] res:false
51997 Sep 22 23:15:26.135 DEBG Read :1104 deps:[JobId(1103)] res:true
51998 Sep 22 23:15:26.162 DEBG Read :1104 deps:[JobId(1103)] res:true
51999 Sep 22 23:15:26.184 ERRO [2] job id 1104 saw error GenericError("test error")
52000 Sep 22 23:15:26.184 ERRO [2] job id 1104 saw error GenericError("test error")
52001 Sep 22 23:15:26.185 DEBG IO Flush 1105 has deps [JobId(1104)]
52002 Sep 22 23:15:26.187 INFO [lossy] skipping 1105
52003 Sep 22 23:15:26.187 DEBG Flush :1105 extent_limit None deps:[JobId(1104)] res:true f:42 g:1
52004 Sep 22 23:15:26.187 INFO [lossy] sleeping 1 second
52005 Sep 22 23:15:26.563 DEBG [1] Read AckReady 1104, : downstairs
52006 Sep 22 23:15:26.564 DEBG up_ds_listen was notified
52007 Sep 22 23:15:26.564 DEBG up_ds_listen process 1104
52008 Sep 22 23:15:26.564 DEBG [A] ack job 1104:105, : downstairs
52009 Sep 22 23:15:26.616 DEBG up_ds_listen checked 1 jobs, back to waiting
52010 Sep 22 23:15:26.618 INFO [lossy] sleeping 1 second
52011 Sep 22 23:15:26.619 DEBG Flush :1105 extent_limit None deps:[JobId(1104)] res:true f:42 g:1
52012 Sep 22 23:15:26.620 INFO [lossy] sleeping 1 second
52013 Sep 22 23:15:26.714 DEBG IO Read 1106 has deps [JobId(1105)]
52014 Sep 22 23:15:27.380 DEBG up_ds_listen was notified
52015 Sep 22 23:15:27.380 DEBG up_ds_listen process 1105
52016 Sep 22 23:15:27.380 DEBG [A] ack job 1105:106, : downstairs
52017 Sep 22 23:15:27.380 DEBG up_ds_listen checked 1 jobs, back to waiting
52018 Sep 22 23:15:27.387 DEBG IO Flush 1107 has deps [JobId(1106), JobId(1105)]
52019 Sep 22 23:15:27.387 INFO [lossy] sleeping 1 second
52020 Sep 22 23:15:27.620 DEBG Flush :1105 extent_limit None deps:[JobId(1104)] res:true f:42 g:1
52021 Sep 22 23:15:27.626 DEBG Read :1106 deps:[JobId(1105)] res:true
52022 Sep 22 23:15:27.647 INFO [lossy] skipping 1106
52023 Sep 22 23:15:27.653 DEBG Read :1106 deps:[JobId(1105)] res:true
52024 test test::integration_test_problematic_downstairs has been running for over 60 seconds
52025 Sep 22 23:15:27.675 DEBG [rc] retire 1105 clears [JobId(1104), JobId(1105)], : downstairs
52026 Sep 22 23:15:27.677 DEBG Flush :1107 extent_limit None deps:[JobId(1106), JobId(1105)] res:true f:43 g:1
52027 Sep 22 23:15:27.677 INFO [lossy] sleeping 1 second
52028 Sep 22 23:15:27.678 DEBG Flush :1107 extent_limit None deps:[JobId(1106), JobId(1105)] res:true f:43 g:1
52029 Sep 22 23:15:27.678 INFO [lossy] sleeping 1 second
52030 test test::integration_test_volume_replace_downstairs_then_takeover has been running for over 60 seconds
52031 Sep 22 23:15:28.101 DEBG [0] Read AckReady 1106, : downstairs
52032 Sep 22 23:15:28.429 DEBG [2] Read already AckReady 1106, : downstairs
52033 Sep 22 23:15:28.431 DEBG up_ds_listen was notified
52034 Sep 22 23:15:28.431 DEBG up_ds_listen process 1106
52035 Sep 22 23:15:28.431 DEBG [A] ack job 1106:107, : downstairs
52036 Sep 22 23:15:28.484 DEBG up_ds_listen process 1107
52037 Sep 22 23:15:28.484 DEBG [A] ack job 1107:108, : downstairs
52038 Sep 22 23:15:28.484 DEBG up_ds_listen checked 2 jobs, back to waiting
52039 Sep 22 23:15:28.484 DEBG up_ds_listen was notified
52040 Sep 22 23:15:28.484 DEBG up_ds_listen checked 0 jobs, back to waiting
52041 Sep 22 23:15:28.485 INFO [lossy] skipping 1106
52042 Sep 22 23:15:28.485 INFO [lossy] skipping 1107
52043 Sep 22 23:15:28.485 WARN returning error on read!
52044 Sep 22 23:15:28.485 DEBG Read :1106 deps:[JobId(1105)] res:false
52045 Sep 22 23:15:28.485 INFO [lossy] skipping 1106
52046 Sep 22 23:15:28.491 DEBG Read :1106 deps:[JobId(1105)] res:true
52047 Sep 22 23:15:28.513 DEBG IO Read 1108 has deps [JobId(1107)]
52048 Sep 22 23:15:28.513 ERRO [1] job id 1106 saw error GenericError("test error")
52049 Sep 22 23:15:28.527 DEBG Flush :1107 extent_limit None deps:[JobId(1106), JobId(1105)] res:true f:43 g:1
52050 Sep 22 23:15:28.532 DEBG Read :1108 deps:[JobId(1107)] res:true
52051 Sep 22 23:15:28.931 DEBG [rc] retire 1107 clears [JobId(1106), JobId(1107)], : downstairs
52052 Sep 22 23:15:28.937 DEBG Read :1108 deps:[JobId(1107)] res:true
52053 Sep 22 23:15:28.965 DEBG Read :1108 deps:[JobId(1107)] res:true
52054 Sep 22 23:15:28.986 DEBG IO Flush 1109 has deps [JobId(1108)]
52055 Sep 22 23:15:28.989 INFO [lossy] skipping 1109
52056 Sep 22 23:15:28.989 DEBG Flush :1109 extent_limit None deps:[JobId(1108)] res:true f:44 g:1
52057 Sep 22 23:15:28.989 INFO [lossy] sleeping 1 second
52058 Sep 22 23:15:29.366 DEBG [1] Read AckReady 1108, : downstairs
52059 Sep 22 23:15:29.367 DEBG up_ds_listen was notified
52060 Sep 22 23:15:29.367 DEBG up_ds_listen process 1108
52061 Sep 22 23:15:29.367 DEBG [A] ack job 1108:109, : downstairs
52062 Sep 22 23:15:29.419 DEBG up_ds_listen checked 1 jobs, back to waiting
52063 Sep 22 23:15:29.422 INFO [lossy] skipping 1109
52064 Sep 22 23:15:29.422 INFO [lossy] skipping 1109
52065 Sep 22 23:15:29.422 DEBG Flush :1109 extent_limit None deps:[JobId(1108)] res:true f:44 g:1
52066 Sep 22 23:15:29.422 INFO [lossy] sleeping 1 second
52067 Sep 22 23:15:29.423 DEBG Flush :1109 extent_limit None deps:[JobId(1108)] res:true f:44 g:1
52068 Sep 22 23:15:29.423 INFO [lossy] sleeping 1 second
52069 Sep 22 23:15:29.518 DEBG IO Read 1110 has deps [JobId(1109)]
52070 Sep 22 23:15:30.187 DEBG up_ds_listen was notified
52071 Sep 22 23:15:30.187 DEBG up_ds_listen process 1109
52072 Sep 22 23:15:30.188 DEBG [A] ack job 1109:110, : downstairs
52073 Sep 22 23:15:30.188 DEBG [rc] retire 1109 clears [JobId(1108), JobId(1109)], : downstairs
52074 Sep 22 23:15:30.188 DEBG up_ds_listen checked 1 jobs, back to waiting
52075 Sep 22 23:15:30.195 DEBG IO Flush 1111 has deps [JobId(1110)]
52076 Sep 22 23:15:30.195 INFO [lossy] sleeping 1 second
52077 Sep 22 23:15:30.429 DEBG Read :1110 deps:[JobId(1109)] res:true
52078 Sep 22 23:15:30.457 DEBG Read :1110 deps:[JobId(1109)] res:true
52079 Sep 22 23:15:30.481 INFO [lossy] skipping 1111
52080 Sep 22 23:15:30.481 INFO [lossy] skipping 1111
52081 Sep 22 23:15:30.481 WARN returning error on flush!
52082 Sep 22 23:15:30.481 DEBG Flush :1111 extent_limit None deps:[JobId(1110)] res:false f:45 g:1
52083 Sep 22 23:15:30.481 WARN returning error on flush!
52084 Sep 22 23:15:30.481 DEBG Flush :1111 extent_limit None deps:[JobId(1110)] res:false f:45 g:1
52085 Sep 22 23:15:30.481 INFO [lossy] skipping 1111
52086 Sep 22 23:15:30.481 DEBG Flush :1111 extent_limit None deps:[JobId(1110)] res:true f:45 g:1
52087 Sep 22 23:15:30.481 INFO [lossy] sleeping 1 second
52088 Sep 22 23:15:30.529 INFO [lossy] skipping 1111
52089 Sep 22 23:15:30.530 DEBG Flush :1111 extent_limit None deps:[JobId(1110)] res:true f:45 g:1
52090 Sep 22 23:15:30.859 DEBG [2] Read AckReady 1110, : downstairs
52091 Sep 22 23:15:30.860 ERRO [2] job id 1111 saw error GenericError("test error")
52092 Sep 22 23:15:30.860 ERRO [2] job id 1111 saw error GenericError("test error")
52093 Sep 22 23:15:30.860 DEBG up_ds_listen was notified
52094 Sep 22 23:15:30.860 DEBG up_ds_listen process 1110
52095 Sep 22 23:15:30.860 DEBG [A] ack job 1110:111, : downstairs
52096 Sep 22 23:15:30.912 DEBG up_ds_listen checked 1 jobs, back to waiting
52097 Sep 22 23:15:31.291 DEBG up_ds_listen was notified
52098 Sep 22 23:15:31.291 DEBG up_ds_listen process 1111
52099 Sep 22 23:15:31.291 DEBG [A] ack job 1111:112, : downstairs
52100 Sep 22 23:15:31.291 DEBG up_ds_listen checked 1 jobs, back to waiting
52101 Sep 22 23:15:31.298 DEBG Read :1110 deps:[JobId(1109)] res:true
52102 Sep 22 23:15:31.319 DEBG IO Read 1112 has deps [JobId(1111)]
52103 Sep 22 23:15:31.332 INFO [lossy] skipping 1112
52104 Sep 22 23:15:31.332 INFO [lossy] skipping 1112
52105 Sep 22 23:15:31.338 DEBG Read :1112 deps:[JobId(1111)] res:true
52106 Sep 22 23:15:31.361 WARN returning error on flush!
52107 Sep 22 23:15:31.361 DEBG Flush :1111 extent_limit None deps:[JobId(1110)] res:false f:45 g:1
52108 Sep 22 23:15:31.361 INFO [lossy] skipping 1111
52109 Sep 22 23:15:31.361 DEBG Flush :1111 extent_limit None deps:[JobId(1110)] res:true f:45 g:1
52110 Sep 22 23:15:31.361 INFO [lossy] sleeping 1 second
52111 Sep 22 23:15:31.738 ERRO [1] job id 1111 saw error GenericError("test error")
52112 Sep 22 23:15:31.739 DEBG [rc] retire 1111 clears [JobId(1110), JobId(1111)], : downstairs
52113 Sep 22 23:15:31.786 DEBG IO Flush 1113 has deps [JobId(1112)]
52114 Sep 22 23:15:31.786 WARN returning error on read!
52115 Sep 22 23:15:31.786 DEBG Read :1112 deps:[JobId(1111)] res:false
52116 Sep 22 23:15:31.792 DEBG Read :1112 deps:[JobId(1111)] res:true
52117 Sep 22 23:15:32.142 DEBG [0] Read AckReady 1112, : downstairs
52118 Sep 22 23:15:32.143 DEBG up_ds_listen was notified
52119 Sep 22 23:15:32.143 DEBG up_ds_listen process 1112
52120 Sep 22 23:15:32.143 DEBG [A] ack job 1112:113, : downstairs
52121 Sep 22 23:15:32.196 DEBG up_ds_listen checked 1 jobs, back to waiting
52122 Sep 22 23:15:32.197 ERRO [2] job id 1112 saw error GenericError("test error")
52123 Sep 22 23:15:32.197 INFO [lossy] skipping 1113
52124 Sep 22 23:15:32.197 INFO [lossy] skipping 1113
52125 Sep 22 23:15:32.197 DEBG Flush :1113 extent_limit None deps:[JobId(1112)] res:true f:46 g:1
52126 Sep 22 23:15:32.197 DEBG IO Read 1114 has deps [JobId(1113)]
52127 Sep 22 23:15:32.216 DEBG Read :1114 deps:[JobId(1113)] res:true
52128 Sep 22 23:15:32.239 DEBG Flush :1113 extent_limit None deps:[JobId(1112)] res:true f:46 g:1
52129 Sep 22 23:15:32.245 DEBG Read :1114 deps:[JobId(1113)] res:true
52130 Sep 22 23:15:32.645 DEBG up_ds_listen was notified
52131 Sep 22 23:15:32.646 DEBG up_ds_listen process 1113
52132 Sep 22 23:15:32.646 DEBG [A] ack job 1113:114, : downstairs
52133 Sep 22 23:15:32.646 DEBG up_ds_listen checked 1 jobs, back to waiting
52134 Sep 22 23:15:32.646 DEBG IO Flush 1115 has deps [JobId(1114), JobId(1113)]
52135 Sep 22 23:15:32.646 INFO [lossy] skipping 1112
52136 Sep 22 23:15:32.657 DEBG Read :1112 deps:[JobId(1111)] res:true
52137 Sep 22 23:15:32.680 DEBG Flush :1115 extent_limit None deps:[JobId(1114), JobId(1113)] res:true f:47 g:1
52138 Sep 22 23:15:33.171 DEBG [0] Read AckReady 1114, : downstairs
52139 Sep 22 23:15:33.172 DEBG up_ds_listen was notified
52140 Sep 22 23:15:33.172 DEBG up_ds_listen process 1114
52141 Sep 22 23:15:33.172 DEBG [A] ack job 1114:115, : downstairs
52142 Sep 22 23:15:33.225 DEBG up_ds_listen checked 1 jobs, back to waiting
52143 Sep 22 23:15:33.226 DEBG IO Read 1116 has deps [JobId(1115)]
52144 Sep 22 23:15:33.246 DEBG Read :1116 deps:[JobId(1115)] res:true
52145 Sep 22 23:15:33.269 INFO [lossy] skipping 1115
52146 Sep 22 23:15:33.270 DEBG Flush :1115 extent_limit None deps:[JobId(1114), JobId(1113)] res:true f:47 g:1
52147 Sep 22 23:15:33.270 INFO [lossy] sleeping 1 second
52148 Sep 22 23:15:33.651 DEBG up_ds_listen was notified
52149 Sep 22 23:15:33.651 DEBG up_ds_listen process 1115
52150 Sep 22 23:15:33.651 DEBG [A] ack job 1115:116, : downstairs
52151 Sep 22 23:15:33.651 DEBG up_ds_listen checked 1 jobs, back to waiting
52152 Sep 22 23:15:33.652 WARN returning error on flush!
52153 Sep 22 23:15:33.653 DEBG Flush :1113 extent_limit None deps:[JobId(1112)] res:false f:46 g:1
52154 Sep 22 23:15:33.653 DEBG Flush :1113 extent_limit None deps:[JobId(1112)] res:true f:46 g:1
52155 Sep 22 23:15:33.653 WARN returning error on read!
52156 Sep 22 23:15:33.653 DEBG Read :1114 deps:[JobId(1113)] res:false
52157 Sep 22 23:15:33.653 WARN 1115 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
52158 Sep 22 23:15:33.653 INFO [lossy] skipping 1116
52159 Sep 22 23:15:33.653 WARN returning error on read!
52160 Sep 22 23:15:33.653 DEBG Read :1114 deps:[JobId(1113)] res:false
52161 Sep 22 23:15:33.653 WARN returning error on read!
52162 Sep 22 23:15:33.653 DEBG Read :1114 deps:[JobId(1113)] res:false
52163 Sep 22 23:15:33.659 DEBG Read :1114 deps:[JobId(1113)] res:true
52164 Sep 22 23:15:34.057 ERRO [1] job id 1113 saw error GenericError("test error")
52165 Sep 22 23:15:34.057 DEBG [rc] retire 1113 clears [JobId(1112), JobId(1113)], : downstairs
52166 Sep 22 23:15:34.057 ERRO [1] job id 1114 saw error GenericError("test error")
52167 Sep 22 23:15:34.057 ERRO [1] job id 1114 saw error GenericError("test error")
52168 Sep 22 23:15:34.057 ERRO [1] job id 1114 saw error GenericError("test error")
52169 Sep 22 23:15:34.058 DEBG IO Flush 1117 has deps [JobId(1116), JobId(1115)]
52170 Sep 22 23:15:34.060 INFO [lossy] sleeping 1 second
52171 Sep 22 23:15:34.435 DEBG [0] Read AckReady 1116, : downstairs
52172 Sep 22 23:15:34.436 DEBG up_ds_listen was notified
52173 Sep 22 23:15:34.436 DEBG up_ds_listen process 1116
52174 Sep 22 23:15:34.436 DEBG [A] ack job 1116:117, : downstairs
52175 Sep 22 23:15:34.489 DEBG up_ds_listen checked 1 jobs, back to waiting
52176 Sep 22 23:15:34.495 DEBG Read :1116 deps:[JobId(1115)] res:true
52177 Sep 22 23:15:34.517 DEBG IO Read 1118 has deps [JobId(1117)]
52178 Sep 22 23:15:34.531 DEBG Flush :1115 extent_limit None deps:[JobId(1114), JobId(1113)] res:true f:47 g:1
52179 Sep 22 23:15:34.531 INFO [lossy] skipping 1116
52180 Sep 22 23:15:34.531 WARN returning error on read!
52181 Sep 22 23:15:34.531 DEBG Read :1116 deps:[JobId(1115)] res:false
52182 Sep 22 23:15:34.537 DEBG Read :1116 deps:[JobId(1115)] res:true
52183 Sep 22 23:15:34.606 DEBG IO Flush 1119 has deps [JobId(1118), JobId(1117)]
52184 Sep 22 23:15:34.937 DEBG [rc] retire 1115 clears [JobId(1114), JobId(1115)], : downstairs
52185 Sep 22 23:15:34.937 ERRO [1] job id 1116 saw error GenericError("test error")
52186 Sep 22 23:15:34.939 INFO [lossy] skipping 1117
52187 Sep 22 23:15:34.939 DEBG Flush :1117 extent_limit None deps:[JobId(1116), JobId(1115)] res:true f:48 g:1
52188 Sep 22 23:15:34.940 WARN returning error on read!
52189 Sep 22 23:15:34.940 DEBG Read :1118 deps:[JobId(1117)] res:false
52190 Sep 22 23:15:34.945 DEBG Read :1118 deps:[JobId(1117)] res:true
52191 Sep 22 23:15:35.344 ERRO [2] job id 1118 saw error GenericError("test error")
52192 Sep 22 23:15:35.344 INFO [lossy] skipping 1117
52193 Sep 22 23:15:35.344 INFO [lossy] skipping 1119
52194 Sep 22 23:15:35.344 DEBG Flush :1117 extent_limit None deps:[JobId(1116), JobId(1115)] res:true f:48 g:1
52195 Sep 22 23:15:35.350 DEBG Read :1118 deps:[JobId(1117)] res:true
52196 Sep 22 23:15:35.372 DEBG up_ds_listen was notified
52197 Sep 22 23:15:35.372 DEBG up_ds_listen process 1117
52198 Sep 22 23:15:35.372 DEBG [A] ack job 1117:118, : downstairs
52199 Sep 22 23:15:35.372 DEBG up_ds_listen checked 1 jobs, back to waiting
52200 Sep 22 23:15:35.374 DEBG Flush :1117 extent_limit None deps:[JobId(1116), JobId(1115)] res:true f:48 g:1
52201 Sep 22 23:15:35.380 DEBG Read :1118 deps:[JobId(1117)] res:true
52202 Sep 22 23:15:35.784 DEBG [rc] retire 1117 clears [JobId(1116), JobId(1117)], : downstairs
52203 Sep 22 23:15:35.786 INFO [lossy] skipping 1119
52204 Sep 22 23:15:35.786 DEBG Flush :1119 extent_limit None deps:[JobId(1118), JobId(1117)] res:true f:49 g:1
52205 Sep 22 23:15:36.162 DEBG [2] Read AckReady 1118, : downstairs
52206 Sep 22 23:15:36.162 DEBG up_ds_listen was notified
52207 Sep 22 23:15:36.163 DEBG up_ds_listen process 1118
52208 Sep 22 23:15:36.163 DEBG [A] ack job 1118:119, : downstairs
52209 Sep 22 23:15:36.215 DEBG up_ds_listen checked 1 jobs, back to waiting
52210 Sep 22 23:15:36.218 INFO [lossy] skipping 1119
52211 Sep 22 23:15:36.218 DEBG Flush :1119 extent_limit None deps:[JobId(1118), JobId(1117)] res:true f:49 g:1
52212 Sep 22 23:15:36.218 INFO [lossy] sleeping 1 second
52213 Sep 22 23:15:36.265 DEBG IO Read 1120 has deps [JobId(1119)]
52214 Sep 22 23:15:36.600 DEBG up_ds_listen was notified
52215 Sep 22 23:15:36.600 DEBG up_ds_listen process 1119
52216 Sep 22 23:15:36.600 DEBG [A] ack job 1119:120, : downstairs
52217 Sep 22 23:15:36.600 DEBG up_ds_listen checked 1 jobs, back to waiting
52218 Sep 22 23:15:36.607 INFO [lossy] sleeping 1 second
52219 Sep 22 23:15:36.609 INFO [lossy] skipping 1119
52220 Sep 22 23:15:36.609 DEBG Flush :1119 extent_limit None deps:[JobId(1118), JobId(1117)] res:true f:49 g:1
52221 Sep 22 23:15:36.609 INFO [lossy] skipping 1120
52222 Sep 22 23:15:36.615 DEBG Read :1120 deps:[JobId(1119)] res:true
52223 Sep 22 23:15:37.013 DEBG [rc] retire 1119 clears [JobId(1118), JobId(1119)], : downstairs
52224 Sep 22 23:15:37.014 DEBG IO Flush 1121 has deps [JobId(1120)]
52225 Sep 22 23:15:37.016 DEBG Flush :1121 extent_limit None deps:[JobId(1120)] res:true f:50 g:1
52226 Sep 22 23:15:37.016 INFO [lossy] sleeping 1 second
52227 Sep 22 23:15:37.392 DEBG [1] Read AckReady 1120, : downstairs
52228 Sep 22 23:15:37.392 DEBG up_ds_listen was notified
52229 Sep 22 23:15:37.392 DEBG up_ds_listen process 1120
52230 Sep 22 23:15:37.392 DEBG [A] ack job 1120:121, : downstairs
52231 Sep 22 23:15:37.445 DEBG up_ds_listen checked 1 jobs, back to waiting
52232 Sep 22 23:15:37.446 INFO [lossy] skipping 1120
52233 Sep 22 23:15:37.446 WARN returning error on read!
52234 Sep 22 23:15:37.446 DEBG Read :1120 deps:[JobId(1119)] res:false
52235 Sep 22 23:15:37.452 DEBG Read :1120 deps:[JobId(1119)] res:true
52236 Sep 22 23:15:37.473 DEBG IO Read 1122 has deps [JobId(1121)]
52237 Sep 22 23:15:37.473 ERRO [0] job id 1120 saw error GenericError("test error")
52238 Sep 22 23:15:37.487 DEBG Flush :1121 extent_limit None deps:[JobId(1120)] res:true f:50 g:1
52239 Sep 22 23:15:37.493 DEBG Read :1122 deps:[JobId(1121)] res:true
52240 Sep 22 23:15:37.891 DEBG up_ds_listen was notified
52241 Sep 22 23:15:37.891 DEBG up_ds_listen process 1121
52242 Sep 22 23:15:37.891 DEBG [A] ack job 1121:122, : downstairs
52243 Sep 22 23:15:37.891 DEBG up_ds_listen checked 1 jobs, back to waiting
52244 Sep 22 23:15:37.891 DEBG IO Flush 1123 has deps [JobId(1122), JobId(1121)]
52245 Sep 22 23:15:37.892 WARN returning error on read!
52246 Sep 22 23:15:37.892 DEBG Read :1120 deps:[JobId(1119)] res:false
52247 Sep 22 23:15:37.892 INFO [lossy] skipping 1120
52248 Sep 22 23:15:37.892 INFO [lossy] skipping 1120
52249 Sep 22 23:15:37.897 DEBG Read :1120 deps:[JobId(1119)] res:true
52250 Sep 22 23:15:37.919 ERRO [2] job id 1120 saw error GenericError("test error")
52251 Sep 22 23:15:37.921 DEBG Flush :1123 extent_limit None deps:[JobId(1122), JobId(1121)] res:true f:51 g:1
52252 Sep 22 23:15:37.921 INFO [lossy] sleeping 1 second
52253 Sep 22 23:15:38.296 DEBG [0] Read AckReady 1122, : downstairs
52254 Sep 22 23:15:38.297 DEBG up_ds_listen was notified
52255 Sep 22 23:15:38.297 DEBG up_ds_listen process 1122
52256 Sep 22 23:15:38.297 DEBG [A] ack job 1122:123, : downstairs
52257 Sep 22 23:15:38.350 DEBG up_ds_listen checked 1 jobs, back to waiting
52258 Sep 22 23:15:38.352 DEBG Flush :1121 extent_limit None deps:[JobId(1120)] res:true f:50 g:1
52259 Sep 22 23:15:38.352 INFO [lossy] skipping 1122
52260 Sep 22 23:15:38.352 INFO [lossy] skipping 1122
52261 Sep 22 23:15:38.358 DEBG Read :1122 deps:[JobId(1121)] res:true
52262 Sep 22 23:15:38.385 DEBG Read :1122 deps:[JobId(1121)] res:true
52263 Sep 22 23:15:38.454 DEBG IO Read 1124 has deps [JobId(1123)]
52264 Sep 22 23:15:38.784 DEBG [rc] retire 1121 clears [JobId(1120), JobId(1121)], : downstairs
52265 Sep 22 23:15:38.800 INFO [lossy] skipping 1123
52266 Sep 22 23:15:38.800 WARN returning error on flush!
52267 Sep 22 23:15:38.800 DEBG Flush :1123 extent_limit None deps:[JobId(1122), JobId(1121)] res:false f:51 g:1
52268 Sep 22 23:15:38.800 INFO [lossy] skipping 1123
52269 Sep 22 23:15:38.800 WARN returning error on flush!
52270 Sep 22 23:15:38.800 DEBG Flush :1123 extent_limit None deps:[JobId(1122), JobId(1121)] res:false f:51 g:1
52271 Sep 22 23:15:38.800 DEBG Flush :1123 extent_limit None deps:[JobId(1122), JobId(1121)] res:true f:51 g:1
52272 Sep 22 23:15:38.800 WARN returning error on read!
52273 Sep 22 23:15:38.800 DEBG Read :1124 deps:[JobId(1123)] res:false
52274 Sep 22 23:15:38.806 DEBG Read :1124 deps:[JobId(1123)] res:true
52275 Sep 22 23:15:38.827 INFO [lossy] skipping 1123
52276 Sep 22 23:15:38.827 WARN returning error on flush!
52277 Sep 22 23:15:38.828 DEBG Flush :1123 extent_limit None deps:[JobId(1122), JobId(1121)] res:false f:51 g:1
52278 Sep 22 23:15:38.828 DEBG Flush :1123 extent_limit None deps:[JobId(1122), JobId(1121)] res:true f:51 g:1
52279 Sep 22 23:15:38.833 DEBG Read :1124 deps:[JobId(1123)] res:true
52280 Sep 22 23:15:39.281 ERRO [2] job id 1123 saw error GenericError("test error")
52281 Sep 22 23:15:39.610 ERRO [1] job id 1123 saw error GenericError("test error")
52282 Sep 22 23:15:39.611 ERRO [1] job id 1123 saw error GenericError("test error")
52283 Sep 22 23:15:39.611 ERRO [1] job id 1124 saw error GenericError("test error")
52284 Sep 22 23:15:39.611 DEBG up_ds_listen was notified
52285 Sep 22 23:15:39.611 DEBG up_ds_listen process 1123
52286 Sep 22 23:15:39.611 DEBG [A] ack job 1123:124, : downstairs
52287 Sep 22 23:15:39.611 DEBG [rc] retire 1123 clears [JobId(1122), JobId(1123)], : downstairs
52288 Sep 22 23:15:39.611 DEBG up_ds_listen checked 1 jobs, back to waiting
52289 Sep 22 23:15:39.611 WARN returning error on read!
52290 Sep 22 23:15:39.611 DEBG Read :1124 deps:[JobId(1123)] res:false
52291 Sep 22 23:15:39.617 DEBG Read :1124 deps:[JobId(1123)] res:true
52292 Sep 22 23:15:39.638 DEBG IO Flush 1125 has deps [JobId(1124)]
52293 Sep 22 23:15:39.639 ERRO [0] job id 1124 saw error GenericError("test error")
52294 Sep 22 23:15:39.641 DEBG Flush :1125 extent_limit None deps:[JobId(1124)] res:true f:52 g:1
52295 Sep 22 23:15:39.642 INFO [lossy] sleeping 1 second
52296 Sep 22 23:15:40.017 DEBG [1] Read AckReady 1124, : downstairs
52297 Sep 22 23:15:40.018 DEBG up_ds_listen was notified
52298 Sep 22 23:15:40.018 DEBG up_ds_listen process 1124
52299 Sep 22 23:15:40.018 DEBG [A] ack job 1124:125, : downstairs
52300 Sep 22 23:15:40.071 DEBG up_ds_listen checked 1 jobs, back to waiting
52301 Sep 22 23:15:40.073 DEBG Flush :1125 extent_limit None deps:[JobId(1124)] res:true f:52 g:1
52302 Sep 22 23:15:40.075 DEBG Flush :1125 extent_limit None deps:[JobId(1124)] res:true f:52 g:1
52303 Sep 22 23:15:40.075 INFO [lossy] sleeping 1 second
52304 Sep 22 23:15:40.170 DEBG IO Read 1126 has deps [JobId(1125)]
52305 Sep 22 23:15:40.835 DEBG up_ds_listen was notified
52306 Sep 22 23:15:40.835 DEBG up_ds_listen process 1125
52307 Sep 22 23:15:40.835 DEBG [A] ack job 1125:126, : downstairs
52308 Sep 22 23:15:40.835 DEBG [rc] retire 1125 clears [JobId(1124), JobId(1125)], : downstairs
52309 Sep 22 23:15:40.835 DEBG up_ds_listen checked 1 jobs, back to waiting
52310 Sep 22 23:15:40.842 DEBG IO Flush 1127 has deps [JobId(1126)]
52311 Sep 22 23:15:40.848 DEBG Read :1126 deps:[JobId(1125)] res:true
52312 Sep 22 23:15:40.875 DEBG Read :1126 deps:[JobId(1125)] res:true
52313 Sep 22 23:15:40.899 DEBG Flush :1127 extent_limit None deps:[JobId(1126)] res:true f:53 g:1
52314 Sep 22 23:15:40.900 INFO [lossy] skipping 1127
52315 Sep 22 23:15:40.900 DEBG Flush :1127 extent_limit None deps:[JobId(1126)] res:true f:53 g:1
52316 Sep 22 23:15:41.323 DEBG [2] Read AckReady 1126, : downstairs
52317 Sep 22 23:15:41.652 DEBG [1] Read already AckReady 1126, : downstairs
52318 Sep 22 23:15:41.654 DEBG up_ds_listen was notified
52319 Sep 22 23:15:41.654 DEBG up_ds_listen process 1126
52320 Sep 22 23:15:41.654 DEBG [A] ack job 1126:127, : downstairs
52321 Sep 22 23:15:41.707 DEBG up_ds_listen process 1127
52322 Sep 22 23:15:41.707 DEBG [A] ack job 1127:128, : downstairs
52323 Sep 22 23:15:41.707 DEBG up_ds_listen checked 2 jobs, back to waiting
52324 Sep 22 23:15:41.707 DEBG up_ds_listen was notified
52325 Sep 22 23:15:41.707 DEBG up_ds_listen checked 0 jobs, back to waiting
52326 Sep 22 23:15:41.708 WARN returning error on read!
52327 Sep 22 23:15:41.708 DEBG Read :1126 deps:[JobId(1125)] res:false
52328 Sep 22 23:15:41.708 INFO [lossy] skipping 1127
52329 Sep 22 23:15:41.714 DEBG Read :1126 deps:[JobId(1125)] res:true
52330 Sep 22 23:15:41.736 DEBG IO Read 1128 has deps [JobId(1127)]
52331 Sep 22 23:15:41.736 ERRO [0] job id 1126 saw error GenericError("test error")
52332 Sep 22 23:15:41.748 INFO [lossy] sleeping 1 second
52333 Sep 22 23:15:41.754 DEBG Read :1128 deps:[JobId(1127)] res:true
52334 Sep 22 23:15:41.777 DEBG Flush :1127 extent_limit None deps:[JobId(1126)] res:true f:53 g:1
52335 Sep 22 23:15:41.783 DEBG Read :1128 deps:[JobId(1127)] res:true
52336 Sep 22 23:15:42.182 DEBG [rc] retire 1127 clears [JobId(1126), JobId(1127)], : downstairs
52337 Sep 22 23:15:42.559 DEBG [2] Read AckReady 1128, : downstairs
52338 Sep 22 23:15:42.560 DEBG up_ds_listen was notified
52339 Sep 22 23:15:42.560 DEBG up_ds_listen process 1128
52340 Sep 22 23:15:42.560 DEBG [A] ack job 1128:129, : downstairs
52341 Sep 22 23:15:42.612 DEBG up_ds_listen checked 1 jobs, back to waiting
52342 Sep 22 23:15:42.613 DEBG IO Flush 1129 has deps [JobId(1128)]
52343 Sep 22 23:15:42.614 DEBG IO Read 1130 has deps [JobId(1129)]
52344 Sep 22 23:15:42.619 DEBG Flush :1129 extent_limit None deps:[JobId(1128)] res:true f:54 g:1
52345 Sep 22 23:15:42.626 INFO [lossy] sleeping 1 second
52346 Sep 22 23:15:42.628 WARN returning error on flush!
52347 Sep 22 23:15:42.628 DEBG Flush :1129 extent_limit None deps:[JobId(1128)] res:false f:54 g:1
52348 Sep 22 23:15:42.628 INFO [lossy] skipping 1130
52349 Sep 22 23:15:42.628 WARN returning error on flush!
52350 Sep 22 23:15:42.628 DEBG Flush :1129 extent_limit None deps:[JobId(1128)] res:false f:54 g:1
52351 Sep 22 23:15:42.628 INFO [lossy] skipping 1130
52352 Sep 22 23:15:42.628 WARN returning error on flush!
52353 Sep 22 23:15:42.628 DEBG Flush :1129 extent_limit None deps:[JobId(1128)] res:false f:54 g:1
52354 Sep 22 23:15:42.628 DEBG Flush :1129 extent_limit None deps:[JobId(1128)] res:true f:54 g:1
52355 Sep 22 23:15:42.628 INFO [lossy] skipping 1130
52356 Sep 22 23:15:42.629 WARN returning error on read!
52357 Sep 22 23:15:42.629 DEBG Read :1130 deps:[JobId(1129)] res:false
52358 Sep 22 23:15:42.629 INFO [lossy] skipping 1130
52359 Sep 22 23:15:42.634 DEBG Read :1130 deps:[JobId(1129)] res:true
52360 Sep 22 23:15:43.033 ERRO [0] job id 1129 saw error GenericError("test error")
52361 Sep 22 23:15:43.033 ERRO [0] job id 1129 saw error GenericError("test error")
52362 Sep 22 23:15:43.033 ERRO [0] job id 1129 saw error GenericError("test error")
52363 Sep 22 23:15:43.033 ERRO [0] job id 1130 saw error GenericError("test error")
52364 Sep 22 23:15:43.033 DEBG up_ds_listen was notified
52365 Sep 22 23:15:43.033 DEBG up_ds_listen process 1129
52366 Sep 22 23:15:43.033 DEBG [A] ack job 1129:130, : downstairs
52367 Sep 22 23:15:43.033 DEBG up_ds_listen checked 1 jobs, back to waiting
52368 Sep 22 23:15:43.034 INFO [lossy] skipping 1128
52369 Sep 22 23:15:43.034 INFO [lossy] skipping 1130
52370 Sep 22 23:15:43.034 INFO [lossy] skipping 1128
52371 Sep 22 23:15:43.034 INFO [lossy] skipping 1128
52372 Sep 22 23:15:43.034 INFO [lossy] skipping 1128
52373 Sep 22 23:15:43.040 DEBG Read :1128 deps:[JobId(1127)] res:true
52374 Sep 22 23:15:43.063 WARN returning error on flush!
52375 Sep 22 23:15:43.063 DEBG Flush :1129 extent_limit None deps:[JobId(1128)] res:false f:54 g:1
52376 Sep 22 23:15:43.063 INFO [lossy] skipping 1130
52377 Sep 22 23:15:43.063 WARN returning error on flush!
52378 Sep 22 23:15:43.063 DEBG Flush :1129 extent_limit None deps:[JobId(1128)] res:false f:54 g:1
52379 Sep 22 23:15:43.063 WARN returning error on flush!
52380 Sep 22 23:15:43.063 DEBG Flush :1129 extent_limit None deps:[JobId(1128)] res:false f:54 g:1
52381 Sep 22 23:15:43.063 DEBG Flush :1129 extent_limit None deps:[JobId(1128)] res:true f:54 g:1
52382 Sep 22 23:15:43.063 INFO [lossy] skipping 1130
52383 Sep 22 23:15:43.069 DEBG Read :1130 deps:[JobId(1129)] res:true
52384 Sep 22 23:15:43.091 INFO [lossy] sleeping 1 second
52385 Sep 22 23:15:43.514 DEBG [0] Read AckReady 1130, : downstairs
52386 Sep 22 23:15:43.844 ERRO [1] job id 1129 saw error GenericError("test error")
52387 Sep 22 23:15:43.844 ERRO [1] job id 1129 saw error GenericError("test error")
52388 Sep 22 23:15:43.844 ERRO [1] job id 1129 saw error GenericError("test error")
52389 Sep 22 23:15:43.845 DEBG [rc] retire 1129 clears [JobId(1128), JobId(1129)], : downstairs
52390 Sep 22 23:15:43.845 DEBG up_ds_listen was notified
52391 Sep 22 23:15:43.845 DEBG up_ds_listen process 1130
52392 Sep 22 23:15:43.845 DEBG [A] ack job 1130:131, : downstairs
52393 Sep 22 23:15:43.898 DEBG up_ds_listen checked 1 jobs, back to waiting
52394 Sep 22 23:15:43.899 DEBG IO Flush 1131 has deps [JobId(1130)]
52395 Sep 22 23:15:43.905 DEBG Read :1130 deps:[JobId(1129)] res:true
52396 Sep 22 23:15:43.927 DEBG IO Read 1132 has deps [JobId(1131)]
52397 Sep 22 23:15:43.941 INFO [lossy] sleeping 1 second
52398 Sep 22 23:15:44.320 INFO [lossy] sleeping 1 second
52399 Sep 22 23:15:44.321 DEBG Flush :1131 extent_limit None deps:[JobId(1130)] res:true f:55 g:1
52400 Sep 22 23:15:44.327 DEBG Read :1132 deps:[JobId(1131)] res:true
52401 Sep 22 23:15:44.725 DEBG IO Flush 1133 has deps [JobId(1132), JobId(1131)]
52402 Sep 22 23:15:44.728 INFO [lossy] sleeping 1 second
52403 Sep 22 23:15:45.103 DEBG [0] Read AckReady 1132, : downstairs
52404 Sep 22 23:15:45.104 DEBG up_ds_listen was notified
52405 Sep 22 23:15:45.104 DEBG up_ds_listen process 1132
52406 Sep 22 23:15:45.104 DEBG [A] ack job 1132:133, : downstairs
52407 Sep 22 23:15:45.157 DEBG up_ds_listen checked 1 jobs, back to waiting
52408 Sep 22 23:15:45.158 INFO [lossy] skipping 1131
52409 Sep 22 23:15:45.158 INFO [lossy] skipping 1132
52410 Sep 22 23:15:45.158 WARN returning error on flush!
52411 Sep 22 23:15:45.158 DEBG Flush :1131 extent_limit None deps:[JobId(1130)] res:false f:55 g:1
52412 Sep 22 23:15:45.158 WARN returning error on flush!
52413 Sep 22 23:15:45.158 DEBG Flush :1131 extent_limit None deps:[JobId(1130)] res:false f:55 g:1
52414 Sep 22 23:15:45.158 INFO [lossy] skipping 1131
52415 Sep 22 23:15:45.158 INFO [lossy] skipping 1131
52416 Sep 22 23:15:45.158 WARN returning error on flush!
52417 Sep 22 23:15:45.158 DEBG Flush :1131 extent_limit None deps:[JobId(1130)] res:false f:55 g:1
52418 Sep 22 23:15:45.158 DEBG Flush :1131 extent_limit None deps:[JobId(1130)] res:true f:55 g:1
52419 Sep 22 23:15:45.158 INFO [lossy] skipping 1132
52420 Sep 22 23:15:45.158 WARN 1133 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
52421 Sep 22 23:15:45.164 DEBG Read :1132 deps:[JobId(1131)] res:true
52422 Sep 22 23:15:45.186 DEBG IO Read 1134 has deps [JobId(1133)]
52423 Sep 22 23:15:45.186 ERRO [1] job id 1131 saw error GenericError("test error")
52424 Sep 22 23:15:45.186 ERRO [1] job id 1131 saw error GenericError("test error")
52425 Sep 22 23:15:45.186 ERRO [1] job id 1131 saw error GenericError("test error")
52426 Sep 22 23:15:45.191 DEBG up_ds_listen was notified
52427 Sep 22 23:15:45.191 DEBG up_ds_listen process 1131
52428 Sep 22 23:15:45.191 DEBG [A] ack job 1131:132, : downstairs
52429 Sep 22 23:15:45.191 DEBG up_ds_listen checked 1 jobs, back to waiting
52430 Sep 22 23:15:45.199 DEBG Flush :1133 extent_limit None deps:[JobId(1132), JobId(1131)] res:true f:56 g:1
52431 Sep 22 23:15:45.199 INFO [lossy] skipping 1134
52432 Sep 22 23:15:45.199 WARN returning error on read!
52433 Sep 22 23:15:45.199 DEBG Read :1134 deps:[JobId(1133)] res:false
52434 Sep 22 23:15:45.205 DEBG Read :1134 deps:[JobId(1133)] res:true
52435 Sep 22 23:15:45.274 DEBG IO Flush 1135 has deps [JobId(1134), JobId(1133)]
52436 Sep 22 23:15:45.604 ERRO [1] job id 1134 saw error GenericError("test error")
52437 Sep 22 23:15:45.605 DEBG Flush :1131 extent_limit None deps:[JobId(1130)] res:true f:55 g:1
52438 Sep 22 23:15:45.610 DEBG Read :1132 deps:[JobId(1131)] res:true
52439 Sep 22 23:15:45.632 DEBG [rc] retire 1131 clears [JobId(1130), JobId(1131)], : downstairs
52440 Sep 22 23:15:45.633 INFO [lossy] skipping 1135
52441 Sep 22 23:15:45.633 INFO [lossy] skipping 1135
52442 Sep 22 23:15:45.633 INFO [lossy] skipping 1135
52443 Sep 22 23:15:45.633 DEBG Flush :1135 extent_limit None deps:[JobId(1134), JobId(1133)] res:true f:57 g:1
52444 Sep 22 23:15:46.009 DEBG [1] Read AckReady 1134, : downstairs
52445 Sep 22 23:15:46.010 DEBG up_ds_listen was notified
52446 Sep 22 23:15:46.010 DEBG up_ds_listen process 1134
52447 Sep 22 23:15:46.010 DEBG [A] ack job 1134:135, : downstairs
52448 Sep 22 23:15:46.063 DEBG up_ds_listen checked 1 jobs, back to waiting
52449 Sep 22 23:15:46.065 WARN returning error on flush!
52450 Sep 22 23:15:46.065 DEBG Flush :1133 extent_limit None deps:[JobId(1132), JobId(1131)] res:false f:56 g:1
52451 Sep 22 23:15:46.065 DEBG Flush :1133 extent_limit None deps:[JobId(1132), JobId(1131)] res:true f:56 g:1
52452 Sep 22 23:15:46.071 DEBG Read :1134 deps:[JobId(1133)] res:true
52453 Sep 22 23:15:46.093 DEBG Flush :1133 extent_limit None deps:[JobId(1132), JobId(1131)] res:true f:56 g:1
52454 Sep 22 23:15:46.093 WARN returning error on read!
52455 Sep 22 23:15:46.093 DEBG Read :1134 deps:[JobId(1133)] res:false
52456 Sep 22 23:15:46.093 WARN returning error on read!
52457 Sep 22 23:15:46.093 DEBG Read :1134 deps:[JobId(1133)] res:false
52458 Sep 22 23:15:46.093 INFO [lossy] skipping 1134
52459 Sep 22 23:15:46.093 INFO [lossy] skipping 1134
52460 Sep 22 23:15:46.093 WARN returning error on read!
52461 Sep 22 23:15:46.093 DEBG Read :1134 deps:[JobId(1133)] res:false
52462 Sep 22 23:15:46.093 INFO [lossy] skipping 1134
52463 Sep 22 23:15:46.093 INFO [lossy] skipping 1134
52464 Sep 22 23:15:46.093 WARN returning error on read!
52465 Sep 22 23:15:46.093 DEBG Read :1134 deps:[JobId(1133)] res:false
52466 Sep 22 23:15:46.100 DEBG Read :1134 deps:[JobId(1133)] res:true
52467 Sep 22 23:15:46.170 DEBG IO Read 1136 has deps [JobId(1135)]
52468 Sep 22 23:15:46.170 ERRO [0] job id 1134 saw error GenericError("test error")
52469 Sep 22 23:15:46.170 ERRO [0] job id 1134 saw error GenericError("test error")
52470 Sep 22 23:15:46.170 ERRO [0] job id 1134 saw error GenericError("test error")
52471 Sep 22 23:15:46.170 ERRO [0] job id 1134 saw error GenericError("test error")
52472 Sep 22 23:15:46.500 ERRO [2] job id 1133 saw error GenericError("test error")
52473 Sep 22 23:15:46.505 DEBG up_ds_listen was notified
52474 Sep 22 23:15:46.505 DEBG up_ds_listen process 1133
52475 Sep 22 23:15:46.505 DEBG [A] ack job 1133:134, : downstairs
52476 Sep 22 23:15:46.505 DEBG [rc] retire 1133 clears [JobId(1132), JobId(1133)], : downstairs
52477 Sep 22 23:15:46.505 DEBG up_ds_listen checked 1 jobs, back to waiting
52478 Sep 22 23:15:46.512 INFO [lossy] skipping 1136
52479 Sep 22 23:15:46.518 DEBG Read :1136 deps:[JobId(1135)] res:true
52480 Sep 22 23:15:46.542 WARN returning error on flush!
52481 Sep 22 23:15:46.542 DEBG Flush :1135 extent_limit None deps:[JobId(1134), JobId(1133)] res:false f:57 g:1
52482 Sep 22 23:15:46.542 DEBG Flush :1135 extent_limit None deps:[JobId(1134), JobId(1133)] res:true f:57 g:1
52483 Sep 22 23:15:46.542 INFO [lossy] sleeping 1 second
52484 Sep 22 23:15:46.544 INFO [lossy] sleeping 1 second
52485 Sep 22 23:15:47.300 ERRO [2] job id 1135 saw error GenericError("test error")
52486 Sep 22 23:15:47.300 DEBG up_ds_listen was notified
52487 Sep 22 23:15:47.300 DEBG up_ds_listen process 1135
52488 Sep 22 23:15:47.300 DEBG [A] ack job 1135:136, : downstairs
52489 Sep 22 23:15:47.300 DEBG up_ds_listen checked 1 jobs, back to waiting
52490 Sep 22 23:15:47.302 DEBG IO Flush 1137 has deps [JobId(1136), JobId(1135)]
52491 Sep 22 23:15:47.302 INFO [lossy] sleeping 1 second
52492 Sep 22 23:15:47.678 DEBG [1] Read AckReady 1136, : downstairs
52493 Sep 22 23:15:47.679 DEBG up_ds_listen was notified
52494 Sep 22 23:15:47.679 DEBG up_ds_listen process 1136
52495 Sep 22 23:15:47.679 DEBG [A] ack job 1136:137, : downstairs
52496 Sep 22 23:15:47.731 DEBG up_ds_listen checked 1 jobs, back to waiting
52497 Sep 22 23:15:47.738 DEBG Read :1136 deps:[JobId(1135)] res:true
52498 Sep 22 23:15:47.759 DEBG Flush :1135 extent_limit None deps:[JobId(1134), JobId(1133)] res:true f:57 g:1
52499 Sep 22 23:15:47.765 DEBG Read :1136 deps:[JobId(1135)] res:true
52500 Sep 22 23:15:47.786 DEBG IO Read 1138 has deps [JobId(1137)]
52501 Sep 22 23:15:47.787 DEBG [rc] retire 1135 clears [JobId(1134), JobId(1135)], : downstairs
52502 Sep 22 23:15:47.801 DEBG Flush :1137 extent_limit None deps:[JobId(1136), JobId(1135)] res:true f:58 g:1
52503 Sep 22 23:15:47.806 DEBG Read :1138 deps:[JobId(1137)] res:true
52504 Sep 22 23:15:47.828 DEBG Flush :1137 extent_limit None deps:[JobId(1136), JobId(1135)] res:true f:58 g:1
52505 Sep 22 23:15:47.828 WARN returning error on read!
52506 Sep 22 23:15:47.828 DEBG Read :1138 deps:[JobId(1137)] res:false
52507 Sep 22 23:15:47.834 DEBG Read :1138 deps:[JobId(1137)] res:true
52508 Sep 22 23:15:47.952 DEBG IO Flush 1139 has deps [JobId(1138), JobId(1137)]
52509 Sep 22 23:15:48.282 ERRO [0] job id 1138 saw error GenericError("test error")
52510 Sep 22 23:15:48.518 DEBG [0] Read AckReady 1000, : downstairs
52511 Sep 22 23:15:48.613 DEBG up_ds_listen was notified
52512 Sep 22 23:15:48.613 DEBG up_ds_listen process 1137
52513 Sep 22 23:15:48.614 DEBG [A] ack job 1137:138, : downstairs
52514 Sep 22 23:15:48.614 DEBG up_ds_listen checked 1 jobs, back to waiting
52515 Sep 22 23:15:48.614 INFO [lossy] skipping 1137
52516 Sep 22 23:15:48.614 INFO [lossy] skipping 1138
52517 Sep 22 23:15:48.614 DEBG Flush :1137 extent_limit None deps:[JobId(1136), JobId(1135)] res:true f:58 g:1
52518 Sep 22 23:15:48.620 DEBG Read :1138 deps:[JobId(1137)] res:true
52519 Sep 22 23:15:48.642 DEBG [rc] retire 1137 clears [JobId(1136), JobId(1137)], : downstairs
52520 Sep 22 23:15:48.645 DEBG Flush :1139 extent_limit None deps:[JobId(1138), JobId(1137)] res:true f:59 g:1
52521 Sep 22 23:15:48.646 INFO [lossy] skipping 1139
52522 Sep 22 23:15:48.646 INFO [lossy] skipping 1139
52523 Sep 22 23:15:48.646 INFO [lossy] skipping 1139
52524 Sep 22 23:15:48.646 WARN returning error on flush!
52525 Sep 22 23:15:48.646 DEBG Flush :1139 extent_limit None deps:[JobId(1138), JobId(1137)] res:false f:59 g:1
52526 Sep 22 23:15:48.646 WARN returning error on flush!
52527 Sep 22 23:15:48.646 DEBG Flush :1139 extent_limit None deps:[JobId(1138), JobId(1137)] res:false f:59 g:1
52528 Sep 22 23:15:48.646 DEBG Flush :1139 extent_limit None deps:[JobId(1138), JobId(1137)] res:true f:59 g:1
52529 Sep 22 23:15:48.646 INFO [lossy] sleeping 1 second
52530 Sep 22 23:15:49.070 DEBG [0] Read AckReady 1138, : downstairs
52531 Sep 22 23:15:49.071 ERRO [0] job id 1139 saw error GenericError("test error")
52532 Sep 22 23:15:49.071 ERRO [0] job id 1139 saw error GenericError("test error")
52533 Sep 22 23:15:49.399 DEBG [2] Read already AckReady 1138, : downstairs
52534 Sep 22 23:15:49.401 DEBG up_ds_listen was notified
52535 Sep 22 23:15:49.401 DEBG up_ds_listen process 1138
52536 Sep 22 23:15:49.401 DEBG [A] ack job 1138:139, : downstairs
52537 Sep 22 23:15:49.454 DEBG up_ds_listen process 1139
52538 Sep 22 23:15:49.454 DEBG [A] ack job 1139:140, : downstairs
52539 Sep 22 23:15:49.454 DEBG up_ds_listen checked 2 jobs, back to waiting
52540 Sep 22 23:15:49.454 DEBG up_ds_listen was notified
52541 Sep 22 23:15:49.454 DEBG up_ds_listen checked 0 jobs, back to waiting
52542 Sep 22 23:15:49.456 INFO [lossy] sleeping 1 second
52543 Sep 22 23:15:49.504 DEBG IO Read 1140 has deps [JobId(1139)]
52544 Sep 22 23:15:49.851 DEBG Read :1140 deps:[JobId(1139)] res:true
52545 Sep 22 23:15:49.878 DEBG Read :1140 deps:[JobId(1139)] res:true
52546 Sep 22 23:15:50.326 DEBG [0] Read AckReady 1140, : downstairs
52547 Sep 22 23:15:50.655 DEBG [2] Read already AckReady 1140, : downstairs
52548 Sep 22 23:15:50.657 DEBG up_ds_listen was notified
52549 Sep 22 23:15:50.657 DEBG up_ds_listen process 1140
52550 Sep 22 23:15:50.657 DEBG [A] ack job 1140:141, : downstairs
52551 Sep 22 23:15:50.709 DEBG up_ds_listen checked 1 jobs, back to waiting
52552 Sep 22 23:15:50.711 DEBG IO Flush 1141 has deps [JobId(1140), JobId(1139)]
52553 Sep 22 23:15:50.711 WARN returning error on flush!
52554 Sep 22 23:15:50.711 DEBG Flush :1139 extent_limit None deps:[JobId(1138), JobId(1137)] res:false f:59 g:1
52555 Sep 22 23:15:50.711 INFO [lossy] skipping 1140
52556 Sep 22 23:15:50.711 DEBG Flush :1139 extent_limit None deps:[JobId(1138), JobId(1137)] res:true f:59 g:1
52557 Sep 22 23:15:50.711 INFO [lossy] skipping 1140
52558 Sep 22 23:15:50.711 WARN returning error on read!
52559 Sep 22 23:15:50.711 DEBG Read :1140 deps:[JobId(1139)] res:false
52560 Sep 22 23:15:50.711 INFO [lossy] skipping 1140
52561 Sep 22 23:15:50.711 WARN returning error on read!
52562 Sep 22 23:15:50.711 DEBG Read :1140 deps:[JobId(1139)] res:false
52563 Sep 22 23:15:50.717 DEBG Read :1140 deps:[JobId(1139)] res:true
52564 Sep 22 23:15:50.739 DEBG IO Read 1142 has deps [JobId(1141)]
52565 Sep 22 23:15:50.739 ERRO [1] job id 1139 saw error GenericError("test error")
52566 Sep 22 23:15:50.739 DEBG [rc] retire 1139 clears [JobId(1138), JobId(1139)], : downstairs
52567 Sep 22 23:15:50.739 ERRO [1] job id 1140 saw error GenericError("test error")
52568 Sep 22 23:15:50.739 ERRO [1] job id 1140 saw error GenericError("test error")
52569 Sep 22 23:15:50.745 INFO [lossy] sleeping 1 second
52570 Sep 22 23:15:50.745 DEBG Flush :1141 extent_limit None deps:[JobId(1140), JobId(1139)] res:true f:60 g:1
52571 Sep 22 23:15:50.751 INFO [lossy] sleeping 1 second
52572 Sep 22 23:15:50.753 INFO [lossy] skipping 1141
52573 Sep 22 23:15:50.753 DEBG Flush :1141 extent_limit None deps:[JobId(1140), JobId(1139)] res:true f:60 g:1
52574 Sep 22 23:15:50.753 INFO [lossy] skipping 1142
52575 Sep 22 23:15:50.753 WARN returning error on read!
52576 Sep 22 23:15:50.753 DEBG Read :1142 deps:[JobId(1141)] res:false
52577 Sep 22 23:15:50.753 WARN returning error on read!
52578 Sep 22 23:15:50.753 DEBG Read :1142 deps:[JobId(1141)] res:false
52579 Sep 22 23:15:50.759 DEBG Read :1142 deps:[JobId(1141)] res:true
52580 Sep 22 23:15:51.157 ERRO [1] job id 1142 saw error GenericError("test error")
52581 Sep 22 23:15:51.158 ERRO [1] job id 1142 saw error GenericError("test error")
52582 Sep 22 23:15:51.158 DEBG up_ds_listen was notified
52583 Sep 22 23:15:51.158 DEBG up_ds_listen process 1141
52584 Sep 22 23:15:51.158 DEBG [A] ack job 1141:142, : downstairs
52585 Sep 22 23:15:51.158 DEBG up_ds_listen checked 1 jobs, back to waiting
52586 Sep 22 23:15:51.535 DEBG [1] Read AckReady 1142, : downstairs
52587 Sep 22 23:15:51.536 DEBG up_ds_listen was notified
52588 Sep 22 23:15:51.536 DEBG up_ds_listen process 1142
52589 Sep 22 23:15:51.536 DEBG [A] ack job 1142:143, : downstairs
52590 Sep 22 23:15:51.588 DEBG up_ds_listen checked 1 jobs, back to waiting
52591 Sep 22 23:15:51.589 DEBG IO Flush 1143 has deps [JobId(1142), JobId(1141)]
52592 Sep 22 23:15:51.590 DEBG IO Read 1144 has deps [JobId(1143)]
52593 Sep 22 23:15:51.595 DEBG Flush :1143 extent_limit None deps:[JobId(1142), JobId(1141)] res:true f:61 g:1
52594 Sep 22 23:15:51.608 DEBG Read :1144 deps:[JobId(1143)] res:true
52595 Sep 22 23:15:52.005 DEBG [1] Read AckReady 1144, : downstairs
52596 Sep 22 23:15:52.006 DEBG up_ds_listen was notified
52597 Sep 22 23:15:52.006 DEBG up_ds_listen process 1144
52598 Sep 22 23:15:52.006 DEBG [A] ack job 1144:145, : downstairs
52599 Sep 22 23:15:52.058 DEBG up_ds_listen checked 1 jobs, back to waiting
52600 Sep 22 23:15:52.059 DEBG Flush :1141 extent_limit None deps:[JobId(1140), JobId(1139)] res:true f:60 g:1
52601 Sep 22 23:15:52.065 DEBG Read :1142 deps:[JobId(1141)] res:true
52602 Sep 22 23:15:52.092 DEBG Read :1142 deps:[JobId(1141)] res:true
52603 Sep 22 23:15:52.114 DEBG IO Flush 1145 has deps [JobId(1144), JobId(1143)]
52604 Sep 22 23:15:52.114 DEBG IO Read 1146 has deps [JobId(1145)]
52605 Sep 22 23:15:52.114 DEBG [rc] retire 1141 clears [JobId(1140), JobId(1141)], : downstairs
52606 Sep 22 23:15:52.127 DEBG Flush :1145 extent_limit None deps:[JobId(1144), JobId(1143)] res:true f:62 g:1
52607 Sep 22 23:15:52.127 INFO [lossy] skipping 1146
52608 Sep 22 23:15:52.127 INFO [lossy] skipping 1146
52609 Sep 22 23:15:52.133 DEBG Read :1146 deps:[JobId(1145)] res:true
52610 Sep 22 23:15:52.157 DEBG Flush :1143 extent_limit None deps:[JobId(1142), JobId(1141)] res:true f:61 g:1
52611 Sep 22 23:15:52.163 DEBG Read :1144 deps:[JobId(1143)] res:true
52612 Sep 22 23:15:52.184 INFO [lossy] skipping 1143
52613 Sep 22 23:15:52.184 INFO [lossy] skipping 1144
52614 Sep 22 23:15:52.184 WARN returning error on flush!
52615 Sep 22 23:15:52.184 DEBG Flush :1143 extent_limit None deps:[JobId(1142), JobId(1141)] res:false f:61 g:1
52616 Sep 22 23:15:52.184 DEBG Flush :1143 extent_limit None deps:[JobId(1142), JobId(1141)] res:true f:61 g:1
52617 Sep 22 23:15:52.190 DEBG Read :1144 deps:[JobId(1143)] res:true
52618 Sep 22 23:15:52.642 ERRO [0] job id 1143 saw error GenericError("test error")
52619 Sep 22 23:15:52.972 DEBG up_ds_listen was notified
52620 Sep 22 23:15:52.972 DEBG up_ds_listen process 1143
52621 Sep 22 23:15:52.972 DEBG [A] ack job 1143:144, : downstairs
52622 Sep 22 23:15:52.972 DEBG [rc] retire 1143 clears [JobId(1142), JobId(1143)], : downstairs
52623 Sep 22 23:15:52.972 DEBG up_ds_listen checked 1 jobs, back to waiting
52624 Sep 22 23:15:52.974 DEBG IO Flush 1147 has deps [JobId(1146), JobId(1145)]
52625 Sep 22 23:15:53.350 DEBG [1] Read AckReady 1146, : downstairs
52626 Sep 22 23:15:53.351 DEBG up_ds_listen was notified
52627 Sep 22 23:15:53.351 DEBG up_ds_listen process 1146
52628 Sep 22 23:15:53.351 DEBG [A] ack job 1146:147, : downstairs
52629 Sep 22 23:15:53.403 DEBG up_ds_listen checked 1 jobs, back to waiting
52630 Sep 22 23:15:53.403 INFO [lossy] sleeping 1 second
52631 Sep 22 23:15:53.405 DEBG IO Read 1148 has deps [JobId(1147)]
52632 Sep 22 23:15:53.419 INFO [lossy] sleeping 1 second
52633 Sep 22 23:15:53.420 INFO [lossy] skipping 1145
52634 Sep 22 23:15:53.420 INFO [lossy] skipping 1146
52635 Sep 22 23:15:53.420 DEBG Flush :1145 extent_limit None deps:[JobId(1144), JobId(1143)] res:true f:62 g:1
52636 Sep 22 23:15:53.420 INFO [lossy] skipping 1146
52637 Sep 22 23:15:53.420 INFO [lossy] skipping 1146
52638 Sep 22 23:15:53.426 DEBG Read :1146 deps:[JobId(1145)] res:true
52639 Sep 22 23:15:54.203 DEBG up_ds_listen was notified
52640 Sep 22 23:15:54.203 DEBG up_ds_listen process 1145
52641 Sep 22 23:15:54.203 DEBG [A] ack job 1145:146, : downstairs
52642 Sep 22 23:15:54.203 DEBG up_ds_listen checked 1 jobs, back to waiting
52643 Sep 22 23:15:54.203 DEBG IO Flush 1149 has deps [JobId(1148), JobId(1147)]
52644 Sep 22 23:15:54.206 INFO [lossy] skipping 1147
52645 Sep 22 23:15:54.206 INFO [lossy] skipping 1147
52646 Sep 22 23:15:54.206 DEBG Flush :1147 extent_limit None deps:[JobId(1146), JobId(1145)] res:true f:63 g:1
52647 Sep 22 23:15:54.211 DEBG Read :1148 deps:[JobId(1147)] res:true
52648 Sep 22 23:15:54.610 INFO [lossy] skipping 1147
52649 Sep 22 23:15:54.610 DEBG Flush :1147 extent_limit None deps:[JobId(1146), JobId(1145)] res:true f:63 g:1
52650 Sep 22 23:15:54.610 INFO [lossy] sleeping 1 second
52651 Sep 22 23:15:54.611 WARN returning error on flush!
52652 Sep 22 23:15:54.611 DEBG Flush :1145 extent_limit None deps:[JobId(1144), JobId(1143)] res:false f:62 g:1
52653 Sep 22 23:15:54.611 INFO [lossy] skipping 1146
52654 Sep 22 23:15:54.611 WARN returning error on flush!
52655 Sep 22 23:15:54.611 DEBG Flush :1145 extent_limit None deps:[JobId(1144), JobId(1143)] res:false f:62 g:1
52656 Sep 22 23:15:54.611 DEBG Flush :1145 extent_limit None deps:[JobId(1144), JobId(1143)] res:true f:62 g:1
52657 Sep 22 23:15:54.616 DEBG Read :1146 deps:[JobId(1145)] res:true
52658 Sep 22 23:15:54.638 ERRO [2] job id 1145 saw error GenericError("test error")
52659 Sep 22 23:15:54.638 ERRO [2] job id 1145 saw error GenericError("test error")
52660 Sep 22 23:15:54.638 DEBG [rc] retire 1145 clears [JobId(1144), JobId(1145)], : downstairs
52661 Sep 22 23:15:54.638 DEBG up_ds_listen was notified
52662 Sep 22 23:15:54.638 DEBG up_ds_listen process 1147
52663 Sep 22 23:15:54.638 DEBG [A] ack job 1147:148, : downstairs
52664 Sep 22 23:15:54.638 DEBG up_ds_listen checked 1 jobs, back to waiting
52665 Sep 22 23:15:54.640 DEBG Flush :1149 extent_limit None deps:[JobId(1148), JobId(1147)] res:true f:64 g:1
52666 Sep 22 23:15:54.640 INFO [lossy] sleeping 1 second
52667 Sep 22 23:15:55.016 DEBG [0] Read AckReady 1148, : downstairs
52668 Sep 22 23:15:55.016 DEBG up_ds_listen was notified
52669 Sep 22 23:15:55.016 DEBG up_ds_listen process 1148
52670 Sep 22 23:15:55.017 DEBG [A] ack job 1148:149, : downstairs
52671 Sep 22 23:15:55.069 DEBG up_ds_listen checked 1 jobs, back to waiting
52672 Sep 22 23:15:55.071 INFO [lossy] skipping 1147
52673 Sep 22 23:15:55.071 DEBG Flush :1147 extent_limit None deps:[JobId(1146), JobId(1145)] res:true f:63 g:1
52674 Sep 22 23:15:55.071 INFO [lossy] sleeping 1 second
52675 Sep 22 23:15:55.119 DEBG IO Read 1150 has deps [JobId(1149)]
52676 Sep 22 23:15:55.449 DEBG [rc] retire 1147 clears [JobId(1146), JobId(1147)], : downstairs
52677 Sep 22 23:15:55.572 DEBG IO Flush 1151 has deps [JobId(1150), JobId(1149)]
52678 Sep 22 23:15:55.612 INFO [lossy] skipping 1148
52679 Sep 22 23:15:55.612 WARN 1149 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
52680 Sep 22 23:15:55.618 DEBG Read :1148 deps:[JobId(1147)] res:true
52681 Sep 22 23:15:55.640 WARN returning error on flush!
52682 Sep 22 23:15:55.641 DEBG Flush :1149 extent_limit None deps:[JobId(1148), JobId(1147)] res:false f:64 g:1
52683 Sep 22 23:15:55.641 INFO [lossy] skipping 1151
52684 Sep 22 23:15:55.641 DEBG Flush :1149 extent_limit None deps:[JobId(1148), JobId(1147)] res:true f:64 g:1
52685 Sep 22 23:15:55.641 INFO [lossy] skipping 1151
52686 Sep 22 23:15:55.641 WARN 1151 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
52687 Sep 22 23:15:55.641 INFO [lossy] skipping 1150
52688 Sep 22 23:15:55.646 DEBG Read :1150 deps:[JobId(1149)] res:true
52689 Sep 22 23:15:55.715 WARN returning error on read!
52690 Sep 22 23:15:55.715 DEBG Read :1150 deps:[JobId(1149)] res:false
52691 Sep 22 23:15:55.722 DEBG Read :1150 deps:[JobId(1149)] res:true
52692 Sep 22 23:15:56.073 ERRO [1] job id 1149 saw error GenericError("test error")
52693 Sep 22 23:15:56.073 DEBG up_ds_listen was notified
52694 Sep 22 23:15:56.073 DEBG up_ds_listen process 1149
52695 Sep 22 23:15:56.073 DEBG [A] ack job 1149:150, : downstairs
52696 Sep 22 23:15:56.073 DEBG up_ds_listen checked 1 jobs, back to waiting
52697 Sep 22 23:15:56.074 INFO [lossy] skipping 1148
52698 Sep 22 23:15:56.074 INFO [lossy] skipping 1149
52699 Sep 22 23:15:56.074 INFO [lossy] skipping 1151
52700 Sep 22 23:15:56.074 INFO [lossy] skipping 1148
52701 Sep 22 23:15:56.074 WARN 1149 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
52702 Sep 22 23:15:56.080 DEBG Read :1148 deps:[JobId(1147)] res:true
52703 Sep 22 23:15:56.101 ERRO [0] job id 1150 saw error GenericError("test error")
52704 Sep 22 23:15:56.104 INFO [lossy] sleeping 1 second
52705 Sep 22 23:15:56.152 INFO [lossy] skipping 1151
52706 Sep 22 23:15:56.152 INFO [lossy] skipping 1151
52707 Sep 22 23:15:56.152 INFO [lossy] skipping 1151
52708 Sep 22 23:15:56.152 DEBG Flush :1151 extent_limit None deps:[JobId(1150), JobId(1149)] res:true f:65 g:1
52709 Sep 22 23:15:56.152 INFO [lossy] sleeping 1 second
52710 Sep 22 23:15:56.480 DEBG [1] Read AckReady 1150, : downstairs
52711 Sep 22 23:15:56.480 DEBG up_ds_listen was notified
52712 Sep 22 23:15:56.480 DEBG up_ds_listen process 1150
52713 Sep 22 23:15:56.481 DEBG [A] ack job 1150:151, : downstairs
52714 Sep 22 23:15:56.533 DEBG up_ds_listen checked 1 jobs, back to waiting
52715 Sep 22 23:15:56.582 INFO [lossy] skipping 1149
52716 Sep 22 23:15:56.582 DEBG Flush :1149 extent_limit None deps:[JobId(1148), JobId(1147)] res:true f:64 g:1
52717 Sep 22 23:15:56.588 DEBG Read :1150 deps:[JobId(1149)] res:true
52718 Sep 22 23:15:56.987 DEBG IO Read 1152 has deps [JobId(1151)]
52719 Sep 22 23:15:57.317 DEBG [rc] retire 1149 clears [JobId(1148), JobId(1149)], : downstairs
52720 Sep 22 23:15:57.329 DEBG Flush :1151 extent_limit None deps:[JobId(1150), JobId(1149)] res:true f:65 g:1
52721 Sep 22 23:15:57.329 INFO [lossy] skipping 1152
52722 Sep 22 23:15:57.329 WARN returning error on read!
52723 Sep 22 23:15:57.329 DEBG Read :1152 deps:[JobId(1151)] res:false
52724 Sep 22 23:15:57.329 INFO [lossy] skipping 1152
52725 Sep 22 23:15:57.329 INFO [lossy] skipping 1152
52726 Sep 22 23:15:57.335 DEBG Read :1152 deps:[JobId(1151)] res:true
52727 Sep 22 23:15:57.356 WARN returning error on read!
52728 Sep 22 23:15:57.356 DEBG Read :1152 deps:[JobId(1151)] res:false
52729 Sep 22 23:15:57.362 DEBG Read :1152 deps:[JobId(1151)] res:true
52730 Sep 22 23:15:57.384 ERRO [0] job id 1152 saw error GenericError("test error")
52731 Sep 22 23:15:57.384 ERRO [1] job id 1152 saw error GenericError("test error")
52732 Sep 22 23:15:57.384 DEBG up_ds_listen was notified
52733 Sep 22 23:15:57.384 DEBG up_ds_listen process 1151
52734 Sep 22 23:15:57.384 DEBG [A] ack job 1151:152, : downstairs
52735 Sep 22 23:15:57.384 DEBG up_ds_listen checked 1 jobs, back to waiting
52736 Sep 22 23:15:57.386 DEBG Flush :1151 extent_limit None deps:[JobId(1150), JobId(1149)] res:true f:65 g:1
52737 Sep 22 23:15:57.392 DEBG Read :1152 deps:[JobId(1151)] res:true
52738 Sep 22 23:15:57.792 DEBG [rc] retire 1151 clears [JobId(1150), JobId(1151)], : downstairs
52739 Sep 22 23:15:57.795 DEBG IO Flush 1153 has deps [JobId(1152)]
52740 Sep 22 23:15:58.218 DEBG [0] Read AckReady 1152, : downstairs
52741 Sep 22 23:15:58.548 DEBG [1] Read already AckReady 1152, : downstairs
52742 Sep 22 23:15:58.549 DEBG up_ds_listen was notified
52743 Sep 22 23:15:58.549 DEBG up_ds_listen process 1152
52744 Sep 22 23:15:58.549 DEBG [A] ack job 1152:153, : downstairs
52745 Sep 22 23:15:58.602 DEBG up_ds_listen checked 1 jobs, back to waiting
52746 Sep 22 23:15:58.602 INFO [lossy] skipping 1153
52747 Sep 22 23:15:58.602 DEBG Flush :1153 extent_limit None deps:[JobId(1152)] res:true f:66 g:1
52748 Sep 22 23:15:58.602 INFO [lossy] skipping 1153
52749 Sep 22 23:15:58.602 DEBG Flush :1153 extent_limit None deps:[JobId(1152)] res:true f:66 g:1
52750 Sep 22 23:15:58.604 INFO [lossy] sleeping 1 second
52751 Sep 22 23:15:58.604 DEBG up_ds_listen was notified
52752 Sep 22 23:15:58.604 DEBG up_ds_listen process 1153
52753 Sep 22 23:15:58.604 DEBG [A] ack job 1153:154, : downstairs
52754 Sep 22 23:15:58.604 DEBG up_ds_listen checked 1 jobs, back to waiting
52755 Sep 22 23:15:58.604 DEBG IO Read 1154 has deps [JobId(1153)]
52756 Sep 22 23:15:58.616 INFO [lossy] skipping 1154
52757 Sep 22 23:15:58.616 WARN returning error on read!
52758 Sep 22 23:15:58.616 DEBG Read :1154 deps:[JobId(1153)] res:false
52759 Sep 22 23:15:58.622 DEBG Read :1154 deps:[JobId(1153)] res:true
52760 Sep 22 23:15:58.643 ERRO [0] job id 1154 saw error GenericError("test error")
52761 Sep 22 23:15:58.645 INFO [lossy] sleeping 1 second
52762 Sep 22 23:15:59.398 DEBG [0] Read AckReady 1154, : downstairs
52763 Sep 22 23:15:59.398 DEBG up_ds_listen was notified
52764 Sep 22 23:15:59.399 DEBG up_ds_listen process 1154
52765 Sep 22 23:15:59.399 DEBG [A] ack job 1154:155, : downstairs
52766 Sep 22 23:15:59.451 DEBG up_ds_listen checked 1 jobs, back to waiting
52767 Sep 22 23:15:59.452 DEBG IO Flush 1155 has deps [JobId(1154), JobId(1153)]
52768 Sep 22 23:15:59.453 DEBG IO Read 1156 has deps [JobId(1155)]
52769 Sep 22 23:15:59.458 DEBG Flush :1155 extent_limit None deps:[JobId(1154), JobId(1153)] res:true f:67 g:1
52770 Sep 22 23:15:59.465 INFO [lossy] sleeping 1 second
52771 Sep 22 23:15:59.610 DEBG Read :1154 deps:[JobId(1153)] res:true
52772 Sep 22 23:15:59.633 INFO [lossy] skipping 1155
52773 Sep 22 23:15:59.633 INFO [lossy] skipping 1156
52774 Sep 22 23:15:59.633 DEBG Flush :1155 extent_limit None deps:[JobId(1154), JobId(1153)] res:true f:67 g:1
52775 Sep 22 23:15:59.638 DEBG Read :1156 deps:[JobId(1155)] res:true
52776 Sep 22 23:15:59.707 WARN returning error on flush!
52777 Sep 22 23:15:59.707 DEBG Flush :1153 extent_limit None deps:[JobId(1152)] res:false f:66 g:1
52778 Sep 22 23:15:59.707 INFO [lossy] skipping 1156
52779 Sep 22 23:15:59.707 DEBG Flush :1153 extent_limit None deps:[JobId(1152)] res:true f:66 g:1
52780 Sep 22 23:15:59.707 WARN returning error on read!
52781 Sep 22 23:15:59.707 DEBG Read :1154 deps:[JobId(1153)] res:false
52782 Sep 22 23:15:59.707 WARN 1155 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
52783 Sep 22 23:15:59.707 WARN returning error on read!
52784 Sep 22 23:15:59.707 DEBG Read :1154 deps:[JobId(1153)] res:false
52785 Sep 22 23:15:59.714 DEBG Read :1154 deps:[JobId(1153)] res:true
52786 Sep 22 23:16:00.066 DEBG up_ds_listen was notified
52787 Sep 22 23:16:00.066 DEBG up_ds_listen process 1155
52788 Sep 22 23:16:00.066 DEBG [A] ack job 1155:156, : downstairs
52789 Sep 22 23:16:00.066 DEBG up_ds_listen checked 1 jobs, back to waiting
52790 Sep 22 23:16:00.066 DEBG IO Flush 1157 has deps [JobId(1156), JobId(1155)]
52791 Sep 22 23:16:00.066 ERRO [2] job id 1153 saw error GenericError("test error")
52792 Sep 22 23:16:00.066 DEBG [rc] retire 1153 clears [JobId(1152), JobId(1153)], : downstairs
52793 Sep 22 23:16:00.066 ERRO [2] job id 1154 saw error GenericError("test error")
52794 Sep 22 23:16:00.066 ERRO [2] job id 1154 saw error GenericError("test error")
52795 Sep 22 23:16:00.069 INFO [lossy] skipping 1157
52796 Sep 22 23:16:00.069 INFO [lossy] skipping 1157
52797 Sep 22 23:16:00.070 DEBG Flush :1157 extent_limit None deps:[JobId(1156), JobId(1155)] res:true f:68 g:1
52798 Sep 22 23:16:00.118 DEBG Flush :1155 extent_limit None deps:[JobId(1154), JobId(1153)] res:true f:67 g:1
52799 Sep 22 23:16:00.118 WARN returning error on read!
52800 Sep 22 23:16:00.118 DEBG Read :1156 deps:[JobId(1155)] res:false
52801 Sep 22 23:16:00.118 WARN returning error on read!
52802 Sep 22 23:16:00.118 DEBG Read :1156 deps:[JobId(1155)] res:false
52803 Sep 22 23:16:00.124 DEBG Read :1156 deps:[JobId(1155)] res:true
52804 Sep 22 23:16:00.473 DEBG [1] Read AckReady 1156, : downstairs
52805 Sep 22 23:16:00.473 DEBG up_ds_listen was notified
52806 Sep 22 23:16:00.474 DEBG up_ds_listen process 1156
52807 Sep 22 23:16:00.474 DEBG [A] ack job 1156:157, : downstairs
52808 Sep 22 23:16:00.526 DEBG up_ds_listen checked 1 jobs, back to waiting
52809 Sep 22 23:16:00.574 INFO [lossy] skipping 1156
52810 Sep 22 23:16:00.580 DEBG Read :1156 deps:[JobId(1155)] res:true
52811 Sep 22 23:16:00.932 DEBG [rc] retire 1155 clears [JobId(1154), JobId(1155)], : downstairs
52812 Sep 22 23:16:00.932 ERRO [2] job id 1156 saw error GenericError("test error")
52813 Sep 22 23:16:00.932 ERRO [2] job id 1156 saw error GenericError("test error")
52814 Sep 22 23:16:00.933 DEBG IO Read 1158 has deps [JobId(1157)]
52815 Sep 22 23:16:00.933 DEBG IO Flush 1159 has deps [JobId(1158), JobId(1157)]
52816 Sep 22 23:16:00.945 INFO [lossy] skipping 1158
52817 Sep 22 23:16:00.945 INFO [lossy] skipping 1158
52818 Sep 22 23:16:00.951 DEBG Read :1158 deps:[JobId(1157)] res:true
52819 Sep 22 23:16:00.975 INFO [lossy] sleeping 1 second
52820 Sep 22 23:16:01.023 DEBG Flush :1157 extent_limit None deps:[JobId(1156), JobId(1155)] res:true f:68 g:1
52821 Sep 22 23:16:01.023 WARN returning error on read!
52822 Sep 22 23:16:01.023 DEBG Read :1158 deps:[JobId(1157)] res:false
52823 Sep 22 23:16:01.023 INFO [lossy] skipping 1158
52824 Sep 22 23:16:01.023 INFO [lossy] skipping 1158
52825 Sep 22 23:16:01.023 INFO [lossy] skipping 1158
52826 Sep 22 23:16:01.029 DEBG Read :1158 deps:[JobId(1157)] res:true
52827 Sep 22 23:16:01.756 ERRO [0] job id 1158 saw error GenericError("test error")
52828 Sep 22 23:16:01.756 DEBG up_ds_listen was notified
52829 Sep 22 23:16:01.756 DEBG up_ds_listen process 1157
52830 Sep 22 23:16:01.756 DEBG [A] ack job 1157:158, : downstairs
52831 Sep 22 23:16:01.756 DEBG up_ds_listen checked 1 jobs, back to waiting
52832 Sep 22 23:16:01.758 DEBG Flush :1159 extent_limit None deps:[JobId(1158), JobId(1157)] res:true f:69 g:1
52833 Sep 22 23:16:02.135 DEBG [1] Read AckReady 1158, : downstairs
52834 Sep 22 23:16:02.136 DEBG up_ds_listen was notified
52835 Sep 22 23:16:02.136 DEBG up_ds_listen process 1158
52836 Sep 22 23:16:02.136 DEBG [A] ack job 1158:159, : downstairs
52837 Sep 22 23:16:02.189 DEBG up_ds_listen checked 1 jobs, back to waiting
52838 Sep 22 23:16:02.190 DEBG Flush :1157 extent_limit None deps:[JobId(1156), JobId(1155)] res:true f:68 g:1
52839 Sep 22 23:16:02.190 INFO [lossy] skipping 1158
52840 Sep 22 23:16:02.190 WARN returning error on read!
52841 Sep 22 23:16:02.190 DEBG Read :1158 deps:[JobId(1157)] res:false
52842 Sep 22 23:16:02.196 DEBG Read :1158 deps:[JobId(1157)] res:true
52843 Sep 22 23:16:02.218 DEBG IO Read 1160 has deps [JobId(1159)]
52844 Sep 22 23:16:02.219 DEBG [rc] retire 1157 clears [JobId(1156), JobId(1157)], : downstairs
52845 Sep 22 23:16:02.219 ERRO [2] job id 1158 saw error GenericError("test error")
52846 Sep 22 23:16:02.231 INFO [lossy] skipping 1160
52847 Sep 22 23:16:02.237 DEBG Read :1160 deps:[JobId(1159)] res:true
52848 Sep 22 23:16:02.259 DEBG IO Flush 1161 has deps [JobId(1160), JobId(1159)]
52849 Sep 22 23:16:02.261 DEBG Flush :1159 extent_limit None deps:[JobId(1158), JobId(1157)] res:true f:69 g:1
52850 Sep 22 23:16:02.267 DEBG Read :1160 deps:[JobId(1159)] res:true
52851 Sep 22 23:16:02.668 DEBG up_ds_listen was notified
52852 Sep 22 23:16:02.668 DEBG up_ds_listen process 1159
52853 Sep 22 23:16:02.668 DEBG [A] ack job 1159:160, : downstairs
52854 Sep 22 23:16:02.668 DEBG up_ds_listen checked 1 jobs, back to waiting
52855 Sep 22 23:16:02.670 INFO [lossy] sleeping 1 second
52856 Sep 22 23:16:03.048 DEBG Flush :1161 extent_limit None deps:[JobId(1160), JobId(1159)] res:true f:70 g:1
52857 Sep 22 23:16:03.424 DEBG [1] Read AckReady 1160, : downstairs
52858 Sep 22 23:16:03.425 DEBG up_ds_listen was notified
52859 Sep 22 23:16:03.425 DEBG up_ds_listen process 1160
52860 Sep 22 23:16:03.425 DEBG [A] ack job 1160:161, : downstairs
52861 Sep 22 23:16:03.477 DEBG up_ds_listen checked 1 jobs, back to waiting
52862 Sep 22 23:16:03.479 DEBG IO Read 1162 has deps [JobId(1161)]
52863 Sep 22 23:16:03.497 DEBG Read :1162 deps:[JobId(1161)] res:true
52864 Sep 22 23:16:03.519 INFO [lossy] skipping 1161
52865 Sep 22 23:16:03.519 DEBG Flush :1161 extent_limit None deps:[JobId(1160), JobId(1159)] res:true f:70 g:1
52866 Sep 22 23:16:03.519 WARN returning error on read!
52867 Sep 22 23:16:03.519 DEBG Read :1162 deps:[JobId(1161)] res:false
52868 Sep 22 23:16:03.520 WARN returning error on read!
52869 Sep 22 23:16:03.520 DEBG Read :1162 deps:[JobId(1161)] res:false
52870 Sep 22 23:16:03.520 INFO [lossy] skipping 1162
52871 Sep 22 23:16:03.520 WARN returning error on read!
52872 Sep 22 23:16:03.520 DEBG Read :1162 deps:[JobId(1161)] res:false
52873 Sep 22 23:16:03.525 DEBG Read :1162 deps:[JobId(1161)] res:true
52874 Sep 22 23:16:03.924 ERRO [0] job id 1162 saw error GenericError("test error")
52875 Sep 22 23:16:03.924 ERRO [0] job id 1162 saw error GenericError("test error")
52876 Sep 22 23:16:03.924 ERRO [0] job id 1162 saw error GenericError("test error")
52877 Sep 22 23:16:03.924 DEBG up_ds_listen was notified
52878 Sep 22 23:16:03.924 DEBG up_ds_listen process 1161
52879 Sep 22 23:16:03.924 DEBG [A] ack job 1161:162, : downstairs
52880 Sep 22 23:16:03.924 DEBG up_ds_listen checked 1 jobs, back to waiting
52881 Sep 22 23:16:03.924 DEBG IO Flush 1163 has deps [JobId(1162), JobId(1161)]
52882 Sep 22 23:16:03.924 WARN returning error on flush!
52883 Sep 22 23:16:03.924 DEBG Flush :1159 extent_limit None deps:[JobId(1158), JobId(1157)] res:false f:69 g:1
52884 Sep 22 23:16:03.924 INFO [lossy] skipping 1160
52885 Sep 22 23:16:03.924 INFO [lossy] skipping 1159
52886 Sep 22 23:16:03.924 DEBG Flush :1159 extent_limit None deps:[JobId(1158), JobId(1157)] res:true f:69 g:1
52887 Sep 22 23:16:03.930 DEBG Read :1160 deps:[JobId(1159)] res:true
52888 Sep 22 23:16:03.952 ERRO [2] job id 1159 saw error GenericError("test error")
52889 Sep 22 23:16:03.952 DEBG [rc] retire 1159 clears [JobId(1158), JobId(1159)], : downstairs
52890 Sep 22 23:16:03.953 INFO [lossy] skipping 1163
52891 Sep 22 23:16:03.953 WARN returning error on flush!
52892 Sep 22 23:16:03.953 DEBG Flush :1163 extent_limit None deps:[JobId(1162), JobId(1161)] res:false f:71 g:1
52893 Sep 22 23:16:03.953 DEBG Flush :1163 extent_limit None deps:[JobId(1162), JobId(1161)] res:true f:71 g:1
52894 Sep 22 23:16:04.005 DEBG [1] Read already AckReady 1000, : downstairs
52895 Sep 22 23:16:04.330 DEBG [1] Read AckReady 1162, : downstairs
52896 Sep 22 23:16:04.330 ERRO [1] job id 1163 saw error GenericError("test error")
52897 Sep 22 23:16:04.330 DEBG up_ds_listen was notified
52898 Sep 22 23:16:04.330 DEBG up_ds_listen process 1162
52899 Sep 22 23:16:04.331 DEBG [A] ack job 1162:163, : downstairs
52900 Sep 22 23:16:04.383 DEBG up_ds_listen checked 1 jobs, back to waiting
52901 Sep 22 23:16:04.385 DEBG IO Read 1164 has deps [JobId(1163)]
52902 Sep 22 23:16:04.403 DEBG Read :1164 deps:[JobId(1163)] res:true
52903 Sep 22 23:16:04.425 DEBG Flush :1163 extent_limit None deps:[JobId(1162), JobId(1161)] res:true f:71 g:1
52904 Sep 22 23:16:04.426 INFO [lossy] skipping 1164
52905 Sep 22 23:16:04.426 WARN returning error on read!
52906 Sep 22 23:16:04.426 DEBG Read :1164 deps:[JobId(1163)] res:false
52907 Sep 22 23:16:04.426 WARN returning error on read!
52908 Sep 22 23:16:04.426 DEBG Read :1164 deps:[JobId(1163)] res:false
52909 Sep 22 23:16:04.431 DEBG Read :1164 deps:[JobId(1163)] res:true
52910 Sep 22 23:16:04.453 DEBG IO Flush 1165 has deps [JobId(1164), JobId(1163)]
52911 Sep 22 23:16:04.831 ERRO [0] job id 1164 saw error GenericError("test error")
52912 Sep 22 23:16:04.831 ERRO [0] job id 1164 saw error GenericError("test error")
52913 Sep 22 23:16:04.831 DEBG up_ds_listen was notified
52914 Sep 22 23:16:04.831 DEBG up_ds_listen process 1163
52915 Sep 22 23:16:04.831 DEBG [A] ack job 1163:164, : downstairs
52916 Sep 22 23:16:04.831 DEBG up_ds_listen checked 1 jobs, back to waiting
52917 Sep 22 23:16:04.833 DEBG Flush :1161 extent_limit None deps:[JobId(1160), JobId(1159)] res:true f:70 g:1
52918 Sep 22 23:16:04.839 DEBG Read :1162 deps:[JobId(1161)] res:true
52919 Sep 22 23:16:05.238 DEBG [rc] retire 1161 clears [JobId(1160), JobId(1161)], : downstairs
52920 Sep 22 23:16:05.240 DEBG Flush :1165 extent_limit None deps:[JobId(1164), JobId(1163)] res:true f:72 g:1
52921 Sep 22 23:16:05.616 DEBG [1] Read AckReady 1164, : downstairs
52922 Sep 22 23:16:05.617 DEBG up_ds_listen was notified
52923 Sep 22 23:16:05.617 DEBG up_ds_listen process 1164
52924 Sep 22 23:16:05.617 DEBG [A] ack job 1164:165, : downstairs
52925 Sep 22 23:16:05.669 DEBG up_ds_listen checked 1 jobs, back to waiting
52926 Sep 22 23:16:07.770 DEBG IO Write 1166 has deps [JobId(1165), JobId(1163)]
52927 Sep 22 23:16:07.770 DEBG up_ds_listen was notified
52928 Sep 22 23:16:07.770 DEBG up_ds_listen process 1166
52929 Sep 22 23:16:07.770 DEBG [A] ack job 1166:167, : downstairs
52930 Sep 22 23:16:07.770 DEBG up_ds_listen checked 1 jobs, back to waiting
52931 Sep 22 23:16:07.834 DEBG Flush :1165 extent_limit None deps:[JobId(1164), JobId(1163)] res:true f:72 g:1
52932 Sep 22 23:16:07.834 INFO [lossy] sleeping 1 second
52933 Sep 22 23:16:08.163 DEBG IO Write 1167 has deps [JobId(1165), JobId(1163)]
52934 Sep 22 23:16:08.163 DEBG up_ds_listen was notified
52935 Sep 22 23:16:08.163 DEBG up_ds_listen process 1167
52936 Sep 22 23:16:08.163 DEBG [A] ack job 1167:168, : downstairs
52937 Sep 22 23:16:08.163 DEBG up_ds_listen checked 1 jobs, back to waiting
52938 Sep 22 23:16:08.164 DEBG IO Flush 1168 has deps [JobId(1167), JobId(1166), JobId(1165)]
52939 Sep 22 23:16:08.493 DEBG IO Write 1169 has deps [JobId(1168), JobId(1165), JobId(1163)]
52940 Sep 22 23:16:08.493 DEBG up_ds_listen was notified
52941 Sep 22 23:16:08.493 DEBG up_ds_listen process 1169
52942 Sep 22 23:16:08.493 DEBG [A] ack job 1169:170, : downstairs
52943 Sep 22 23:16:08.493 DEBG up_ds_listen checked 1 jobs, back to waiting
52944 Sep 22 23:16:08.822 DEBG IO Write 1170 has deps [JobId(1168), JobId(1165), JobId(1163)]
52945 Sep 22 23:16:08.822 DEBG up_ds_listen was notified
52946 Sep 22 23:16:08.822 DEBG up_ds_listen process 1170
52947 Sep 22 23:16:08.823 DEBG [A] ack job 1170:171, : downstairs
52948 Sep 22 23:16:08.823 DEBG up_ds_listen checked 1 jobs, back to waiting
52949 Sep 22 23:16:08.823 DEBG IO Flush 1171 has deps [JobId(1170), JobId(1169), JobId(1168)]
52950 Sep 22 23:16:09.152 DEBG IO Write 1172 has deps [JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
52951 Sep 22 23:16:09.152 DEBG up_ds_listen was notified
52952 Sep 22 23:16:09.152 DEBG up_ds_listen process 1172
52953 Sep 22 23:16:09.153 DEBG [A] ack job 1172:173, : downstairs
52954 Sep 22 23:16:09.153 DEBG up_ds_listen checked 1 jobs, back to waiting
52955 Sep 22 23:16:09.153 INFO [lossy] sleeping 1 second
52956 Sep 22 23:16:09.482 DEBG IO Write 1173 has deps [JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
52957 Sep 22 23:16:09.482 DEBG up_ds_listen was notified
52958 Sep 22 23:16:09.482 DEBG up_ds_listen process 1173
52959 Sep 22 23:16:09.482 DEBG [A] ack job 1173:174, : downstairs
52960 Sep 22 23:16:09.483 DEBG up_ds_listen checked 1 jobs, back to waiting
52961 Sep 22 23:16:09.483 DEBG IO Flush 1174 has deps [JobId(1173), JobId(1172), JobId(1171)]
52962 Sep 22 23:16:09.812 DEBG IO Write 1175 has deps [JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
52963 Sep 22 23:16:09.812 DEBG up_ds_listen was notified
52964 Sep 22 23:16:09.812 DEBG up_ds_listen process 1175
52965 Sep 22 23:16:09.812 DEBG [A] ack job 1175:176, : downstairs
52966 Sep 22 23:16:09.812 DEBG up_ds_listen checked 1 jobs, back to waiting
52967 Sep 22 23:16:10.142 DEBG IO Write 1176 has deps [JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
52968 Sep 22 23:16:10.142 DEBG up_ds_listen was notified
52969 Sep 22 23:16:10.142 DEBG up_ds_listen process 1176
52970 Sep 22 23:16:10.142 DEBG [A] ack job 1176:177, : downstairs
52971 Sep 22 23:16:10.142 DEBG up_ds_listen checked 1 jobs, back to waiting
52972 Sep 22 23:16:10.142 DEBG IO Flush 1177 has deps [JobId(1176), JobId(1175), JobId(1174)]
52973 Sep 22 23:16:10.471 DEBG IO Write 1178 has deps [JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
52974 Sep 22 23:16:10.472 DEBG up_ds_listen was notified
52975 Sep 22 23:16:10.472 DEBG up_ds_listen process 1178
52976 Sep 22 23:16:10.472 DEBG [A] ack job 1178:179, : downstairs
52977 Sep 22 23:16:10.472 DEBG up_ds_listen checked 1 jobs, back to waiting
52978 Sep 22 23:16:10.911 DEBG IO Write 1179 has deps [JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
52979 Sep 22 23:16:11.241 DEBG up_ds_listen was notified
52980 Sep 22 23:16:11.241 DEBG up_ds_listen process 1165
52981 Sep 22 23:16:11.241 DEBG [A] ack job 1165:166, : downstairs
52982 Sep 22 23:16:11.242 DEBG up_ds_listen process 1179
52983 Sep 22 23:16:11.242 DEBG [A] ack job 1179:180, : downstairs
52984 Sep 22 23:16:11.242 DEBG up_ds_listen checked 2 jobs, back to waiting
52985 Sep 22 23:16:11.242 DEBG up_ds_listen was notified
52986 Sep 22 23:16:11.242 DEBG up_ds_listen checked 0 jobs, back to waiting
52987 Sep 22 23:16:11.289 WARN returning error on flush!
52988 Sep 22 23:16:11.289 DEBG Flush :1163 extent_limit None deps:[JobId(1162), JobId(1161)] res:false f:71 g:1
52989 Sep 22 23:16:11.289 WARN returning error on flush!
52990 Sep 22 23:16:11.289 DEBG Flush :1163 extent_limit None deps:[JobId(1162), JobId(1161)] res:false f:71 g:1
52991 Sep 22 23:16:11.289 DEBG Flush :1163 extent_limit None deps:[JobId(1162), JobId(1161)] res:true f:71 g:1
52992 Sep 22 23:16:11.290 WARN returning error on read!
52993 Sep 22 23:16:11.290 DEBG Read :1164 deps:[JobId(1163)] res:false
52994 Sep 22 23:16:11.290 WARN 1165 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
52995 Sep 22 23:16:11.295 DEBG Read :1164 deps:[JobId(1163)] res:true
52996 Sep 22 23:16:11.409 DEBG IO Flush 1180 has deps [JobId(1179), JobId(1178), JobId(1177)]
52997 Sep 22 23:16:11.409 INFO [lossy] sleeping 1 second
52998 Sep 22 23:16:11.411 WARN returning error on write!
52999 Sep 22 23:16:11.411 DEBG Write :1166 deps:[JobId(1165), JobId(1163)] res:false
53000 Sep 22 23:16:11.440 DEBG Write :1166 deps:[JobId(1165), JobId(1163)] res:true
53001 Sep 22 23:16:11.770 DEBG IO Write 1181 has deps [JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53002 Sep 22 23:16:11.770 DEBG up_ds_listen was notified
53003 Sep 22 23:16:11.770 DEBG up_ds_listen process 1181
53004 Sep 22 23:16:11.770 DEBG [A] ack job 1181:182, : downstairs
53005 Sep 22 23:16:11.770 DEBG up_ds_listen checked 1 jobs, back to waiting
53006 Sep 22 23:16:12.099 DEBG IO Write 1182 has deps [JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53007 Sep 22 23:16:12.100 DEBG up_ds_listen was notified
53008 Sep 22 23:16:12.100 DEBG up_ds_listen process 1182
53009 Sep 22 23:16:12.100 DEBG [A] ack job 1182:183, : downstairs
53010 Sep 22 23:16:12.100 DEBG up_ds_listen checked 1 jobs, back to waiting
53011 Sep 22 23:16:12.100 DEBG IO Flush 1183 has deps [JobId(1182), JobId(1181), JobId(1180)]
53012 Sep 22 23:16:12.430 DEBG IO Write 1184 has deps [JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53013 Sep 22 23:16:12.430 DEBG up_ds_listen was notified
53014 Sep 22 23:16:12.430 DEBG up_ds_listen process 1184
53015 Sep 22 23:16:12.430 DEBG [A] ack job 1184:185, : downstairs
53016 Sep 22 23:16:12.430 DEBG up_ds_listen checked 1 jobs, back to waiting
53017 Sep 22 23:16:12.432 WARN returning error on write!
53018 Sep 22 23:16:12.432 DEBG Write :1166 deps:[JobId(1165), JobId(1163)] res:false
53019 Sep 22 23:16:12.433 WARN returning error on write!
53020 Sep 22 23:16:12.433 DEBG Write :1166 deps:[JobId(1165), JobId(1163)] res:false
53021 Sep 22 23:16:12.433 INFO [lossy] skipping 1166
53022 Sep 22 23:16:12.433 INFO [lossy] skipping 1166
53023 Sep 22 23:16:12.463 DEBG Write :1166 deps:[JobId(1165), JobId(1163)] res:true
53024 Sep 22 23:16:12.793 DEBG IO Write 1185 has deps [JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53025 Sep 22 23:16:12.793 DEBG up_ds_listen was notified
53026 Sep 22 23:16:12.793 DEBG up_ds_listen process 1185
53027 Sep 22 23:16:12.793 DEBG [A] ack job 1185:186, : downstairs
53028 Sep 22 23:16:12.793 DEBG up_ds_listen checked 1 jobs, back to waiting
53029 Sep 22 23:16:12.793 DEBG IO Flush 1186 has deps [JobId(1185), JobId(1184), JobId(1183)]
53030 Sep 22 23:16:13.123 DEBG IO Write 1187 has deps [JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53031 Sep 22 23:16:13.123 DEBG up_ds_listen was notified
53032 Sep 22 23:16:13.123 DEBG up_ds_listen process 1187
53033 Sep 22 23:16:13.123 DEBG [A] ack job 1187:188, : downstairs
53034 Sep 22 23:16:13.123 DEBG up_ds_listen checked 1 jobs, back to waiting
53035 Sep 22 23:16:13.453 DEBG IO Write 1188 has deps [JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53036 Sep 22 23:16:13.453 DEBG up_ds_listen was notified
53037 Sep 22 23:16:13.453 DEBG up_ds_listen process 1188
53038 Sep 22 23:16:13.453 DEBG [A] ack job 1188:189, : downstairs
53039 Sep 22 23:16:13.453 DEBG up_ds_listen checked 1 jobs, back to waiting
53040 Sep 22 23:16:13.454 DEBG IO Flush 1189 has deps [JobId(1188), JobId(1187), JobId(1186)]
53041 Sep 22 23:16:13.783 DEBG IO Write 1190 has deps [JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53042 Sep 22 23:16:13.783 DEBG up_ds_listen was notified
53043 Sep 22 23:16:13.783 DEBG up_ds_listen process 1190
53044 Sep 22 23:16:13.783 DEBG [A] ack job 1190:191, : downstairs
53045 Sep 22 23:16:13.783 DEBG up_ds_listen checked 1 jobs, back to waiting
53046 Sep 22 23:16:14.113 DEBG IO Write 1191 has deps [JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53047 Sep 22 23:16:14.113 DEBG up_ds_listen was notified
53048 Sep 22 23:16:14.113 DEBG up_ds_listen process 1191
53049 Sep 22 23:16:14.113 DEBG [A] ack job 1191:192, : downstairs
53050 Sep 22 23:16:14.113 DEBG up_ds_listen checked 1 jobs, back to waiting
53051 Sep 22 23:16:14.114 DEBG IO Flush 1192 has deps [JobId(1191), JobId(1190), JobId(1189)]
53052 Sep 22 23:16:14.505 DEBG IO Write 1193 has deps [JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53053 Sep 22 23:16:14.505 DEBG up_ds_listen was notified
53054 Sep 22 23:16:14.505 DEBG up_ds_listen process 1193
53055 Sep 22 23:16:14.505 DEBG [A] ack job 1193:194, : downstairs
53056 Sep 22 23:16:14.505 DEBG up_ds_listen checked 1 jobs, back to waiting
53057 Sep 22 23:16:14.645 INFO [lossy] sleeping 1 second
53058 Sep 22 23:16:14.646 WARN returning error on write!
53059 Sep 22 23:16:14.646 DEBG Write :1167 deps:[JobId(1165), JobId(1163)] res:false
53060 Sep 22 23:16:14.677 DEBG Write :1167 deps:[JobId(1165), JobId(1163)] res:true
53061 Sep 22 23:16:14.678 INFO [lossy] sleeping 1 second
53062 Sep 22 23:16:15.007 DEBG IO Write 1194 has deps [JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53063 Sep 22 23:16:15.007 DEBG IO Flush 1195 has deps [JobId(1194), JobId(1193), JobId(1192)]
53064 Sep 22 23:16:15.007 DEBG up_ds_listen was notified
53065 Sep 22 23:16:15.007 DEBG up_ds_listen process 1194
53066 Sep 22 23:16:15.007 DEBG [A] ack job 1194:195, : downstairs
53067 Sep 22 23:16:15.007 DEBG up_ds_listen checked 1 jobs, back to waiting
53068 Sep 22 23:16:15.337 DEBG IO Write 1196 has deps [JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53069 Sep 22 23:16:15.337 DEBG up_ds_listen was notified
53070 Sep 22 23:16:15.337 DEBG up_ds_listen process 1196
53071 Sep 22 23:16:15.337 DEBG [A] ack job 1196:197, : downstairs
53072 Sep 22 23:16:15.337 DEBG up_ds_listen checked 1 jobs, back to waiting
53073 Sep 22 23:16:15.667 DEBG IO Write 1197 has deps [JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53074 Sep 22 23:16:15.667 DEBG up_ds_listen was notified
53075 Sep 22 23:16:15.667 DEBG up_ds_listen process 1197
53076 Sep 22 23:16:15.667 DEBG [A] ack job 1197:198, : downstairs
53077 Sep 22 23:16:15.667 DEBG up_ds_listen checked 1 jobs, back to waiting
53078 Sep 22 23:16:15.668 DEBG IO Flush 1198 has deps [JobId(1197), JobId(1196), JobId(1195)]
53079 Sep 22 23:16:15.668 INFO [lossy] skipping 1167
53080 Sep 22 23:16:15.668 INFO [lossy] skipping 1168
53081 Sep 22 23:16:15.698 DEBG Write :1167 deps:[JobId(1165), JobId(1163)] res:true
53082 Sep 22 23:16:15.707 DEBG Flush :1168 extent_limit None deps:[JobId(1167), JobId(1166), JobId(1165)] res:true f:73 g:1
53083 Sep 22 23:16:15.707 INFO [lossy] sleeping 1 second
53084 Sep 22 23:16:16.035 DEBG IO Write 1199 has deps [JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53085 Sep 22 23:16:16.044 DEBG up_ds_listen was notified
53086 Sep 22 23:16:16.044 DEBG up_ds_listen process 1199
53087 Sep 22 23:16:16.044 DEBG [A] ack job 1199:200, : downstairs
53088 Sep 22 23:16:16.044 DEBG up_ds_listen checked 1 jobs, back to waiting
53089 Sep 22 23:16:16.044 DEBG Flush :1168 extent_limit None deps:[JobId(1167), JobId(1166), JobId(1165)] res:true f:73 g:1
53090 Sep 22 23:16:16.373 DEBG IO Write 1200 has deps [JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53091 Sep 22 23:16:16.373 DEBG up_ds_listen was notified
53092 Sep 22 23:16:16.373 DEBG up_ds_listen process 1200
53093 Sep 22 23:16:16.374 DEBG [A] ack job 1200:201, : downstairs
53094 Sep 22 23:16:16.374 DEBG up_ds_listen checked 1 jobs, back to waiting
53095 Sep 22 23:16:16.374 DEBG IO Flush 1201 has deps [JobId(1200), JobId(1199), JobId(1198)]
53096 Sep 22 23:16:16.703 DEBG IO Write 1202 has deps [JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53097 Sep 22 23:16:16.703 DEBG up_ds_listen was notified
53098 Sep 22 23:16:16.703 DEBG up_ds_listen process 1202
53099 Sep 22 23:16:16.703 DEBG [A] ack job 1202:203, : downstairs
53100 Sep 22 23:16:16.703 DEBG up_ds_listen checked 1 jobs, back to waiting
53101 Sep 22 23:16:17.033 DEBG IO Write 1203 has deps [JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53102 Sep 22 23:16:17.033 DEBG up_ds_listen was notified
53103 Sep 22 23:16:17.033 DEBG up_ds_listen process 1203
53104 Sep 22 23:16:17.033 DEBG [A] ack job 1203:204, : downstairs
53105 Sep 22 23:16:17.033 DEBG up_ds_listen checked 1 jobs, back to waiting
53106 Sep 22 23:16:17.034 INFO [lossy] sleeping 1 second
53107 Sep 22 23:16:17.034 DEBG IO Flush 1204 has deps [JobId(1203), JobId(1202), JobId(1201)]
53108 Sep 22 23:16:17.363 DEBG IO Write 1205 has deps [JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53109 Sep 22 23:16:17.363 DEBG up_ds_listen was notified
53110 Sep 22 23:16:17.363 DEBG up_ds_listen process 1205
53111 Sep 22 23:16:17.363 DEBG [A] ack job 1205:206, : downstairs
53112 Sep 22 23:16:17.363 DEBG up_ds_listen checked 1 jobs, back to waiting
53113 Sep 22 23:16:17.755 DEBG IO Write 1206 has deps [JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53114 Sep 22 23:16:17.755 DEBG up_ds_listen was notified
53115 Sep 22 23:16:17.755 DEBG up_ds_listen process 1206
53116 Sep 22 23:16:17.755 DEBG [A] ack job 1206:207, : downstairs
53117 Sep 22 23:16:17.755 DEBG up_ds_listen checked 1 jobs, back to waiting
53118 Sep 22 23:16:17.896 DEBG IO Flush 1207 has deps [JobId(1206), JobId(1205), JobId(1204)]
53119 Sep 22 23:16:17.896 INFO [lossy] skipping 1169
53120 Sep 22 23:16:17.926 DEBG Write :1169 deps:[JobId(1168), JobId(1165), JobId(1163)] res:true
53121 Sep 22 23:16:18.256 DEBG IO Write 1208 has deps [JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53122 Sep 22 23:16:18.257 DEBG up_ds_listen was notified
53123 Sep 22 23:16:18.257 DEBG up_ds_listen process 1208
53124 Sep 22 23:16:18.257 DEBG [A] ack job 1208:209, : downstairs
53125 Sep 22 23:16:18.257 DEBG up_ds_listen checked 1 jobs, back to waiting
53126 Sep 22 23:16:18.257 INFO [lossy] skipping 1169
53127 Sep 22 23:16:18.257 INFO [lossy] skipping 1169
53128 Sep 22 23:16:18.287 DEBG Write :1169 deps:[JobId(1168), JobId(1165), JobId(1163)] res:true
53129 Sep 22 23:16:18.288 INFO [lossy] sleeping 1 second
53130 Sep 22 23:16:18.617 DEBG IO Write 1209 has deps [JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53131 Sep 22 23:16:18.617 DEBG up_ds_listen was notified
53132 Sep 22 23:16:18.617 DEBG up_ds_listen process 1209
53133 Sep 22 23:16:18.617 DEBG [A] ack job 1209:210, : downstairs
53134 Sep 22 23:16:18.617 DEBG up_ds_listen checked 1 jobs, back to waiting
53135 Sep 22 23:16:18.618 DEBG IO Flush 1210 has deps [JobId(1209), JobId(1208), JobId(1207)]
53136 Sep 22 23:16:18.947 DEBG IO Write 1211 has deps [JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53137 Sep 22 23:16:18.947 DEBG up_ds_listen was notified
53138 Sep 22 23:16:18.947 DEBG up_ds_listen process 1211
53139 Sep 22 23:16:18.947 DEBG [A] ack job 1211:212, : downstairs
53140 Sep 22 23:16:18.947 DEBG up_ds_listen checked 1 jobs, back to waiting
53141 Sep 22 23:16:19.277 DEBG IO Write 1212 has deps [JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53142 Sep 22 23:16:19.277 DEBG up_ds_listen was notified
53143 Sep 22 23:16:19.277 DEBG up_ds_listen process 1212
53144 Sep 22 23:16:19.277 DEBG [A] ack job 1212:213, : downstairs
53145 Sep 22 23:16:19.277 DEBG up_ds_listen checked 1 jobs, back to waiting
53146 Sep 22 23:16:19.278 DEBG IO Flush 1213 has deps [JobId(1212), JobId(1211), JobId(1210)]
53147 Sep 22 23:16:19.553 DEBG [2] Read already AckReady 1000, : downstairs
53148 Sep 22 23:16:19.607 DEBG IO Write 1214 has deps [JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53149 Sep 22 23:16:19.608 DEBG up_ds_listen was notified
53150 Sep 22 23:16:19.608 DEBG up_ds_listen process 1214
53151 Sep 22 23:16:19.608 DEBG [A] ack job 1214:215, : downstairs
53152 Sep 22 23:16:19.608 DEBG up_ds_listen checked 1 jobs, back to waiting
53153 Sep 22 23:16:19.663 DEBG up_ds_listen was notified
53154 Sep 22 23:16:19.663 DEBG up_ds_listen process 1000
53155 Sep 22 23:16:19.666 DEBG [A] ack job 1000:1, : downstairs
53156 Sep 22 23:16:19.938 DEBG IO Write 1215 has deps [JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53157 Sep 22 23:16:19.938 DEBG up_ds_listen was notified
53158 Sep 22 23:16:19.938 DEBG up_ds_listen process 1215
53159 Sep 22 23:16:19.938 DEBG [A] ack job 1215:216, : downstairs
53160 Sep 22 23:16:19.938 DEBG up_ds_listen checked 1 jobs, back to waiting
53161 Sep 22 23:16:19.939 DEBG IO Flush 1216 has deps [JobId(1215), JobId(1214), JobId(1213)]
53162 Sep 22 23:16:19.939 INFO [lossy] sleeping 1 second
53163 Sep 22 23:16:20.268 DEBG IO Write 1217 has deps [JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53164 Sep 22 23:16:20.268 DEBG up_ds_listen was notified
53165 Sep 22 23:16:20.268 DEBG up_ds_listen process 1217
53166 Sep 22 23:16:20.268 DEBG [A] ack job 1217:218, : downstairs
53167 Sep 22 23:16:20.268 DEBG up_ds_listen checked 1 jobs, back to waiting
53168 Sep 22 23:16:20.598 DEBG IO Write 1218 has deps [JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53169 Sep 22 23:16:20.598 DEBG up_ds_listen was notified
53170 Sep 22 23:16:20.598 DEBG up_ds_listen process 1218
53171 Sep 22 23:16:20.599 DEBG [A] ack job 1218:219, : downstairs
53172 Sep 22 23:16:20.599 DEBG up_ds_listen checked 1 jobs, back to waiting
53173 Sep 22 23:16:20.599 DEBG IO Flush 1219 has deps [JobId(1218), JobId(1217), JobId(1216)]
53174 Sep 22 23:16:20.937 DEBG IO Write 1220 has deps [JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53175 Sep 22 23:16:20.937 DEBG up_ds_listen was notified
53176 Sep 22 23:16:20.937 DEBG up_ds_listen process 1220
53177 Sep 22 23:16:20.937 DEBG [A] ack job 1220:221, : downstairs
53178 Sep 22 23:16:20.937 DEBG up_ds_listen checked 1 jobs, back to waiting
53179 Sep 22 23:16:21.330 DEBG IO Write 1221 has deps [JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53180 Sep 22 23:16:21.330 DEBG up_ds_listen was notified
53181 Sep 22 23:16:21.330 DEBG up_ds_listen process 1221
53182 Sep 22 23:16:21.330 DEBG [A] ack job 1221:222, : downstairs
53183 Sep 22 23:16:21.330 DEBG up_ds_listen checked 1 jobs, back to waiting
53184 Sep 22 23:16:21.469 DEBG IO Flush 1222 has deps [JobId(1221), JobId(1220), JobId(1219)]
53185 Sep 22 23:16:21.471 WARN returning error on write!
53186 Sep 22 23:16:21.471 DEBG Write :1170 deps:[JobId(1168), JobId(1165), JobId(1163)] res:false
53187 Sep 22 23:16:21.501 DEBG Write :1170 deps:[JobId(1168), JobId(1165), JobId(1163)] res:true
53188 Sep 22 23:16:21.502 INFO [lossy] sleeping 1 second
53189 Sep 22 23:16:21.510 DEBG Flush :1171 extent_limit None deps:[JobId(1170), JobId(1169), JobId(1168)] res:true f:74 g:1
53190 Sep 22 23:16:21.839 DEBG IO Write 1223 has deps [JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53191 Sep 22 23:16:21.839 DEBG up_ds_listen was notified
53192 Sep 22 23:16:21.839 DEBG up_ds_listen process 1223
53193 Sep 22 23:16:21.839 DEBG [A] ack job 1223:224, : downstairs
53194 Sep 22 23:16:21.839 DEBG up_ds_listen checked 1 jobs, back to waiting
53195 Sep 22 23:16:22.168 DEBG IO Write 1224 has deps [JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53196 Sep 22 23:16:22.169 DEBG up_ds_listen was notified
53197 Sep 22 23:16:22.169 DEBG up_ds_listen process 1224
53198 Sep 22 23:16:22.169 DEBG [A] ack job 1224:225, : downstairs
53199 Sep 22 23:16:22.169 DEBG up_ds_listen checked 1 jobs, back to waiting
53200 Sep 22 23:16:22.169 DEBG IO Flush 1225 has deps [JobId(1224), JobId(1223), JobId(1222)]
53201 Sep 22 23:16:22.178 DEBG up_ds_listen process 1001
53202 Sep 22 23:16:22.178 DEBG [A] ack job 1001:2, : downstairs
53203 Sep 22 23:16:22.178 DEBG [rc] retire 1001 clears [JobId(1000), JobId(1001)], : downstairs
53204 Sep 22 23:16:22.178 DEBG up_ds_listen checked 2 jobs, back to waiting
53205 Sep 22 23:16:22.178 DEBG up_ds_listen was notified
53206 Sep 22 23:16:22.178 DEBG up_ds_listen checked 0 jobs, back to waiting
53207 test test::integration_test_volume_replace_downstairs_then_takeover ... ok
53208 Sep 22 23:16:22.511 DEBG IO Write 1226 has deps [JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53209 Sep 22 23:16:22.511 DEBG up_ds_listen was notified
53210 Sep 22 23:16:22.512 DEBG up_ds_listen process 1226
53211 Sep 22 23:16:22.512 DEBG [A] ack job 1226:227, : downstairs
53212 Sep 22 23:16:22.512 DEBG up_ds_listen checked 1 jobs, back to waiting
53213 Sep 22 23:16:22.544 DEBG Write :1170 deps:[JobId(1168), JobId(1165), JobId(1163)] res:true
53214 Sep 22 23:16:22.553 DEBG Flush :1171 extent_limit None deps:[JobId(1170), JobId(1169), JobId(1168)] res:true f:74 g:1
53215 Sep 22 23:16:22.553 INFO [lossy] sleeping 1 second
53216 Sep 22 23:16:22.882 DEBG IO Write 1227 has deps [JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53217 Sep 22 23:16:22.882 DEBG up_ds_listen was notified
53218 Sep 22 23:16:22.882 DEBG up_ds_listen process 1227
53219 Sep 22 23:16:22.882 DEBG [A] ack job 1227:228, : downstairs
53220 Sep 22 23:16:22.882 DEBG up_ds_listen checked 1 jobs, back to waiting
53221 Sep 22 23:16:22.883 DEBG IO Flush 1228 has deps [JobId(1227), JobId(1226), JobId(1225)]
53222 Sep 22 23:16:23.212 DEBG IO Write 1229 has deps [JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53223 Sep 22 23:16:23.212 DEBG up_ds_listen was notified
53224 Sep 22 23:16:23.212 DEBG up_ds_listen process 1229
53225 Sep 22 23:16:23.212 DEBG [A] ack job 1229:230, : downstairs
53226 Sep 22 23:16:23.213 DEBG up_ds_listen checked 1 jobs, back to waiting
53227 Sep 22 23:16:23.543 DEBG IO Write 1230 has deps [JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53228 Sep 22 23:16:23.543 DEBG up_ds_listen was notified
53229 Sep 22 23:16:23.543 DEBG up_ds_listen process 1230
53230 Sep 22 23:16:23.543 DEBG [A] ack job 1230:231, : downstairs
53231 Sep 22 23:16:23.543 DEBG up_ds_listen checked 1 jobs, back to waiting
53232 Sep 22 23:16:23.543 DEBG IO Flush 1231 has deps [JobId(1230), JobId(1229), JobId(1228)]
53233 Sep 22 23:16:23.873 DEBG IO Write 1232 has deps [JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)]
53234 Sep 22 23:16:23.873 DEBG up_ds_listen was notified
53235 Sep 22 23:16:23.873 DEBG up_ds_listen process 1232
53236 Sep 22 23:16:23.873 DEBG [A] ack job 1232:233, : downstairs
53237 Sep 22 23:16:23.873 DEBG up_ds_listen checked 1 jobs, back to waiting
53238 Sep 22 23:16:24.203 DEBG IO Write 1233 has deps [JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163), JobId(1162)]
53239 Sep 22 23:16:24.203 DEBG up_ds_listen was notified
53240 Sep 22 23:16:24.203 DEBG up_ds_listen process 1233
53241 Sep 22 23:16:24.203 DEBG [A] ack job 1233:234, : downstairs
53242 Sep 22 23:16:24.203 DEBG up_ds_listen checked 1 jobs, back to waiting
53243 Sep 22 23:16:24.204 DEBG IO Flush 1234 has deps [JobId(1233), JobId(1232), JobId(1231)]
53244 Sep 22 23:16:24.595 DEBG IO Write 1235 has deps [JobId(1234), JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1164), JobId(1163)]
53245 Sep 22 23:16:24.595 DEBG up_ds_listen was notified
53246 Sep 22 23:16:24.595 DEBG up_ds_listen process 1235
53247 Sep 22 23:16:24.595 DEBG [A] ack job 1235:236, : downstairs
53248 Sep 22 23:16:24.595 DEBG up_ds_listen checked 1 jobs, back to waiting
53249 Sep 22 23:16:24.735 INFO [lossy] sleeping 1 second
53250 Sep 22 23:16:24.766 DEBG Write :1172 deps:[JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53251 Sep 22 23:16:24.767 DEBG IO Flush 1236 has deps [JobId(1235), JobId(1234)]
53252 Sep 22 23:16:24.767 DEBG IO Read 1237 has deps [JobId(1236)]
53253 Sep 22 23:16:24.970 INFO [lossy] sleeping 1 second
53254 Sep 22 23:16:25.377 DEBG IO Flush 1238 has deps [JobId(1237), JobId(1236)]
53255 Sep 22 23:16:25.490 ERRO [0] job id 1166 saw error GenericError("test error")
53256 Sep 22 23:16:25.819 ERRO [2] job id 1163 saw error GenericError("test error")
53257 Sep 22 23:16:25.819 ERRO [2] job id 1163 saw error GenericError("test error")
53258 Sep 22 23:16:25.819 DEBG [rc] retire 1163 clears [JobId(1162), JobId(1163)], : downstairs
53259 Sep 22 23:16:25.819 ERRO [2] job id 1164 saw error GenericError("test error")
53260 Sep 22 23:16:25.819 ERRO [1] job id 1166 saw error GenericError("test error")
53261 Sep 22 23:16:25.819 ERRO [1] job id 1166 saw error GenericError("test error")
53262 Sep 22 23:16:25.819 ERRO [1] job id 1167 saw error GenericError("test error")
53263 Sep 22 23:16:25.819 ERRO [1] job id 1170 saw error GenericError("test error")
53264 Sep 22 23:16:25.820 DEBG up_ds_listen was notified
53265 Sep 22 23:16:25.820 DEBG up_ds_listen process 1168
53266 Sep 22 23:16:25.820 DEBG [A] ack job 1168:169, : downstairs
53267 Sep 22 23:16:25.820 DEBG up_ds_listen process 1171
53268 Sep 22 23:16:25.820 DEBG [A] ack job 1171:172, : downstairs
53269 Sep 22 23:16:25.820 DEBG up_ds_listen checked 2 jobs, back to waiting
53270 Sep 22 23:16:25.820 DEBG up_ds_listen was notified
53271 Sep 22 23:16:25.820 DEBG up_ds_listen checked 0 jobs, back to waiting
53272 Sep 22 23:16:25.958 INFO [lossy] skipping 1172
53273 Sep 22 23:16:25.989 DEBG Write :1173 deps:[JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53274 Sep 22 23:16:26.020 DEBG Write :1172 deps:[JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53275 Sep 22 23:16:26.022 INFO [lossy] skipping 1174
53276 Sep 22 23:16:26.022 INFO [lossy] skipping 1176
53277 Sep 22 23:16:26.022 WARN returning error on flush!
53278 Sep 22 23:16:26.022 DEBG Flush :1174 extent_limit None deps:[JobId(1173), JobId(1172), JobId(1171)] res:false f:75 g:1
53279 Sep 22 23:16:26.022 WARN returning error on flush!
53280 Sep 22 23:16:26.022 DEBG Flush :1174 extent_limit None deps:[JobId(1173), JobId(1172), JobId(1171)] res:false f:75 g:1
53281 Sep 22 23:16:26.030 DEBG Flush :1174 extent_limit None deps:[JobId(1173), JobId(1172), JobId(1171)] res:true f:75 g:1
53282 Sep 22 23:16:26.060 DEBG Write :1175 deps:[JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53283 Sep 22 23:16:26.092 DEBG Write :1176 deps:[JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53284 Sep 22 23:16:26.093 WARN returning error on flush!
53285 Sep 22 23:16:26.093 DEBG Flush :1177 extent_limit None deps:[JobId(1176), JobId(1175), JobId(1174)] res:false f:76 g:1
53286 Sep 22 23:16:26.101 DEBG Flush :1177 extent_limit None deps:[JobId(1176), JobId(1175), JobId(1174)] res:true f:76 g:1
53287 Sep 22 23:16:26.101 INFO [lossy] sleeping 1 second
53288 Sep 22 23:16:26.102 WARN returning error on write!
53289 Sep 22 23:16:26.102 DEBG Write :1173 deps:[JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53290 Sep 22 23:16:26.103 INFO [lossy] skipping 1174
53291 Sep 22 23:16:26.103 INFO [lossy] skipping 1173
53292 Sep 22 23:16:26.103 WARN returning error on write!
53293 Sep 22 23:16:26.103 DEBG Write :1173 deps:[JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53294 Sep 22 23:16:26.104 INFO [lossy] skipping 1173
53295 Sep 22 23:16:26.134 DEBG Write :1173 deps:[JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53296 Sep 22 23:16:26.143 DEBG Flush :1174 extent_limit None deps:[JobId(1173), JobId(1172), JobId(1171)] res:true f:75 g:1
53297 Sep 22 23:16:26.173 DEBG Write :1175 deps:[JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53298 Sep 22 23:16:26.176 WARN returning error on write!
53299 Sep 22 23:16:26.176 DEBG Write :1176 deps:[JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53300 Sep 22 23:16:26.176 WARN 1177 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53301 Sep 22 23:16:26.176 INFO [lossy] skipping 1178
53302 Sep 22 23:16:26.177 WARN returning error on write!
53303 Sep 22 23:16:26.177 DEBG Write :1176 deps:[JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53304 Sep 22 23:16:26.177 WARN 1178 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53305 Sep 22 23:16:26.177 INFO [lossy] skipping 1176
53306 Sep 22 23:16:26.207 DEBG Write :1176 deps:[JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53307 Sep 22 23:16:26.208 WARN returning error on flush!
53308 Sep 22 23:16:26.208 DEBG Flush :1177 extent_limit None deps:[JobId(1176), JobId(1175), JobId(1174)] res:false f:76 g:1
53309 Sep 22 23:16:26.216 DEBG Flush :1177 extent_limit None deps:[JobId(1176), JobId(1175), JobId(1174)] res:true f:76 g:1
53310 Sep 22 23:16:26.216 INFO [lossy] sleeping 1 second
53311 Sep 22 23:16:27.235 WARN returning error on write!
53312 Sep 22 23:16:27.235 DEBG Write :1178 deps:[JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53313 Sep 22 23:16:27.266 DEBG Write :1179 deps:[JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53314 Sep 22 23:16:27.267 INFO [lossy] skipping 1180
53315 Sep 22 23:16:27.267 INFO [lossy] skipping 1185
53316 Sep 22 23:16:27.297 DEBG Write :1178 deps:[JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53317 Sep 22 23:16:27.298 INFO [lossy] skipping 1180
53318 Sep 22 23:16:27.299 INFO [lossy] skipping 1178
53319 Sep 22 23:16:27.299 INFO [lossy] skipping 1179
53320 Sep 22 23:16:27.299 INFO [lossy] skipping 1180
53321 Sep 22 23:16:27.299 INFO [lossy] skipping 1182
53322 Sep 22 23:16:27.299 INFO [lossy] skipping 1183
53323 Sep 22 23:16:27.299 INFO [lossy] skipping 1184
53324 Sep 22 23:16:27.299 INFO [lossy] skipping 1186
53325 Sep 22 23:16:27.329 DEBG Write :1178 deps:[JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53326 Sep 22 23:16:27.360 DEBG Write :1179 deps:[JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53327 Sep 22 23:16:27.377 DEBG Flush :1180 extent_limit None deps:[JobId(1179), JobId(1178), JobId(1177)] res:true f:77 g:1
53328 Sep 22 23:16:27.407 DEBG Write :1181 deps:[JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53329 Sep 22 23:16:27.439 DEBG Write :1182 deps:[JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53330 Sep 22 23:16:27.440 WARN returning error on flush!
53331 Sep 22 23:16:27.440 DEBG Flush :1183 extent_limit None deps:[JobId(1182), JobId(1181), JobId(1180)] res:false f:78 g:1
53332 Sep 22 23:16:27.440 WARN 1184 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53333 Sep 22 23:16:27.440 WARN 1185 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53334 Sep 22 23:16:27.440 INFO [lossy] skipping 1186
53335 Sep 22 23:16:27.440 DEBG Flush :1180 extent_limit None deps:[JobId(1179), JobId(1178), JobId(1177)] res:true f:77 g:1
53336 Sep 22 23:16:27.470 DEBG Write :1182 deps:[JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53337 Sep 22 23:16:27.472 INFO [lossy] skipping 1186
53338 Sep 22 23:16:27.502 DEBG Write :1181 deps:[JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53339 Sep 22 23:16:27.503 INFO [lossy] skipping 1183
53340 Sep 22 23:16:27.503 INFO [lossy] skipping 1184
53341 Sep 22 23:16:27.503 INFO [lossy] skipping 1185
53342 Sep 22 23:16:27.519 DEBG Flush :1183 extent_limit None deps:[JobId(1182), JobId(1181), JobId(1180)] res:true f:78 g:1
53343 Sep 22 23:16:27.519 WARN 1186 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53344 Sep 22 23:16:27.549 DEBG Write :1184 deps:[JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53345 Sep 22 23:16:27.580 DEBG Write :1185 deps:[JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53346 Sep 22 23:16:27.582 DEBG Flush :1183 extent_limit None deps:[JobId(1182), JobId(1181), JobId(1180)] res:true f:78 g:1
53347 Sep 22 23:16:27.612 DEBG Write :1184 deps:[JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53348 Sep 22 23:16:27.643 DEBG Write :1185 deps:[JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53349 Sep 22 23:16:27.645 INFO [lossy] skipping 1186
53350 Sep 22 23:16:27.660 DEBG Flush :1186 extent_limit None deps:[JobId(1185), JobId(1184), JobId(1183)] res:true f:79 g:1
53351 Sep 22 23:16:27.660 INFO [lossy] sleeping 1 second
53352 Sep 22 23:16:27.660 DEBG Flush :1186 extent_limit None deps:[JobId(1185), JobId(1184), JobId(1183)] res:true f:79 g:1
53353 Sep 22 23:16:27.660 INFO [lossy] sleeping 1 second
53354 Sep 22 23:16:28.678 WARN returning error on write!
53355 Sep 22 23:16:28.678 DEBG Write :1187 deps:[JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53356 Sep 22 23:16:28.680 WARN returning error on write!
53357 Sep 22 23:16:28.680 DEBG Write :1188 deps:[JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53358 Sep 22 23:16:28.680 INFO [lossy] skipping 1190
53359 Sep 22 23:16:28.680 INFO [lossy] skipping 1193
53360 Sep 22 23:16:28.710 DEBG Write :1187 deps:[JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53361 Sep 22 23:16:28.712 WARN returning error on write!
53362 Sep 22 23:16:28.712 DEBG Write :1188 deps:[JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53363 Sep 22 23:16:28.712 INFO [lossy] skipping 1190
53364 Sep 22 23:16:28.712 INFO [lossy] skipping 1188
53365 Sep 22 23:16:28.712 INFO [lossy] skipping 1190
53366 Sep 22 23:16:28.742 DEBG Write :1188 deps:[JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53367 Sep 22 23:16:28.743 INFO [lossy] sleeping 1 second
53368 Sep 22 23:16:28.743 INFO [lossy] skipping 1187
53369 Sep 22 23:16:28.743 INFO [lossy] skipping 1188
53370 Sep 22 23:16:28.743 INFO [lossy] skipping 1189
53371 Sep 22 23:16:28.743 INFO [lossy] skipping 1191
53372 Sep 22 23:16:28.773 DEBG Write :1187 deps:[JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53373 Sep 22 23:16:28.775 WARN returning error on write!
53374 Sep 22 23:16:28.775 DEBG Write :1188 deps:[JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53375 Sep 22 23:16:28.806 DEBG Write :1188 deps:[JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53376 Sep 22 23:16:28.815 DEBG Flush :1189 extent_limit None deps:[JobId(1188), JobId(1187), JobId(1186)] res:true f:80 g:1
53377 Sep 22 23:16:28.816 WARN returning error on write!
53378 Sep 22 23:16:28.816 DEBG Write :1190 deps:[JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53379 Sep 22 23:16:28.846 DEBG Write :1191 deps:[JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53380 Sep 22 23:16:28.847 INFO [lossy] skipping 1192
53381 Sep 22 23:16:28.847 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53382 Sep 22 23:16:28.848 WARN returning error on write!
53383 Sep 22 23:16:28.848 DEBG Write :1190 deps:[JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53384 Sep 22 23:16:28.848 WARN 1192 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53385 Sep 22 23:16:28.878 DEBG Write :1190 deps:[JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53386 Sep 22 23:16:28.887 DEBG Flush :1192 extent_limit None deps:[JobId(1191), JobId(1190), JobId(1189)] res:true f:81 g:1
53387 Sep 22 23:16:28.917 DEBG Write :1193 deps:[JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53388 Sep 22 23:16:28.918 INFO [lossy] sleeping 1 second
53389 Sep 22 23:16:29.941 DEBG Flush :1189 extent_limit None deps:[JobId(1188), JobId(1187), JobId(1186)] res:true f:80 g:1
53390 Sep 22 23:16:29.942 WARN returning error on write!
53391 Sep 22 23:16:29.942 DEBG Write :1190 deps:[JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53392 Sep 22 23:16:29.942 INFO [lossy] skipping 1191
53393 Sep 22 23:16:29.942 WARN 1192 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53394 Sep 22 23:16:29.942 INFO [lossy] skipping 1193
53395 Sep 22 23:16:29.972 DEBG Write :1190 deps:[JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53396 Sep 22 23:16:29.973 INFO [lossy] skipping 1191
53397 Sep 22 23:16:29.973 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53398 Sep 22 23:16:30.003 DEBG Write :1191 deps:[JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53399 Sep 22 23:16:30.012 DEBG Flush :1192 extent_limit None deps:[JobId(1191), JobId(1190), JobId(1189)] res:true f:81 g:1
53400 Sep 22 23:16:30.042 DEBG Write :1193 deps:[JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53401 Sep 22 23:16:30.074 DEBG Write :1194 deps:[JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53402 Sep 22 23:16:30.075 WARN returning error on flush!
53403 Sep 22 23:16:30.075 DEBG Flush :1195 extent_limit None deps:[JobId(1194), JobId(1193), JobId(1192)] res:false f:82 g:1
53404 Sep 22 23:16:30.075 INFO [lossy] skipping 1196
53405 Sep 22 23:16:30.075 WARN 1197 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53406 Sep 22 23:16:30.075 INFO [lossy] skipping 1199
53407 Sep 22 23:16:30.075 INFO [lossy] skipping 1195
53408 Sep 22 23:16:30.075 INFO [lossy] skipping 1196
53409 Sep 22 23:16:30.075 WARN 1199 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53410 Sep 22 23:16:30.083 DEBG Flush :1195 extent_limit None deps:[JobId(1194), JobId(1193), JobId(1192)] res:true f:82 g:1
53411 Sep 22 23:16:30.113 DEBG Write :1196 deps:[JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53412 Sep 22 23:16:30.144 DEBG Write :1197 deps:[JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53413 Sep 22 23:16:30.153 DEBG Flush :1198 extent_limit None deps:[JobId(1197), JobId(1196), JobId(1195)] res:true f:83 g:1
53414 Sep 22 23:16:30.183 DEBG Write :1199 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53415 Sep 22 23:16:30.185 INFO [lossy] sleeping 1 second
53416 Sep 22 23:16:30.215 DEBG Write :1194 deps:[JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53417 Sep 22 23:16:30.224 DEBG Flush :1195 extent_limit None deps:[JobId(1194), JobId(1193), JobId(1192)] res:true f:82 g:1
53418 Sep 22 23:16:30.254 DEBG Write :1196 deps:[JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53419 Sep 22 23:16:30.286 DEBG Write :1197 deps:[JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53420 Sep 22 23:16:30.295 DEBG Flush :1198 extent_limit None deps:[JobId(1197), JobId(1196), JobId(1195)] res:true f:83 g:1
53421 Sep 22 23:16:30.296 WARN returning error on write!
53422 Sep 22 23:16:30.296 DEBG Write :1199 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53423 Sep 22 23:16:30.326 DEBG Write :1200 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53424 Sep 22 23:16:30.327 INFO [lossy] skipping 1199
53425 Sep 22 23:16:30.327 INFO [lossy] skipping 1199
53426 Sep 22 23:16:30.357 DEBG Write :1199 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53427 Sep 22 23:16:30.366 DEBG Flush :1201 extent_limit None deps:[JobId(1200), JobId(1199), JobId(1198)] res:true f:84 g:1
53428 Sep 22 23:16:30.366 INFO [lossy] sleeping 1 second
53429 Sep 22 23:16:31.385 WARN returning error on write!
53430 Sep 22 23:16:31.385 DEBG Write :1200 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53431 Sep 22 23:16:31.385 INFO [lossy] skipping 1201
53432 Sep 22 23:16:31.385 INFO [lossy] skipping 1207
53433 Sep 22 23:16:31.415 DEBG Write :1200 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53434 Sep 22 23:16:31.416 WARN returning error on flush!
53435 Sep 22 23:16:31.416 DEBG Flush :1201 extent_limit None deps:[JobId(1200), JobId(1199), JobId(1198)] res:false f:84 g:1
53436 Sep 22 23:16:31.424 DEBG Flush :1201 extent_limit None deps:[JobId(1200), JobId(1199), JobId(1198)] res:true f:84 g:1
53437 Sep 22 23:16:31.424 INFO [lossy] sleeping 1 second
53438 Sep 22 23:16:31.425 INFO [lossy] skipping 1202
53439 Sep 22 23:16:31.425 WARN returning error on write!
53440 Sep 22 23:16:31.425 DEBG Write :1203 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53441 Sep 22 23:16:31.426 INFO [lossy] skipping 1205
53442 Sep 22 23:16:31.426 INFO [lossy] skipping 1208
53443 Sep 22 23:16:31.456 DEBG Write :1202 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53444 Sep 22 23:16:31.487 DEBG Write :1203 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53445 Sep 22 23:16:31.496 DEBG Flush :1204 extent_limit None deps:[JobId(1203), JobId(1202), JobId(1201)] res:true f:85 g:1
53446 Sep 22 23:16:31.526 DEBG Write :1205 deps:[JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53447 Sep 22 23:16:31.528 WARN returning error on write!
53448 Sep 22 23:16:31.528 DEBG Write :1206 deps:[JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53449 Sep 22 23:16:31.528 WARN 1207 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53450 Sep 22 23:16:31.528 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53451 Sep 22 23:16:31.558 DEBG Write :1206 deps:[JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53452 Sep 22 23:16:31.567 DEBG Flush :1207 extent_limit None deps:[JobId(1206), JobId(1205), JobId(1204)] res:true f:86 g:1
53453 Sep 22 23:16:31.597 DEBG Write :1208 deps:[JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53454 Sep 22 23:16:31.598 INFO [lossy] sleeping 1 second
53455 Sep 22 23:16:32.616 INFO [lossy] skipping 1202
53456 Sep 22 23:16:32.647 DEBG Write :1203 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53457 Sep 22 23:16:32.648 INFO [lossy] skipping 1204
53458 Sep 22 23:16:32.648 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53459 Sep 22 23:16:32.648 WARN 1206 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53460 Sep 22 23:16:32.649 WARN returning error on write!
53461 Sep 22 23:16:32.649 DEBG Write :1202 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53462 Sep 22 23:16:32.649 WARN 1204 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53463 Sep 22 23:16:32.679 DEBG Write :1202 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53464 Sep 22 23:16:32.680 INFO [lossy] skipping 1204
53465 Sep 22 23:16:32.680 INFO [lossy] skipping 1206
53466 Sep 22 23:16:32.680 INFO [lossy] skipping 1208
53467 Sep 22 23:16:32.688 DEBG Flush :1204 extent_limit None deps:[JobId(1203), JobId(1202), JobId(1201)] res:true f:85 g:1
53468 Sep 22 23:16:32.719 DEBG Write :1206 deps:[JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53469 Sep 22 23:16:32.720 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53470 Sep 22 23:16:32.720 INFO [lossy] skipping 1205
53471 Sep 22 23:16:32.720 WARN 1207 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53472 Sep 22 23:16:32.720 WARN 1209 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53473 Sep 22 23:16:32.720 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53474 Sep 22 23:16:32.720 WARN 1212 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53475 Sep 22 23:16:32.720 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
53476 Sep 22 23:16:32.750 DEBG Write :1205 deps:[JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53477 Sep 22 23:16:32.759 DEBG Flush :1207 extent_limit None deps:[JobId(1206), JobId(1205), JobId(1204)] res:true f:86 g:1
53478 Sep 22 23:16:32.759 INFO [lossy] skipping 1208
53479 Sep 22 23:16:32.760 WARN returning error on write!
53480 Sep 22 23:16:32.760 DEBG Write :1209 deps:[JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53481 Sep 22 23:16:32.761 WARN 1210 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53482 Sep 22 23:16:32.761 INFO [lossy] skipping 1211
53483 Sep 22 23:16:32.761 WARN 1212 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53484 Sep 22 23:16:32.761 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53485 Sep 22 23:16:32.762 WARN returning error on write!
53486 Sep 22 23:16:32.762 DEBG Write :1208 deps:[JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53487 Sep 22 23:16:32.792 DEBG Write :1209 deps:[JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53488 Sep 22 23:16:32.793 INFO [lossy] skipping 1211
53489 Sep 22 23:16:32.823 DEBG Write :1208 deps:[JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53490 Sep 22 23:16:32.825 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53491 Sep 22 23:16:32.825 WARN returning error on flush!
53492 Sep 22 23:16:32.825 DEBG Flush :1210 extent_limit None deps:[JobId(1209), JobId(1208), JobId(1207)] res:false f:87 g:1
53493 Sep 22 23:16:32.825 INFO [lossy] skipping 1212
53494 Sep 22 23:16:32.825 INFO [lossy] skipping 1213
53495 Sep 22 23:16:32.833 DEBG Flush :1210 extent_limit None deps:[JobId(1209), JobId(1208), JobId(1207)] res:true f:87 g:1
53496 Sep 22 23:16:32.833 INFO [lossy] skipping 1212
53497 Sep 22 23:16:32.833 WARN 1213 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53498 Sep 22 23:16:32.833 INFO [lossy] skipping 1212
53499 Sep 22 23:16:32.833 INFO [lossy] skipping 1212
53500 Sep 22 23:16:32.834 WARN returning error on write!
53501 Sep 22 23:16:32.834 DEBG Write :1212 deps:[JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53502 Sep 22 23:16:32.834 INFO [lossy] skipping 1212
53503 Sep 22 23:16:32.864 DEBG Write :1212 deps:[JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53504 Sep 22 23:16:32.865 INFO [lossy] sleeping 1 second
53505 Sep 22 23:16:32.896 DEBG Write :1209 deps:[JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53506 Sep 22 23:16:32.897 WARN returning error on flush!
53507 Sep 22 23:16:32.897 DEBG Flush :1210 extent_limit None deps:[JobId(1209), JobId(1208), JobId(1207)] res:false f:87 g:1
53508 Sep 22 23:16:32.897 INFO [lossy] skipping 1211
53509 Sep 22 23:16:32.897 INFO [lossy] skipping 1214
53510 Sep 22 23:16:32.897 INFO [lossy] skipping 1210
53511 Sep 22 23:16:32.897 INFO [lossy] skipping 1214
53512 Sep 22 23:16:32.905 DEBG Flush :1210 extent_limit None deps:[JobId(1209), JobId(1208), JobId(1207)] res:true f:87 g:1
53513 Sep 22 23:16:32.905 INFO [lossy] skipping 1211
53514 Sep 22 23:16:32.936 DEBG Write :1212 deps:[JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53515 Sep 22 23:16:32.937 WARN 1213 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53516 Sep 22 23:16:32.937 INFO [lossy] skipping 1214
53517 Sep 22 23:16:32.937 INFO [lossy] skipping 1215
53518 Sep 22 23:16:32.967 DEBG Write :1211 deps:[JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53519 Sep 22 23:16:32.969 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53520 Sep 22 23:16:32.969 WARN returning error on flush!
53521 Sep 22 23:16:32.969 DEBG Flush :1213 extent_limit None deps:[JobId(1212), JobId(1211), JobId(1210)] res:false f:88 g:1
53522 Sep 22 23:16:32.969 INFO [lossy] skipping 1215
53523 Sep 22 23:16:32.977 DEBG Flush :1213 extent_limit None deps:[JobId(1212), JobId(1211), JobId(1210)] res:true f:88 g:1
53524 Sep 22 23:16:32.978 WARN returning error on write!
53525 Sep 22 23:16:32.978 DEBG Write :1215 deps:[JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53526 Sep 22 23:16:33.008 DEBG Write :1215 deps:[JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53527 Sep 22 23:16:33.040 DEBG Write :1214 deps:[JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53528 Sep 22 23:16:33.041 WARN returning error on flush!
53529 Sep 22 23:16:33.041 DEBG Flush :1216 extent_limit None deps:[JobId(1215), JobId(1214), JobId(1213)] res:false f:89 g:1
53530 Sep 22 23:16:33.049 DEBG Flush :1216 extent_limit None deps:[JobId(1215), JobId(1214), JobId(1213)] res:true f:89 g:1
53531 Sep 22 23:16:33.049 INFO [lossy] sleeping 1 second
53532 Sep 22 23:16:34.094 DEBG Write :1211 deps:[JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53533 Sep 22 23:16:34.095 INFO [lossy] skipping 1213
53534 Sep 22 23:16:34.095 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53535 Sep 22 23:16:34.095 INFO [lossy] skipping 1213
53536 Sep 22 23:16:34.103 DEBG Flush :1213 extent_limit None deps:[JobId(1212), JobId(1211), JobId(1210)] res:true f:88 g:1
53537 Sep 22 23:16:34.104 WARN returning error on write!
53538 Sep 22 23:16:34.104 DEBG Write :1214 deps:[JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53539 Sep 22 23:16:34.106 WARN returning error on write!
53540 Sep 22 23:16:34.106 DEBG Write :1215 deps:[JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53541 Sep 22 23:16:34.106 WARN 1216 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53542 Sep 22 23:16:34.106 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53543 Sep 22 23:16:34.106 WARN 1218 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53544 Sep 22 23:16:34.106 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53545 Sep 22 23:16:34.106 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53546 Sep 22 23:16:34.106 INFO [lossy] skipping 1222
53547 Sep 22 23:16:34.136 DEBG Write :1214 deps:[JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53548 Sep 22 23:16:34.167 DEBG Write :1215 deps:[JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53549 Sep 22 23:16:34.176 DEBG Flush :1216 extent_limit None deps:[JobId(1215), JobId(1214), JobId(1213)] res:true f:89 g:1
53550 Sep 22 23:16:34.177 WARN returning error on write!
53551 Sep 22 23:16:34.177 DEBG Write :1217 deps:[JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53552 Sep 22 23:16:34.207 DEBG Write :1218 deps:[JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53553 Sep 22 23:16:34.208 INFO [lossy] skipping 1219
53554 Sep 22 23:16:34.208 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53555 Sep 22 23:16:34.209 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53556 Sep 22 23:16:34.238 DEBG Write :1217 deps:[JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53557 Sep 22 23:16:34.240 INFO [lossy] skipping 1219
53558 Sep 22 23:16:34.240 INFO [lossy] skipping 1219
53559 Sep 22 23:16:34.240 INFO [lossy] skipping 1219
53560 Sep 22 23:16:34.248 DEBG Flush :1219 extent_limit None deps:[JobId(1218), JobId(1217), JobId(1216)] res:true f:90 g:1
53561 Sep 22 23:16:34.248 INFO [lossy] skipping 1220
53562 Sep 22 23:16:34.248 INFO [lossy] skipping 1221
53563 Sep 22 23:16:34.248 WARN 1222 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53564 Sep 22 23:16:34.249 WARN returning error on write!
53565 Sep 22 23:16:34.249 DEBG Write :1220 deps:[JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53566 Sep 22 23:16:34.249 INFO [lossy] skipping 1221
53567 Sep 22 23:16:34.279 DEBG Write :1220 deps:[JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53568 Sep 22 23:16:34.281 INFO [lossy] skipping 1221
53569 Sep 22 23:16:34.311 DEBG Write :1221 deps:[JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53570 Sep 22 23:16:34.320 DEBG Flush :1222 extent_limit None deps:[JobId(1221), JobId(1220), JobId(1219)] res:true f:91 g:1
53571 Sep 22 23:16:34.320 INFO [lossy] sleeping 1 second
53572 Sep 22 23:16:34.321 WARN returning error on write!
53573 Sep 22 23:16:34.321 DEBG Write :1217 deps:[JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53574 Sep 22 23:16:34.321 INFO [lossy] skipping 1218
53575 Sep 22 23:16:34.321 INFO [lossy] skipping 1220
53576 Sep 22 23:16:34.321 INFO [lossy] skipping 1222
53577 Sep 22 23:16:34.321 INFO [lossy] skipping 1217
53578 Sep 22 23:16:34.352 DEBG Write :1218 deps:[JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53579 Sep 22 23:16:34.383 DEBG Write :1217 deps:[JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53580 Sep 22 23:16:34.384 INFO [lossy] skipping 1219
53581 Sep 22 23:16:34.384 INFO [lossy] skipping 1223
53582 Sep 22 23:16:34.384 INFO [lossy] skipping 1219
53583 Sep 22 23:16:34.384 WARN returning error on flush!
53584 Sep 22 23:16:34.384 DEBG Flush :1219 extent_limit None deps:[JobId(1218), JobId(1217), JobId(1216)] res:false f:90 g:1
53585 Sep 22 23:16:34.384 INFO [lossy] skipping 1219
53586 Sep 22 23:16:34.384 INFO [lossy] skipping 1219
53587 Sep 22 23:16:34.392 DEBG Flush :1219 extent_limit None deps:[JobId(1218), JobId(1217), JobId(1216)] res:true f:90 g:1
53588 Sep 22 23:16:34.422 DEBG Write :1220 deps:[JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53589 Sep 22 23:16:34.424 WARN returning error on write!
53590 Sep 22 23:16:34.424 DEBG Write :1221 deps:[JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53591 Sep 22 23:16:34.424 WARN 1222 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53592 Sep 22 23:16:34.424 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53593 Sep 22 23:16:34.454 DEBG Write :1221 deps:[JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53594 Sep 22 23:16:34.455 INFO [lossy] sleeping 1 second
53595 Sep 22 23:16:35.507 DEBG Write :1223 deps:[JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53596 Sep 22 23:16:35.538 DEBG Write :1224 deps:[JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53597 Sep 22 23:16:35.539 WARN returning error on flush!
53598 Sep 22 23:16:35.539 DEBG Flush :1225 extent_limit None deps:[JobId(1224), JobId(1223), JobId(1222)] res:false f:92 g:1
53599 Sep 22 23:16:35.539 INFO [lossy] skipping 1227
53600 Sep 22 23:16:35.539 INFO [lossy] skipping 1228
53601 Sep 22 23:16:35.547 DEBG Flush :1225 extent_limit None deps:[JobId(1224), JobId(1223), JobId(1222)] res:true f:92 g:1
53602 Sep 22 23:16:35.578 DEBG Write :1227 deps:[JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53603 Sep 22 23:16:35.609 DEBG Write :1226 deps:[JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53604 Sep 22 23:16:35.618 DEBG Flush :1228 extent_limit None deps:[JobId(1227), JobId(1226), JobId(1225)] res:true f:93 g:1
53605 Sep 22 23:16:35.648 DEBG Write :1229 deps:[JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53606 Sep 22 23:16:35.649 INFO [lossy] sleeping 1 second
53607 Sep 22 23:16:35.658 DEBG Flush :1222 extent_limit None deps:[JobId(1221), JobId(1220), JobId(1219)] res:true f:91 g:1
53608 Sep 22 23:16:35.688 DEBG Write :1223 deps:[JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53609 Sep 22 23:16:35.689 INFO [lossy] skipping 1224
53610 Sep 22 23:16:35.689 INFO [lossy] skipping 1228
53611 Sep 22 23:16:35.689 INFO [lossy] skipping 1229
53612 Sep 22 23:16:35.720 DEBG Write :1224 deps:[JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53613 Sep 22 23:16:35.729 DEBG Flush :1225 extent_limit None deps:[JobId(1224), JobId(1223), JobId(1222)] res:true f:92 g:1
53614 Sep 22 23:16:35.759 DEBG Write :1226 deps:[JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53615 Sep 22 23:16:35.791 DEBG Write :1227 deps:[JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53616 Sep 22 23:16:35.792 INFO [lossy] skipping 1228
53617 Sep 22 23:16:35.792 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53618 Sep 22 23:16:35.792 INFO [lossy] skipping 1230
53619 Sep 22 23:16:35.792 INFO [lossy] skipping 1231
53620 Sep 22 23:16:35.792 INFO [lossy] skipping 1228
53621 Sep 22 23:16:35.792 INFO [lossy] skipping 1230
53622 Sep 22 23:16:35.792 INFO [lossy] skipping 1231
53623 Sep 22 23:16:35.792 INFO [lossy] skipping 1228
53624 Sep 22 23:16:35.792 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53625 Sep 22 23:16:35.792 INFO [lossy] skipping 1231
53626 Sep 22 23:16:35.800 DEBG Flush :1228 extent_limit None deps:[JobId(1227), JobId(1226), JobId(1225)] res:true f:93 g:1
53627 Sep 22 23:16:35.800 INFO [lossy] skipping 1231
53628 Sep 22 23:16:35.800 INFO [lossy] skipping 1231
53629 Sep 22 23:16:35.800 INFO [lossy] skipping 1231
53630 Sep 22 23:16:35.800 INFO [lossy] skipping 1231
53631 Sep 22 23:16:35.800 WARN 1231 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53632 Sep 22 23:16:35.801 WARN returning error on write!
53633 Sep 22 23:16:35.801 DEBG Write :1229 deps:[JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53634 Sep 22 23:16:35.802 WARN returning error on write!
53635 Sep 22 23:16:35.802 DEBG Write :1230 deps:[JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53636 Sep 22 23:16:35.833 DEBG Write :1229 deps:[JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53637 Sep 22 23:16:35.834 INFO [lossy] skipping 1230
53638 Sep 22 23:16:35.864 DEBG Write :1230 deps:[JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53639 Sep 22 23:16:35.865 INFO [lossy] skipping 1231
53640 Sep 22 23:16:35.865 INFO [lossy] skipping 1231
53641 Sep 22 23:16:35.865 WARN returning error on flush!
53642 Sep 22 23:16:35.865 DEBG Flush :1231 extent_limit None deps:[JobId(1230), JobId(1229), JobId(1228)] res:false f:94 g:1
53643 Sep 22 23:16:35.865 INFO [lossy] skipping 1231
53644 Sep 22 23:16:35.865 WARN returning error on flush!
53645 Sep 22 23:16:35.865 DEBG Flush :1231 extent_limit None deps:[JobId(1230), JobId(1229), JobId(1228)] res:false f:94 g:1
53646 Sep 22 23:16:35.865 WARN returning error on flush!
53647 Sep 22 23:16:35.865 DEBG Flush :1231 extent_limit None deps:[JobId(1230), JobId(1229), JobId(1228)] res:false f:94 g:1
53648 Sep 22 23:16:35.865 INFO [lossy] skipping 1231
53649 Sep 22 23:16:35.873 DEBG Flush :1231 extent_limit None deps:[JobId(1230), JobId(1229), JobId(1228)] res:true f:94 g:1
53650 Sep 22 23:16:35.873 INFO [lossy] sleeping 1 second
53651 Sep 22 23:16:36.428 ERRO [0] job id 1173 saw error GenericError("test error")
53652 Sep 22 23:16:36.428 ERRO [0] job id 1173 saw error GenericError("test error")
53653 Sep 22 23:16:36.428 ERRO [0] job id 1176 saw error GenericError("test error")
53654 Sep 22 23:16:36.428 ERRO [0] job id 1176 saw error GenericError("test error")
53655 Sep 22 23:16:36.428 ERRO [0] job id 1177 saw error GenericError("test error")
53656 Sep 22 23:16:36.428 ERRO [0] job id 1188 saw error GenericError("test error")
53657 Sep 22 23:16:36.428 ERRO [0] job id 1190 saw error GenericError("test error")
53658 Sep 22 23:16:36.428 ERRO [0] job id 1190 saw error GenericError("test error")
53659 Sep 22 23:16:36.429 ERRO [1] job id 1174 saw error GenericError("test error")
53660 Sep 22 23:16:36.429 ERRO [1] job id 1174 saw error GenericError("test error")
53661 Sep 22 23:16:36.429 ERRO [1] job id 1177 saw error GenericError("test error")
53662 Sep 22 23:16:36.429 ERRO [1] job id 1178 saw error GenericError("test error")
53663 Sep 22 23:16:36.429 ERRO [1] job id 1183 saw error GenericError("test error")
53664 Sep 22 23:16:36.429 ERRO [1] job id 1187 saw error GenericError("test error")
53665 Sep 22 23:16:36.429 ERRO [1] job id 1188 saw error GenericError("test error")
53666 Sep 22 23:16:36.429 ERRO [1] job id 1188 saw error GenericError("test error")
53667 Sep 22 23:16:36.430 ERRO [0] job id 1199 saw error GenericError("test error")
53668 Sep 22 23:16:36.430 ERRO [0] job id 1203 saw error GenericError("test error")
53669 Sep 22 23:16:36.430 ERRO [0] job id 1206 saw error GenericError("test error")
53670 Sep 22 23:16:36.430 ERRO [0] job id 1210 saw error GenericError("test error")
53671 Sep 22 23:16:36.430 DEBG up_ds_listen was notified
53672 Sep 22 23:16:36.430 DEBG up_ds_listen process 1174
53673 Sep 22 23:16:36.430 DEBG [A] ack job 1174:175, : downstairs
53674 Sep 22 23:16:36.430 DEBG up_ds_listen process 1177
53675 Sep 22 23:16:36.430 DEBG [A] ack job 1177:178, : downstairs
53676 Sep 22 23:16:36.430 DEBG up_ds_listen process 1180
53677 Sep 22 23:16:36.430 DEBG [A] ack job 1180:181, : downstairs
53678 Sep 22 23:16:36.430 DEBG up_ds_listen process 1183
53679 Sep 22 23:16:36.430 DEBG [A] ack job 1183:184, : downstairs
53680 Sep 22 23:16:36.430 DEBG up_ds_listen process 1186
53681 Sep 22 23:16:36.430 DEBG [A] ack job 1186:187, : downstairs
53682 Sep 22 23:16:36.430 DEBG up_ds_listen checked 5 jobs, back to waiting
53683 Sep 22 23:16:36.430 DEBG up_ds_listen was notified
53684 Sep 22 23:16:36.430 DEBG up_ds_listen checked 0 jobs, back to waiting
53685 Sep 22 23:16:36.430 DEBG up_ds_listen was notified
53686 Sep 22 23:16:36.430 DEBG up_ds_listen checked 0 jobs, back to waiting
53687 Sep 22 23:16:36.430 DEBG up_ds_listen was notified
53688 Sep 22 23:16:36.430 DEBG up_ds_listen checked 0 jobs, back to waiting
53689 Sep 22 23:16:36.430 DEBG up_ds_listen was notified
53690 Sep 22 23:16:36.430 DEBG up_ds_listen checked 0 jobs, back to waiting
53691 Sep 22 23:16:36.430 ERRO [1] job id 1190 saw error GenericError("test error")
53692 Sep 22 23:16:36.430 ERRO [1] job id 1195 saw error GenericError("test error")
53693 Sep 22 23:16:36.430 ERRO [1] job id 1200 saw error GenericError("test error")
53694 Sep 22 23:16:36.430 ERRO [1] job id 1201 saw error GenericError("test error")
53695 Sep 22 23:16:36.431 ERRO [1] job id 1202 saw error GenericError("test error")
53696 Sep 22 23:16:36.431 ERRO [0] job id 1213 saw error GenericError("test error")
53697 Sep 22 23:16:36.431 ERRO [0] job id 1215 saw error GenericError("test error")
53698 Sep 22 23:16:36.431 ERRO [0] job id 1216 saw error GenericError("test error")
53699 Sep 22 23:16:36.431 ERRO [0] job id 1217 saw error GenericError("test error")
53700 Sep 22 23:16:36.431 ERRO [0] job id 1219 saw error GenericError("test error")
53701 Sep 22 23:16:36.431 ERRO [0] job id 1221 saw error GenericError("test error")
53702 Sep 22 23:16:36.431 ERRO [0] job id 1229 saw error GenericError("test error")
53703 Sep 22 23:16:36.431 ERRO [0] job id 1230 saw error GenericError("test error")
53704 Sep 22 23:16:36.431 DEBG up_ds_listen was notified
53705 Sep 22 23:16:36.431 DEBG up_ds_listen process 1189
53706 Sep 22 23:16:36.431 DEBG [A] ack job 1189:190, : downstairs
53707 Sep 22 23:16:36.431 DEBG up_ds_listen process 1192
53708 Sep 22 23:16:36.431 DEBG [A] ack job 1192:193, : downstairs
53709 Sep 22 23:16:36.431 DEBG up_ds_listen process 1195
53710 Sep 22 23:16:36.431 DEBG [A] ack job 1195:196, : downstairs
53711 Sep 22 23:16:36.431 DEBG up_ds_listen process 1198
53712 Sep 22 23:16:36.431 DEBG [A] ack job 1198:199, : downstairs
53713 Sep 22 23:16:36.431 DEBG up_ds_listen process 1201
53714 Sep 22 23:16:36.431 DEBG [A] ack job 1201:202, : downstairs
53715 Sep 22 23:16:36.431 DEBG up_ds_listen process 1204
53716 Sep 22 23:16:36.431 DEBG [A] ack job 1204:205, : downstairs
53717 Sep 22 23:16:36.431 DEBG up_ds_listen process 1207
53718 Sep 22 23:16:36.431 DEBG [A] ack job 1207:208, : downstairs
53719 Sep 22 23:16:36.431 DEBG up_ds_listen checked 7 jobs, back to waiting
53720 Sep 22 23:16:36.431 DEBG up_ds_listen was notified
53721 Sep 22 23:16:36.431 DEBG up_ds_listen checked 0 jobs, back to waiting
53722 Sep 22 23:16:36.431 DEBG up_ds_listen was notified
53723 Sep 22 23:16:36.431 DEBG up_ds_listen checked 0 jobs, back to waiting
53724 Sep 22 23:16:36.431 DEBG up_ds_listen was notified
53725 Sep 22 23:16:36.431 DEBG up_ds_listen checked 0 jobs, back to waiting
53726 Sep 22 23:16:36.431 DEBG up_ds_listen was notified
53727 Sep 22 23:16:36.431 DEBG up_ds_listen checked 0 jobs, back to waiting
53728 Sep 22 23:16:36.431 DEBG up_ds_listen was notified
53729 Sep 22 23:16:36.431 DEBG up_ds_listen checked 0 jobs, back to waiting
53730 Sep 22 23:16:36.431 DEBG up_ds_listen was notified
53731 Sep 22 23:16:36.431 DEBG up_ds_listen checked 0 jobs, back to waiting
53732 Sep 22 23:16:36.431 ERRO [1] job id 1209 saw error GenericError("test error")
53733 Sep 22 23:16:36.431 ERRO [1] job id 1208 saw error GenericError("test error")
53734 Sep 22 23:16:36.431 ERRO [1] job id 1210 saw error GenericError("test error")
53735 Sep 22 23:16:36.431 ERRO [1] job id 1212 saw error GenericError("test error")
53736 Sep 22 23:16:36.431 ERRO [1] job id 1214 saw error GenericError("test error")
53737 Sep 22 23:16:36.431 ERRO [1] job id 1215 saw error GenericError("test error")
53738 Sep 22 23:16:36.432 ERRO [1] job id 1217 saw error GenericError("test error")
53739 Sep 22 23:16:36.432 ERRO [1] job id 1220 saw error GenericError("test error")
53740 Sep 22 23:16:36.432 DEBG up_ds_listen was notified
53741 Sep 22 23:16:36.432 DEBG up_ds_listen process 1210
53742 Sep 22 23:16:36.432 DEBG [A] ack job 1210:211, : downstairs
53743 Sep 22 23:16:36.432 DEBG up_ds_listen process 1213
53744 Sep 22 23:16:36.432 DEBG [A] ack job 1213:214, : downstairs
53745 Sep 22 23:16:36.432 DEBG up_ds_listen process 1216
53746 Sep 22 23:16:36.432 DEBG [A] ack job 1216:217, : downstairs
53747 Sep 22 23:16:36.432 DEBG up_ds_listen process 1219
53748 Sep 22 23:16:36.432 DEBG [A] ack job 1219:220, : downstairs
53749 Sep 22 23:16:36.432 DEBG up_ds_listen process 1222
53750 Sep 22 23:16:36.432 DEBG [A] ack job 1222:223, : downstairs
53751 Sep 22 23:16:36.432 DEBG up_ds_listen checked 5 jobs, back to waiting
53752 Sep 22 23:16:36.432 DEBG up_ds_listen was notified
53753 Sep 22 23:16:36.432 DEBG up_ds_listen checked 0 jobs, back to waiting
53754 Sep 22 23:16:36.432 DEBG up_ds_listen was notified
53755 Sep 22 23:16:36.432 DEBG up_ds_listen checked 0 jobs, back to waiting
53756 Sep 22 23:16:36.432 DEBG up_ds_listen was notified
53757 Sep 22 23:16:36.432 DEBG up_ds_listen checked 0 jobs, back to waiting
53758 Sep 22 23:16:36.432 DEBG up_ds_listen was notified
53759 Sep 22 23:16:36.432 DEBG up_ds_listen checked 0 jobs, back to waiting
53760 Sep 22 23:16:36.432 ERRO [1] job id 1225 saw error GenericError("test error")
53761 Sep 22 23:16:36.432 ERRO [0] job id 1231 saw error GenericError("test error")
53762 Sep 22 23:16:36.432 ERRO [0] job id 1231 saw error GenericError("test error")
53763 Sep 22 23:16:36.432 DEBG up_ds_listen was notified
53764 Sep 22 23:16:36.432 DEBG up_ds_listen process 1225
53765 Sep 22 23:16:36.432 DEBG [A] ack job 1225:226, : downstairs
53766 Sep 22 23:16:36.432 DEBG up_ds_listen checked 1 jobs, back to waiting
53767 Sep 22 23:16:36.432 ERRO [0] job id 1231 saw error GenericError("test error")
53768 Sep 22 23:16:36.432 DEBG up_ds_listen was notified
53769 Sep 22 23:16:36.432 DEBG up_ds_listen process 1228
53770 Sep 22 23:16:36.432 DEBG [A] ack job 1228:229, : downstairs
53771 Sep 22 23:16:36.432 DEBG up_ds_listen checked 1 jobs, back to waiting
53772 Sep 22 23:16:36.440 INFO [lossy] sleeping 1 second
53773 Sep 22 23:16:36.848 DEBG Write :1230 deps:[JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53774 Sep 22 23:16:36.849 WARN returning error on flush!
53775 Sep 22 23:16:36.849 DEBG Flush :1231 extent_limit None deps:[JobId(1230), JobId(1229), JobId(1228)] res:false f:94 g:1
53776 Sep 22 23:16:36.849 INFO [lossy] skipping 1233
53777 Sep 22 23:16:36.857 DEBG Flush :1231 extent_limit None deps:[JobId(1230), JobId(1229), JobId(1228)] res:true f:94 g:1
53778 Sep 22 23:16:36.858 WARN returning error on write!
53779 Sep 22 23:16:36.858 DEBG Write :1233 deps:[JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163), JobId(1162)] res:false
53780 Sep 22 23:16:36.859 WARN returning error on write!
53781 Sep 22 23:16:36.859 DEBG Write :1233 deps:[JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163), JobId(1162)] res:false
53782 Sep 22 23:16:36.890 DEBG Write :1233 deps:[JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163), JobId(1162)] res:true
53783 Sep 22 23:16:36.921 DEBG Write :1232 deps:[JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53784 Sep 22 23:16:36.922 WARN returning error on flush!
53785 Sep 22 23:16:36.922 DEBG Flush :1234 extent_limit None deps:[JobId(1233), JobId(1232), JobId(1231)] res:false f:95 g:1
53786 Sep 22 23:16:36.922 INFO [lossy] skipping 1235
53787 Sep 22 23:16:36.922 INFO [lossy] skipping 1237
53788 Sep 22 23:16:36.922 WARN returning error on flush!
53789 Sep 22 23:16:36.922 DEBG Flush :1234 extent_limit None deps:[JobId(1233), JobId(1232), JobId(1231)] res:false f:95 g:1
53790 Sep 22 23:16:36.922 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53791 Sep 22 23:16:36.922 WARN returning error on flush!
53792 Sep 22 23:16:36.922 DEBG Flush :1234 extent_limit None deps:[JobId(1233), JobId(1232), JobId(1231)] res:false f:95 g:1
53793 Sep 22 23:16:36.930 DEBG Flush :1234 extent_limit None deps:[JobId(1233), JobId(1232), JobId(1231)] res:true f:95 g:1
53794 Sep 22 23:16:36.930 INFO [lossy] sleeping 1 second
53795 Sep 22 23:16:36.931 WARN returning error on write!
53796 Sep 22 23:16:36.932 DEBG Write :1232 deps:[JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
53797 Sep 22 23:16:36.962 DEBG Write :1233 deps:[JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163), JobId(1162)] res:true
53798 Sep 22 23:16:36.963 INFO [lossy] skipping 1232
53799 Sep 22 23:16:36.993 DEBG Write :1232 deps:[JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
53800 Sep 22 23:16:36.995 ERRO [1] job id 1231 saw error GenericError("test error")
53801 Sep 22 23:16:36.995 ERRO [1] job id 1233 saw error GenericError("test error")
53802 Sep 22 23:16:36.995 ERRO [1] job id 1233 saw error GenericError("test error")
53803 Sep 22 23:16:36.995 ERRO [1] job id 1234 saw error GenericError("test error")
53804 Sep 22 23:16:36.995 ERRO [1] job id 1234 saw error GenericError("test error")
53805 Sep 22 23:16:36.995 ERRO [1] job id 1234 saw error GenericError("test error")
53806 Sep 22 23:16:37.003 DEBG up_ds_listen was notified
53807 Sep 22 23:16:37.003 DEBG up_ds_listen process 1231
53808 Sep 22 23:16:37.003 DEBG [A] ack job 1231:232, : downstairs
53809 Sep 22 23:16:37.003 DEBG up_ds_listen checked 1 jobs, back to waiting
53810 Sep 22 23:16:37.003 DEBG Flush :1234 extent_limit None deps:[JobId(1233), JobId(1232), JobId(1231)] res:true f:95 g:1
53811 Sep 22 23:16:37.003 INFO [lossy] skipping 1235
53812 Sep 22 23:16:37.003 WARN 1236 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53813 Sep 22 23:16:37.033 DEBG Write :1235 deps:[JobId(1234), JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1164), JobId(1163)] res:true
53814 Sep 22 23:16:37.038 DEBG Flush :1236 extent_limit None deps:[JobId(1235), JobId(1234)] res:true f:96 g:1
53815 Sep 22 23:16:37.044 DEBG Read :1237 deps:[JobId(1236)] res:true
53816 Sep 22 23:16:37.066 ERRO [0] job id 1232 saw error GenericError("test error")
53817 Sep 22 23:16:37.066 DEBG up_ds_listen was notified
53818 Sep 22 23:16:37.066 DEBG up_ds_listen process 1234
53819 Sep 22 23:16:37.066 DEBG [A] ack job 1234:235, : downstairs
53820 Sep 22 23:16:37.066 DEBG up_ds_listen checked 1 jobs, back to waiting
53821 Sep 22 23:16:37.067 DEBG Flush :1238 extent_limit None deps:[JobId(1237), JobId(1236)] res:true f:97 g:1
53822 Sep 22 23:16:37.068 INFO [lossy] sleeping 1 second
53823 Sep 22 23:16:37.443 DEBG [0] Read AckReady 1237, : downstairs
53824 Sep 22 23:16:37.444 DEBG up_ds_listen was notified
53825 Sep 22 23:16:37.444 DEBG up_ds_listen process 1237
53826 Sep 22 23:16:37.444 DEBG [A] ack job 1237:238, : downstairs
53827 Sep 22 23:16:37.496 DEBG up_ds_listen checked 1 jobs, back to waiting
53828 Sep 22 23:16:37.497 INFO [lossy] skipping 1165
53829 Sep 22 23:16:37.497 INFO [lossy] skipping 1169
53830 Sep 22 23:16:37.497 INFO [lossy] skipping 1170
53831 Sep 22 23:16:37.497 INFO [lossy] skipping 1171
53832 Sep 22 23:16:37.497 INFO [lossy] skipping 1177
53833 Sep 22 23:16:37.497 INFO [lossy] skipping 1178
53834 Sep 22 23:16:37.497 INFO [lossy] skipping 1180
53835 Sep 22 23:16:37.497 INFO [lossy] skipping 1183
53836 Sep 22 23:16:37.497 INFO [lossy] skipping 1184
53837 Sep 22 23:16:37.497 INFO [lossy] skipping 1185
53838 Sep 22 23:16:37.497 INFO [lossy] skipping 1186
53839 Sep 22 23:16:37.497 INFO [lossy] skipping 1187
53840 Sep 22 23:16:37.497 INFO [lossy] skipping 1189
53841 Sep 22 23:16:37.497 INFO [lossy] skipping 1191
53842 Sep 22 23:16:37.497 INFO [lossy] skipping 1192
53843 Sep 22 23:16:37.497 INFO [lossy] skipping 1195
53844 Sep 22 23:16:37.497 INFO [lossy] skipping 1200
53845 Sep 22 23:16:37.497 INFO [lossy] skipping 1204
53846 Sep 22 23:16:37.497 INFO [lossy] skipping 1206
53847 Sep 22 23:16:37.497 INFO [lossy] skipping 1208
53848 Sep 22 23:16:37.497 INFO [lossy] skipping 1219
53849 Sep 22 23:16:37.497 INFO [lossy] skipping 1222
53850 Sep 22 23:16:37.497 INFO [lossy] skipping 1227
53851 Sep 22 23:16:37.497 INFO [lossy] skipping 1228
53852 Sep 22 23:16:37.497 INFO [lossy] skipping 1229
53853 Sep 22 23:16:37.498 INFO [lossy] skipping 1234
53854 Sep 22 23:16:37.498 INFO [lossy] skipping 1236
53855 Sep 22 23:16:37.498 INFO [lossy] skipping 1238
53856 Sep 22 23:16:37.498 WARN returning error on flush!
53857 Sep 22 23:16:37.498 DEBG Flush :1165 extent_limit None deps:[JobId(1164), JobId(1163)] res:false f:72 g:1
53858 Sep 22 23:16:37.498 INFO [lossy] skipping 1170
53859 Sep 22 23:16:37.498 INFO [lossy] skipping 1171
53860 Sep 22 23:16:37.498 INFO [lossy] skipping 1178
53861 Sep 22 23:16:37.498 INFO [lossy] skipping 1180
53862 Sep 22 23:16:37.498 INFO [lossy] skipping 1185
53863 Sep 22 23:16:37.498 INFO [lossy] skipping 1191
53864 Sep 22 23:16:37.498 INFO [lossy] skipping 1222
53865 Sep 22 23:16:37.498 INFO [lossy] skipping 1229
53866 Sep 22 23:16:37.498 INFO [lossy] skipping 1238
53867 Sep 22 23:16:37.498 DEBG Flush :1165 extent_limit None deps:[JobId(1164), JobId(1163)] res:true f:72 g:1
53868 Sep 22 23:16:37.498 INFO [lossy] skipping 1171
53869 Sep 22 23:16:37.498 INFO [lossy] skipping 1185
53870 Sep 22 23:16:37.529 DEBG Write :1166 deps:[JobId(1165), JobId(1163)] res:true
53871 Sep 22 23:16:37.560 DEBG Write :1167 deps:[JobId(1165), JobId(1163)] res:true
53872 Sep 22 23:16:37.561 WARN returning error on flush!
53873 Sep 22 23:16:37.561 DEBG Flush :1168 extent_limit None deps:[JobId(1167), JobId(1166), JobId(1165)] res:false f:73 g:1
53874 Sep 22 23:16:37.561 WARN 1169 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53875 Sep 22 23:16:37.561 INFO [lossy] skipping 1172
53876 Sep 22 23:16:37.561 WARN 1173 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53877 Sep 22 23:16:37.561 WARN 1175 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
53878 Sep 22 23:16:37.561 WARN 1176 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
53879 Sep 22 23:16:37.561 WARN 1179 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
53880 Sep 22 23:16:37.561 WARN 1181 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
53881 Sep 22 23:16:37.561 WARN 1182 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
53882 Sep 22 23:16:37.561 WARN 1184 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
53883 Sep 22 23:16:37.561 WARN 1187 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
53884 Sep 22 23:16:37.561 WARN 1188 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
53885 Sep 22 23:16:37.561 WARN 1190 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
53886 Sep 22 23:16:37.561 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
53887 Sep 22 23:16:37.561 WARN 1194 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
53888 Sep 22 23:16:37.561 WARN 1196 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
53889 Sep 22 23:16:37.561 WARN 1197 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
53890 Sep 22 23:16:37.561 WARN 1199 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
53891 Sep 22 23:16:37.561 WARN 1200 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
53892 Sep 22 23:16:37.561 INFO [lossy] skipping 1201
53893 Sep 22 23:16:37.561 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
53894 Sep 22 23:16:37.561 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
53895 Sep 22 23:16:37.561 INFO [lossy] skipping 1204
53896 Sep 22 23:16:37.561 INFO [lossy] skipping 1205
53897 Sep 22 23:16:37.561 WARN 1206 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
53898 Sep 22 23:16:37.561 INFO [lossy] skipping 1208
53899 Sep 22 23:16:37.561 WARN 1209 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
53900 Sep 22 23:16:37.561 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 15 deps, role: work
53901 Sep 22 23:16:37.561 INFO [lossy] skipping 1212
53902 Sep 22 23:16:37.561 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 16 deps, role: work
53903 Sep 22 23:16:37.561 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 16 deps, role: work
53904 Sep 22 23:16:37.561 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 17 deps, role: work
53905 Sep 22 23:16:37.561 INFO [lossy] skipping 1218
53906 Sep 22 23:16:37.562 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 18 deps, role: work
53907 Sep 22 23:16:37.562 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 18 deps, role: work
53908 Sep 22 23:16:37.562 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 19 deps, role: work
53909 Sep 22 23:16:37.562 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 19 deps, role: work
53910 Sep 22 23:16:37.562 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 20 deps, role: work
53911 Sep 22 23:16:37.562 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 20 deps, role: work
53912 Sep 22 23:16:37.562 INFO [lossy] skipping 1228
53913 Sep 22 23:16:37.562 INFO [lossy] skipping 1229
53914 Sep 22 23:16:37.562 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 21 deps, role: work
53915 Sep 22 23:16:37.562 INFO [lossy] skipping 1231
53916 Sep 22 23:16:37.562 INFO [lossy] skipping 1232
53917 Sep 22 23:16:37.562 INFO [lossy] skipping 1233
53918 Sep 22 23:16:37.562 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 23 deps, role: work
53919 Sep 22 23:16:37.562 INFO [lossy] skipping 1237
53920 Sep 22 23:16:37.570 DEBG Flush :1168 extent_limit None deps:[JobId(1167), JobId(1166), JobId(1165)] res:true f:73 g:1
53921 Sep 22 23:16:37.570 WARN 1172 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53922 Sep 22 23:16:37.570 INFO [lossy] skipping 1204
53923 Sep 22 23:16:37.570 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
53924 Sep 22 23:16:37.570 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
53925 Sep 22 23:16:37.570 WARN 1212 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
53926 Sep 22 23:16:37.570 INFO [lossy] skipping 1218
53927 Sep 22 23:16:37.570 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 20 deps, role: work
53928 Sep 22 23:16:37.570 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 21 deps, role: work
53929 Sep 22 23:16:37.570 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 21 deps, role: work
53930 Sep 22 23:16:37.570 INFO [lossy] skipping 1237
53931 Sep 22 23:16:37.570 INFO [lossy] skipping 1218
53932 Sep 22 23:16:37.570 INFO [lossy] skipping 1237
53933 Sep 22 23:16:37.570 WARN 1218 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 16 deps, role: work
53934 Sep 22 23:16:37.570 INFO [lossy] skipping 1237
53935 Sep 22 23:16:37.570 INFO [lossy] skipping 1169
53936 Sep 22 23:16:37.600 DEBG Write :1170 deps:[JobId(1168), JobId(1165), JobId(1163)] res:true
53937 Sep 22 23:16:37.601 WARN 1171 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53938 Sep 22 23:16:37.601 INFO [lossy] skipping 1172
53939 Sep 22 23:16:37.601 WARN 1173 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
53940 Sep 22 23:16:37.601 WARN 1175 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53941 Sep 22 23:16:37.601 WARN 1176 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
53942 Sep 22 23:16:37.601 WARN 1178 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
53943 Sep 22 23:16:37.601 WARN 1179 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
53944 Sep 22 23:16:37.601 INFO [lossy] skipping 1180
53945 Sep 22 23:16:37.601 WARN 1181 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
53946 Sep 22 23:16:37.601 WARN 1182 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
53947 Sep 22 23:16:37.601 INFO [lossy] skipping 1184
53948 Sep 22 23:16:37.601 INFO [lossy] skipping 1185
53949 Sep 22 23:16:37.601 WARN 1187 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
53950 Sep 22 23:16:37.601 WARN 1188 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
53951 Sep 22 23:16:37.601 INFO [lossy] skipping 1189
53952 Sep 22 23:16:37.601 WARN 1190 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
53953 Sep 22 23:16:37.601 INFO [lossy] skipping 1191
53954 Sep 22 23:16:37.601 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
53955 Sep 22 23:16:37.602 WARN 1194 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
53956 Sep 22 23:16:37.602 INFO [lossy] skipping 1195
53957 Sep 22 23:16:37.602 WARN 1196 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
53958 Sep 22 23:16:37.602 WARN 1197 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
53959 Sep 22 23:16:37.602 WARN 1199 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
53960 Sep 22 23:16:37.602 WARN 1200 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
53961 Sep 22 23:16:37.602 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
53962 Sep 22 23:16:37.602 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
53963 Sep 22 23:16:37.602 INFO [lossy] skipping 1204
53964 Sep 22 23:16:37.602 WARN 1206 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
53965 Sep 22 23:16:37.602 WARN 1209 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
53966 Sep 22 23:16:37.602 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
53967 Sep 22 23:16:37.602 INFO [lossy] skipping 1213
53968 Sep 22 23:16:37.602 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 15 deps, role: work
53969 Sep 22 23:16:37.602 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 15 deps, role: work
53970 Sep 22 23:16:37.602 INFO [lossy] skipping 1217
53971 Sep 22 23:16:37.602 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 17 deps, role: work
53972 Sep 22 23:16:37.602 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 17 deps, role: work
53973 Sep 22 23:16:37.602 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 18 deps, role: work
53974 Sep 22 23:16:37.602 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 18 deps, role: work
53975 Sep 22 23:16:37.602 INFO [lossy] skipping 1226
53976 Sep 22 23:16:37.602 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 19 deps, role: work
53977 Sep 22 23:16:37.602 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 20 deps, role: work
53978 Sep 22 23:16:37.602 INFO [lossy] skipping 1231
53979 Sep 22 23:16:37.602 INFO [lossy] skipping 1232
53980 Sep 22 23:16:37.602 INFO [lossy] skipping 1233
53981 Sep 22 23:16:37.602 INFO [lossy] skipping 1234
53982 Sep 22 23:16:37.602 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 22 deps, role: work
53983 Sep 22 23:16:37.632 DEBG Write :1169 deps:[JobId(1168), JobId(1165), JobId(1163)] res:true
53984 Sep 22 23:16:37.634 INFO [lossy] skipping 1180
53985 Sep 22 23:16:37.634 INFO [lossy] skipping 1184
53986 Sep 22 23:16:37.634 INFO [lossy] skipping 1185
53987 Sep 22 23:16:37.634 WARN 1191 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
53988 Sep 22 23:16:37.634 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 16 deps, role: work
53989 Sep 22 23:16:37.634 INFO [lossy] skipping 1226
53990 Sep 22 23:16:37.634 WARN 1184 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
53991 Sep 22 23:16:37.634 WARN 1185 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
53992 Sep 22 23:16:37.634 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 19 deps, role: work
53993 Sep 22 23:16:37.634 INFO [lossy] skipping 1171
53994 Sep 22 23:16:37.634 INFO [lossy] skipping 1172
53995 Sep 22 23:16:37.634 INFO [lossy] skipping 1173
53996 Sep 22 23:16:37.634 INFO [lossy] skipping 1175
53997 Sep 22 23:16:37.634 INFO [lossy] skipping 1181
53998 Sep 22 23:16:37.634 INFO [lossy] skipping 1182
53999 Sep 22 23:16:37.634 INFO [lossy] skipping 1185
54000 Sep 22 23:16:37.634 INFO [lossy] skipping 1188
54001 Sep 22 23:16:37.634 INFO [lossy] skipping 1201
54002 Sep 22 23:16:37.634 INFO [lossy] skipping 1202
54003 Sep 22 23:16:37.634 INFO [lossy] skipping 1203
54004 Sep 22 23:16:37.634 INFO [lossy] skipping 1204
54005 Sep 22 23:16:37.634 INFO [lossy] skipping 1210
54006 Sep 22 23:16:37.634 INFO [lossy] skipping 1212
54007 Sep 22 23:16:37.634 INFO [lossy] skipping 1214
54008 Sep 22 23:16:37.634 INFO [lossy] skipping 1222
54009 Sep 22 23:16:37.634 INFO [lossy] skipping 1225
54010 Sep 22 23:16:37.634 INFO [lossy] skipping 1231
54011 Sep 22 23:16:37.634 INFO [lossy] skipping 1234
54012 Sep 22 23:16:37.642 DEBG Flush :1171 extent_limit None deps:[JobId(1170), JobId(1169), JobId(1168)] res:true f:74 g:1
54013 Sep 22 23:16:37.642 INFO [lossy] skipping 1172
54014 Sep 22 23:16:37.642 INFO [lossy] skipping 1173
54015 Sep 22 23:16:37.642 WARN 1175 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54016 Sep 22 23:16:37.642 WARN 1181 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54017 Sep 22 23:16:37.642 WARN 1182 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54018 Sep 22 23:16:37.642 WARN 1185 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54019 Sep 22 23:16:37.642 WARN 1188 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54020 Sep 22 23:16:37.642 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54021 Sep 22 23:16:37.642 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54022 Sep 22 23:16:37.642 WARN 1212 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54023 Sep 22 23:16:37.642 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
54024 Sep 22 23:16:37.642 INFO [lossy] skipping 1222
54025 Sep 22 23:16:37.642 INFO [lossy] skipping 1231
54026 Sep 22 23:16:37.673 DEBG Write :1172 deps:[JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54027 Sep 22 23:16:37.704 DEBG Write :1173 deps:[JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54028 Sep 22 23:16:37.713 DEBG Flush :1174 extent_limit None deps:[JobId(1173), JobId(1172), JobId(1171)] res:true f:75 g:1
54029 Sep 22 23:16:37.743 DEBG Write :1175 deps:[JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54030 Sep 22 23:16:37.744 INFO [lossy] skipping 1176
54031 Sep 22 23:16:37.744 INFO [lossy] skipping 1177
54032 Sep 22 23:16:37.744 WARN 1178 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54033 Sep 22 23:16:37.744 INFO [lossy] skipping 1179
54034 Sep 22 23:16:37.744 INFO [lossy] skipping 1180
54035 Sep 22 23:16:37.744 WARN 1181 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54036 Sep 22 23:16:37.744 WARN 1182 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54037 Sep 22 23:16:37.744 INFO [lossy] skipping 1183
54038 Sep 22 23:16:37.744 WARN 1184 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54039 Sep 22 23:16:37.744 INFO [lossy] skipping 1185
54040 Sep 22 23:16:37.744 WARN 1187 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54041 Sep 22 23:16:37.744 WARN 1188 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54042 Sep 22 23:16:37.744 INFO [lossy] skipping 1189
54043 Sep 22 23:16:37.744 INFO [lossy] skipping 1190
54044 Sep 22 23:16:37.744 INFO [lossy] skipping 1191
54045 Sep 22 23:16:37.744 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54046 Sep 22 23:16:37.744 WARN 1194 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54047 Sep 22 23:16:37.744 WARN 1196 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54048 Sep 22 23:16:37.744 INFO [lossy] skipping 1197
54049 Sep 22 23:16:37.744 WARN 1199 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54050 Sep 22 23:16:37.744 INFO [lossy] skipping 1200
54051 Sep 22 23:16:37.744 INFO [lossy] skipping 1202
54052 Sep 22 23:16:37.744 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54053 Sep 22 23:16:37.744 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54054 Sep 22 23:16:37.744 WARN 1206 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54055 Sep 22 23:16:37.744 INFO [lossy] skipping 1208
54056 Sep 22 23:16:37.744 WARN 1209 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54057 Sep 22 23:16:37.744 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54058 Sep 22 23:16:37.744 INFO [lossy] skipping 1212
54059 Sep 22 23:16:37.744 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54060 Sep 22 23:16:37.745 INFO [lossy] skipping 1215
54061 Sep 22 23:16:37.745 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
54062 Sep 22 23:16:37.745 INFO [lossy] skipping 1218
54063 Sep 22 23:16:37.745 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 15 deps, role: work
54064 Sep 22 23:16:37.745 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 15 deps, role: work
54065 Sep 22 23:16:37.745 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 16 deps, role: work
54066 Sep 22 23:16:37.745 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 16 deps, role: work
54067 Sep 22 23:16:37.745 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 17 deps, role: work
54068 Sep 22 23:16:37.745 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 17 deps, role: work
54069 Sep 22 23:16:37.745 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 18 deps, role: work
54070 Sep 22 23:16:37.745 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 18 deps, role: work
54071 Sep 22 23:16:37.745 INFO [lossy] skipping 1232
54072 Sep 22 23:16:37.745 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 19 deps, role: work
54073 Sep 22 23:16:37.745 INFO [lossy] skipping 1234
54074 Sep 22 23:16:37.745 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 20 deps, role: work
54075 Sep 22 23:16:37.745 INFO [lossy] skipping 1236
54076 Sep 22 23:16:37.775 DEBG Write :1176 deps:[JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54077 Sep 22 23:16:37.776 WARN returning error on flush!
54078 Sep 22 23:16:37.776 DEBG Flush :1177 extent_limit None deps:[JobId(1176), JobId(1175), JobId(1174)] res:false f:76 g:1
54079 Sep 22 23:16:37.776 WARN 1179 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54080 Sep 22 23:16:37.776 INFO [lossy] skipping 1180
54081 Sep 22 23:16:37.776 INFO [lossy] skipping 1183
54082 Sep 22 23:16:37.776 INFO [lossy] skipping 1185
54083 Sep 22 23:16:37.776 WARN 1190 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54084 Sep 22 23:16:37.776 WARN 1191 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54085 Sep 22 23:16:37.776 WARN 1197 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54086 Sep 22 23:16:37.776 INFO [lossy] skipping 1200
54087 Sep 22 23:16:37.776 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54088 Sep 22 23:16:37.776 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54089 Sep 22 23:16:37.776 WARN 1212 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54090 Sep 22 23:16:37.776 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54091 Sep 22 23:16:37.776 WARN 1218 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
54092 Sep 22 23:16:37.776 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 19 deps, role: work
54093 Sep 22 23:16:37.776 INFO [lossy] skipping 1234
54094 Sep 22 23:16:37.784 DEBG Flush :1177 extent_limit None deps:[JobId(1176), JobId(1175), JobId(1174)] res:true f:76 g:1
54095 Sep 22 23:16:37.784 WARN 1180 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54096 Sep 22 23:16:37.784 WARN 1185 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54097 Sep 22 23:16:37.784 INFO [lossy] skipping 1200
54098 Sep 22 23:16:37.784 WARN 1200 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54099 Sep 22 23:16:37.784 INFO [lossy] sleeping 1 second
54100 Sep 22 23:16:37.785 DEBG IO Read 1239 has deps [JobId(1238)]
54101 Sep 22 23:16:37.785 ERRO [2] job id 1165 saw error GenericError("test error")
54102 Sep 22 23:16:37.785 DEBG [rc] retire 1165 clears [JobId(1164), JobId(1165)], : downstairs
54103 Sep 22 23:16:37.785 ERRO [2] job id 1168 saw error GenericError("test error")
54104 Sep 22 23:16:37.788 DEBG [rc] retire 1168 clears [JobId(1166), JobId(1167), JobId(1168)], : downstairs
54105 Sep 22 23:16:37.791 DEBG [rc] retire 1171 clears [JobId(1169), JobId(1170), JobId(1171)], : downstairs
54106 Sep 22 23:16:37.793 DEBG [rc] retire 1174 clears [JobId(1172), JobId(1173), JobId(1174)], : downstairs
54107 Sep 22 23:16:37.793 ERRO [2] job id 1177 saw error GenericError("test error")
54108 Sep 22 23:16:37.796 DEBG [rc] retire 1177 clears [JobId(1175), JobId(1176), JobId(1177)], : downstairs
54109 Sep 22 23:16:37.962 DEBG Write :1235 deps:[JobId(1234), JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1164), JobId(1163)] res:true
54110 Sep 22 23:16:37.967 DEBG Flush :1236 extent_limit None deps:[JobId(1235), JobId(1234)] res:true f:96 g:1
54111 Sep 22 23:16:37.967 WARN returning error on read!
54112 Sep 22 23:16:37.967 DEBG Read :1237 deps:[JobId(1236)] res:false
54113 Sep 22 23:16:37.968 WARN 1238 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54114 Sep 22 23:16:37.973 DEBG Read :1237 deps:[JobId(1236)] res:true
54115 Sep 22 23:16:37.995 ERRO [1] job id 1237 saw error GenericError("test error")
54116 Sep 22 23:16:37.995 DEBG up_ds_listen was notified
54117 Sep 22 23:16:37.995 DEBG up_ds_listen process 1236
54118 Sep 22 23:16:37.995 DEBG [A] ack job 1236:237, : downstairs
54119 Sep 22 23:16:37.995 DEBG up_ds_listen checked 1 jobs, back to waiting
54120 Sep 22 23:16:37.996 DEBG Flush :1238 extent_limit None deps:[JobId(1237), JobId(1236)] res:true f:97 g:1
54121 Sep 22 23:16:38.002 DEBG Read :1239 deps:[JobId(1238)] res:true
54122 Sep 22 23:16:38.071 DEBG IO Flush 1240 has deps [JobId(1239), JobId(1238)]
54123 Sep 22 23:16:38.401 DEBG up_ds_listen was notified
54124 Sep 22 23:16:38.401 DEBG up_ds_listen process 1238
54125 Sep 22 23:16:38.401 DEBG [A] ack job 1238:239, : downstairs
54126 Sep 22 23:16:38.401 DEBG up_ds_listen checked 1 jobs, back to waiting
54127 Sep 22 23:16:38.407 DEBG Read :1239 deps:[JobId(1238)] res:true
54128 Sep 22 23:16:38.430 WARN returning error on flush!
54129 Sep 22 23:16:38.430 DEBG Flush :1240 extent_limit None deps:[JobId(1239), JobId(1238)] res:false f:98 g:1
54130 Sep 22 23:16:38.430 INFO [lossy] skipping 1240
54131 Sep 22 23:16:38.430 DEBG Flush :1240 extent_limit None deps:[JobId(1239), JobId(1238)] res:true f:98 g:1
54132 Sep 22 23:16:38.430 INFO [lossy] sleeping 1 second
54133 Sep 22 23:16:38.805 DEBG [1] Read AckReady 1239, : downstairs
54134 Sep 22 23:16:38.805 ERRO [1] job id 1240 saw error GenericError("test error")
54135 Sep 22 23:16:38.805 DEBG up_ds_listen was notified
54136 Sep 22 23:16:38.806 DEBG up_ds_listen process 1239
54137 Sep 22 23:16:38.806 DEBG [A] ack job 1239:240, : downstairs
54138 Sep 22 23:16:38.858 DEBG up_ds_listen checked 1 jobs, back to waiting
54139 Sep 22 23:16:38.860 DEBG Flush :1240 extent_limit None deps:[JobId(1239), JobId(1238)] res:true f:98 g:1
54140 Sep 22 23:16:38.860 INFO [lossy] sleeping 1 second
54141 Sep 22 23:16:38.891 DEBG Write :1178 deps:[JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54142 Sep 22 23:16:38.922 DEBG Write :1179 deps:[JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54143 Sep 22 23:16:38.924 INFO [lossy] skipping 1180
54144 Sep 22 23:16:38.924 WARN 1181 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54145 Sep 22 23:16:38.924 WARN 1182 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54146 Sep 22 23:16:38.924 WARN 1184 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54147 Sep 22 23:16:38.924 INFO [lossy] skipping 1185
54148 Sep 22 23:16:38.924 INFO [lossy] skipping 1186
54149 Sep 22 23:16:38.924 INFO [lossy] skipping 1187
54150 Sep 22 23:16:38.924 WARN 1188 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54151 Sep 22 23:16:38.924 WARN 1190 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54152 Sep 22 23:16:38.924 WARN 1191 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54153 Sep 22 23:16:38.924 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54154 Sep 22 23:16:38.924 WARN 1194 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54155 Sep 22 23:16:38.924 WARN 1196 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54156 Sep 22 23:16:38.924 WARN 1197 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54157 Sep 22 23:16:38.924 INFO [lossy] skipping 1199
54158 Sep 22 23:16:38.924 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54159 Sep 22 23:16:38.924 INFO [lossy] skipping 1203
54160 Sep 22 23:16:38.924 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54161 Sep 22 23:16:38.924 WARN 1206 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54162 Sep 22 23:16:38.924 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54163 Sep 22 23:16:38.924 WARN 1209 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54164 Sep 22 23:16:38.924 INFO [lossy] skipping 1210
54165 Sep 22 23:16:38.924 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54166 Sep 22 23:16:38.924 INFO [lossy] skipping 1212
54167 Sep 22 23:16:38.924 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54168 Sep 22 23:16:38.924 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54169 Sep 22 23:16:38.924 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54170 Sep 22 23:16:38.924 WARN 1218 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54171 Sep 22 23:16:38.924 INFO [lossy] skipping 1220
54172 Sep 22 23:16:38.924 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
54173 Sep 22 23:16:38.924 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 15 deps, role: work
54174 Sep 22 23:16:38.924 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 15 deps, role: work
54175 Sep 22 23:16:38.924 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 16 deps, role: work
54176 Sep 22 23:16:38.924 INFO [lossy] skipping 1227
54177 Sep 22 23:16:38.924 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 17 deps, role: work
54178 Sep 22 23:16:38.924 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 17 deps, role: work
54179 Sep 22 23:16:38.924 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 18 deps, role: work
54180 Sep 22 23:16:38.924 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 18 deps, role: work
54181 Sep 22 23:16:38.924 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 19 deps, role: work
54182 Sep 22 23:16:38.932 DEBG Flush :1180 extent_limit None deps:[JobId(1179), JobId(1178), JobId(1177)] res:true f:77 g:1
54183 Sep 22 23:16:38.932 WARN 1185 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54184 Sep 22 23:16:38.932 WARN 1187 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54185 Sep 22 23:16:38.932 WARN 1199 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54186 Sep 22 23:16:38.933 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54187 Sep 22 23:16:38.933 WARN 1212 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54188 Sep 22 23:16:38.933 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54189 Sep 22 23:16:38.933 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 15 deps, role: work
54190 Sep 22 23:16:38.933 INFO [lossy] sleeping 1 second
54191 Sep 22 23:16:38.980 DEBG IO Read 1241 has deps [JobId(1240)]
54192 Sep 22 23:16:39.313 DEBG [rc] retire 1180 clears [JobId(1178), JobId(1179), JobId(1180)], : downstairs
54193 Sep 22 23:16:39.318 DEBG up_ds_listen was notified
54194 Sep 22 23:16:39.318 DEBG up_ds_listen process 1240
54195 Sep 22 23:16:39.318 DEBG [A] ack job 1240:241, : downstairs
54196 Sep 22 23:16:39.318 DEBG up_ds_listen checked 1 jobs, back to waiting
54197 Sep 22 23:16:39.361 DEBG IO Flush 1242 has deps [JobId(1241), JobId(1240)]
54198 Sep 22 23:16:39.432 INFO [lossy] skipping 1241
54199 Sep 22 23:16:39.432 INFO [lossy] skipping 1242
54200 Sep 22 23:16:39.438 DEBG Read :1241 deps:[JobId(1240)] res:true
54201 Sep 22 23:16:39.460 INFO [lossy] skipping 1242
54202 Sep 22 23:16:39.460 DEBG Flush :1242 extent_limit None deps:[JobId(1241), JobId(1240)] res:true f:99 g:1
54203 Sep 22 23:16:39.460 INFO [lossy] sleeping 1 second
54204 Sep 22 23:16:39.835 DEBG [1] Read AckReady 1241, : downstairs
54205 Sep 22 23:16:39.836 DEBG up_ds_listen was notified
54206 Sep 22 23:16:39.836 DEBG up_ds_listen process 1241
54207 Sep 22 23:16:39.836 DEBG [A] ack job 1241:242, : downstairs
54208 Sep 22 23:16:39.889 DEBG up_ds_listen checked 1 jobs, back to waiting
54209 Sep 22 23:16:39.895 DEBG Read :1241 deps:[JobId(1240)] res:true
54210 Sep 22 23:16:39.917 DEBG IO Read 1243 has deps [JobId(1242)]
54211 Sep 22 23:16:39.930 DEBG Flush :1242 extent_limit None deps:[JobId(1241), JobId(1240)] res:true f:99 g:1
54212 Sep 22 23:16:39.936 DEBG Read :1243 deps:[JobId(1242)] res:true
54213 Sep 22 23:16:40.035 DEBG Write :1181 deps:[JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54214 Sep 22 23:16:40.037 WARN returning error on write!
54215 Sep 22 23:16:40.037 DEBG Write :1182 deps:[JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
54216 Sep 22 23:16:40.038 WARN 1183 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54217 Sep 22 23:16:40.038 INFO [lossy] skipping 1184
54218 Sep 22 23:16:40.038 INFO [lossy] skipping 1188
54219 Sep 22 23:16:40.038 WARN 1190 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54220 Sep 22 23:16:40.038 WARN 1191 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54221 Sep 22 23:16:40.038 INFO [lossy] skipping 1193
54222 Sep 22 23:16:40.038 WARN 1194 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54223 Sep 22 23:16:40.038 INFO [lossy] skipping 1195
54224 Sep 22 23:16:40.038 WARN 1196 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54225 Sep 22 23:16:40.038 WARN 1197 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54226 Sep 22 23:16:40.038 WARN 1200 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54227 Sep 22 23:16:40.038 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54228 Sep 22 23:16:40.038 INFO [lossy] skipping 1203
54229 Sep 22 23:16:40.038 INFO [lossy] skipping 1204
54230 Sep 22 23:16:40.038 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54231 Sep 22 23:16:40.038 WARN 1206 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54232 Sep 22 23:16:40.038 INFO [lossy] skipping 1207
54233 Sep 22 23:16:40.038 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54234 Sep 22 23:16:40.038 INFO [lossy] skipping 1209
54235 Sep 22 23:16:40.038 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54236 Sep 22 23:16:40.038 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54237 Sep 22 23:16:40.038 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54238 Sep 22 23:16:40.038 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54239 Sep 22 23:16:40.038 WARN 1218 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54240 Sep 22 23:16:40.038 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54241 Sep 22 23:16:40.038 INFO [lossy] skipping 1222
54242 Sep 22 23:16:40.038 INFO [lossy] skipping 1223
54243 Sep 22 23:16:40.038 INFO [lossy] skipping 1224
54244 Sep 22 23:16:40.038 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 15 deps, role: work
54245 Sep 22 23:16:40.038 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 16 deps, role: work
54246 Sep 22 23:16:40.038 INFO [lossy] skipping 1230
54247 Sep 22 23:16:40.038 INFO [lossy] skipping 1231
54248 Sep 22 23:16:40.038 INFO [lossy] skipping 1232
54249 Sep 22 23:16:40.038 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 17 deps, role: work
54250 Sep 22 23:16:40.038 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 18 deps, role: work
54251 Sep 22 23:16:40.038 INFO [lossy] skipping 1238
54252 Sep 22 23:16:40.038 INFO [lossy] skipping 1242
54253 Sep 22 23:16:40.038 INFO [lossy] skipping 1182
54254 Sep 22 23:16:40.038 WARN 1184 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54255 Sep 22 23:16:40.038 WARN 1188 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54256 Sep 22 23:16:40.038 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54257 Sep 22 23:16:40.038 WARN 1209 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54258 Sep 22 23:16:40.038 INFO [lossy] skipping 1223
54259 Sep 22 23:16:40.038 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
54260 Sep 22 23:16:40.038 INFO [lossy] skipping 1230
54261 Sep 22 23:16:40.038 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 17 deps, role: work
54262 Sep 22 23:16:40.039 INFO [lossy] skipping 1242
54263 Sep 22 23:16:40.068 DEBG Write :1182 deps:[JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54264 Sep 22 23:16:40.069 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
54265 Sep 22 23:16:40.069 INFO [lossy] skipping 1230
54266 Sep 22 23:16:40.069 INFO [lossy] skipping 1230
54267 Sep 22 23:16:40.069 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 16 deps, role: work
54268 Sep 22 23:16:40.069 INFO [lossy] sleeping 1 second
54269 Sep 22 23:16:40.399 DEBG up_ds_listen was notified
54270 Sep 22 23:16:40.399 DEBG up_ds_listen process 1242
54271 Sep 22 23:16:40.399 DEBG [A] ack job 1242:243, : downstairs
54272 Sep 22 23:16:40.399 DEBG up_ds_listen checked 1 jobs, back to waiting
54273 Sep 22 23:16:40.400 ERRO [2] job id 1182 saw error GenericError("test error")
54274 Sep 22 23:16:40.402 INFO [lossy] sleeping 1 second
54275 Sep 22 23:16:40.777 DEBG [0] Read AckReady 1243, : downstairs
54276 Sep 22 23:16:40.777 DEBG up_ds_listen was notified
54277 Sep 22 23:16:40.777 DEBG up_ds_listen process 1243
54278 Sep 22 23:16:40.777 DEBG [A] ack job 1243:244, : downstairs
54279 Sep 22 23:16:40.830 DEBG up_ds_listen checked 1 jobs, back to waiting
54280 Sep 22 23:16:40.831 DEBG IO Flush 1244 has deps [JobId(1243), JobId(1242)]
54281 Sep 22 23:16:40.831 WARN returning error on read!
54282 Sep 22 23:16:40.831 DEBG Read :1243 deps:[JobId(1242)] res:false
54283 Sep 22 23:16:40.837 DEBG Read :1243 deps:[JobId(1242)] res:true
54284 Sep 22 23:16:40.859 DEBG IO Read 1245 has deps [JobId(1244)]
54285 Sep 22 23:16:40.859 ERRO [1] job id 1243 saw error GenericError("test error")
54286 Sep 22 23:16:40.872 INFO [lossy] skipping 1244
54287 Sep 22 23:16:40.872 DEBG Flush :1244 extent_limit None deps:[JobId(1243), JobId(1242)] res:true f:100 g:1
54288 Sep 22 23:16:40.872 WARN returning error on read!
54289 Sep 22 23:16:40.872 DEBG Read :1245 deps:[JobId(1244)] res:false
54290 Sep 22 23:16:40.878 DEBG Read :1245 deps:[JobId(1244)] res:true
54291 Sep 22 23:16:41.277 ERRO [1] job id 1245 saw error GenericError("test error")
54292 Sep 22 23:16:41.285 DEBG Flush :1183 extent_limit None deps:[JobId(1182), JobId(1181), JobId(1180)] res:true f:78 g:1
54293 Sep 22 23:16:41.316 DEBG Write :1184 deps:[JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54294 Sep 22 23:16:41.347 DEBG Write :1185 deps:[JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54295 Sep 22 23:16:41.356 DEBG Flush :1186 extent_limit None deps:[JobId(1185), JobId(1184), JobId(1183)] res:true f:79 g:1
54296 Sep 22 23:16:41.357 WARN returning error on write!
54297 Sep 22 23:16:41.357 DEBG Write :1187 deps:[JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
54298 Sep 22 23:16:41.388 DEBG Write :1188 deps:[JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54299 Sep 22 23:16:41.389 WARN 1189 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54300 Sep 22 23:16:41.389 WARN 1190 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54301 Sep 22 23:16:41.389 INFO [lossy] skipping 1191
54302 Sep 22 23:16:41.389 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54303 Sep 22 23:16:41.389 INFO [lossy] skipping 1194
54304 Sep 22 23:16:41.389 INFO [lossy] skipping 1195
54305 Sep 22 23:16:41.389 INFO [lossy] skipping 1196
54306 Sep 22 23:16:41.389 WARN 1197 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54307 Sep 22 23:16:41.389 INFO [lossy] skipping 1198
54308 Sep 22 23:16:41.389 INFO [lossy] skipping 1199
54309 Sep 22 23:16:41.389 WARN 1200 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54310 Sep 22 23:16:41.389 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54311 Sep 22 23:16:41.389 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54312 Sep 22 23:16:41.389 INFO [lossy] skipping 1204
54313 Sep 22 23:16:41.389 INFO [lossy] skipping 1205
54314 Sep 22 23:16:41.389 WARN 1206 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54315 Sep 22 23:16:41.389 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54316 Sep 22 23:16:41.389 INFO [lossy] skipping 1209
54317 Sep 22 23:16:41.389 INFO [lossy] skipping 1210
54318 Sep 22 23:16:41.389 INFO [lossy] skipping 1211
54319 Sep 22 23:16:41.389 WARN 1212 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54320 Sep 22 23:16:41.389 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54321 Sep 22 23:16:41.389 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54322 Sep 22 23:16:41.389 INFO [lossy] skipping 1216
54323 Sep 22 23:16:41.389 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54324 Sep 22 23:16:41.389 INFO [lossy] skipping 1218
54325 Sep 22 23:16:41.389 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54326 Sep 22 23:16:41.389 INFO [lossy] skipping 1221
54327 Sep 22 23:16:41.389 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54328 Sep 22 23:16:41.389 INFO [lossy] skipping 1224
54329 Sep 22 23:16:41.389 INFO [lossy] skipping 1225
54330 Sep 22 23:16:41.389 INFO [lossy] skipping 1226
54331 Sep 22 23:16:41.389 INFO [lossy] skipping 1227
54332 Sep 22 23:16:41.389 INFO [lossy] skipping 1228
54333 Sep 22 23:16:41.390 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
54334 Sep 22 23:16:41.390 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
54335 Sep 22 23:16:41.390 INFO [lossy] skipping 1231
54336 Sep 22 23:16:41.390 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 15 deps, role: work
54337 Sep 22 23:16:41.390 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 15 deps, role: work
54338 Sep 22 23:16:41.390 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 16 deps, role: work
54339 Sep 22 23:16:41.390 INFO [lossy] skipping 1236
54340 Sep 22 23:16:41.390 INFO [lossy] skipping 1237
54341 Sep 22 23:16:41.390 INFO [lossy] skipping 1242
54342 Sep 22 23:16:41.390 INFO [lossy] skipping 1244
54343 Sep 22 23:16:41.420 DEBG Write :1187 deps:[JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54344 Sep 22 23:16:41.421 INFO [lossy] skipping 1191
54345 Sep 22 23:16:41.421 WARN 1194 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54346 Sep 22 23:16:41.421 WARN 1196 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54347 Sep 22 23:16:41.421 WARN 1199 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54348 Sep 22 23:16:41.421 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54349 Sep 22 23:16:41.421 INFO [lossy] skipping 1209
54350 Sep 22 23:16:41.421 INFO [lossy] skipping 1210
54351 Sep 22 23:16:41.421 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54352 Sep 22 23:16:41.421 INFO [lossy] skipping 1216
54353 Sep 22 23:16:41.421 WARN 1218 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54354 Sep 22 23:16:41.421 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54355 Sep 22 23:16:41.421 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54356 Sep 22 23:16:41.421 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54357 Sep 22 23:16:41.421 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54358 Sep 22 23:16:41.421 INFO [lossy] skipping 1191
54359 Sep 22 23:16:41.421 WARN 1209 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54360 Sep 22 23:16:41.421 WARN 1191 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54361 Sep 22 23:16:41.421 INFO [lossy] sleeping 1 second
54362 Sep 22 23:16:41.422 DEBG IO Flush 1246 has deps [JobId(1245), JobId(1244)]
54363 Sep 22 23:16:41.422 DEBG Flush :1244 extent_limit None deps:[JobId(1243), JobId(1242)] res:true f:100 g:1
54364 Sep 22 23:16:41.422 WARN returning error on read!
54365 Sep 22 23:16:41.422 DEBG Read :1245 deps:[JobId(1244)] res:false
54366 Sep 22 23:16:41.422 WARN returning error on read!
54367 Sep 22 23:16:41.422 DEBG Read :1245 deps:[JobId(1244)] res:false
54368 Sep 22 23:16:41.428 DEBG Read :1245 deps:[JobId(1244)] res:true
54369 Sep 22 23:16:41.452 DEBG [rc] retire 1183 clears [JobId(1181), JobId(1182), JobId(1183)], : downstairs
54370 Sep 22 23:16:41.455 DEBG [rc] retire 1186 clears [JobId(1184), JobId(1185), JobId(1186)], : downstairs
54371 Sep 22 23:16:41.455 ERRO [2] job id 1187 saw error GenericError("test error")
54372 Sep 22 23:16:41.456 ERRO [0] job id 1245 saw error GenericError("test error")
54373 Sep 22 23:16:41.456 ERRO [0] job id 1245 saw error GenericError("test error")
54374 Sep 22 23:16:41.456 DEBG up_ds_listen was notified
54375 Sep 22 23:16:41.456 DEBG up_ds_listen process 1244
54376 Sep 22 23:16:41.456 DEBG [A] ack job 1244:245, : downstairs
54377 Sep 22 23:16:41.456 DEBG up_ds_listen checked 1 jobs, back to waiting
54378 Sep 22 23:16:41.459 INFO [lossy] skipping 1246
54379 Sep 22 23:16:41.459 INFO [lossy] skipping 1246
54380 Sep 22 23:16:41.459 WARN returning error on flush!
54381 Sep 22 23:16:41.459 DEBG Flush :1246 extent_limit None deps:[JobId(1245), JobId(1244)] res:false f:101 g:1
54382 Sep 22 23:16:41.459 INFO [lossy] skipping 1246
54383 Sep 22 23:16:41.459 INFO [lossy] skipping 1246
54384 Sep 22 23:16:41.459 WARN returning error on flush!
54385 Sep 22 23:16:41.459 DEBG Flush :1246 extent_limit None deps:[JobId(1245), JobId(1244)] res:false f:101 g:1
54386 Sep 22 23:16:41.459 DEBG Flush :1246 extent_limit None deps:[JobId(1245), JobId(1244)] res:true f:101 g:1
54387 Sep 22 23:16:41.459 INFO [lossy] sleeping 1 second
54388 Sep 22 23:16:41.834 DEBG [1] Read AckReady 1245, : downstairs
54389 Sep 22 23:16:41.835 ERRO [1] job id 1246 saw error GenericError("test error")
54390 Sep 22 23:16:41.835 ERRO [1] job id 1246 saw error GenericError("test error")
54391 Sep 22 23:16:41.835 DEBG up_ds_listen was notified
54392 Sep 22 23:16:41.835 DEBG up_ds_listen process 1245
54393 Sep 22 23:16:41.835 DEBG [A] ack job 1245:246, : downstairs
54394 Sep 22 23:16:41.888 DEBG up_ds_listen checked 1 jobs, back to waiting
54395 Sep 22 23:16:41.890 DEBG Flush :1246 extent_limit None deps:[JobId(1245), JobId(1244)] res:true f:101 g:1
54396 Sep 22 23:16:41.890 INFO [lossy] sleeping 1 second
54397 Sep 22 23:16:41.890 DEBG IO Read 1247 has deps [JobId(1246)]
54398 Sep 22 23:16:42.279 DEBG up_ds_listen was notified
54399 Sep 22 23:16:42.279 DEBG up_ds_listen process 1246
54400 Sep 22 23:16:42.279 DEBG [A] ack job 1246:247, : downstairs
54401 Sep 22 23:16:42.279 DEBG up_ds_listen checked 1 jobs, back to waiting
54402 Sep 22 23:16:42.279 DEBG IO Flush 1248 has deps [JobId(1247), JobId(1246)]
54403 Sep 22 23:16:42.430 DEBG Flush :1189 extent_limit None deps:[JobId(1188), JobId(1187), JobId(1186)] res:true f:80 g:1
54404 Sep 22 23:16:42.430 INFO [lossy] skipping 1190
54405 Sep 22 23:16:42.461 DEBG Write :1191 deps:[JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54406 Sep 22 23:16:42.462 WARN 1192 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54407 Sep 22 23:16:42.462 WARN 1193 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54408 Sep 22 23:16:42.462 WARN 1194 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54409 Sep 22 23:16:42.462 WARN 1196 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54410 Sep 22 23:16:42.462 WARN 1197 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54411 Sep 22 23:16:42.462 WARN 1199 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54412 Sep 22 23:16:42.462 WARN 1200 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54413 Sep 22 23:16:42.462 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54414 Sep 22 23:16:42.462 INFO [lossy] skipping 1203
54415 Sep 22 23:16:42.462 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54416 Sep 22 23:16:42.462 WARN 1206 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54417 Sep 22 23:16:42.462 INFO [lossy] skipping 1208
54418 Sep 22 23:16:42.462 INFO [lossy] skipping 1209
54419 Sep 22 23:16:42.462 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54420 Sep 22 23:16:42.462 WARN 1212 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54421 Sep 22 23:16:42.462 INFO [lossy] skipping 1213
54422 Sep 22 23:16:42.462 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54423 Sep 22 23:16:42.462 INFO [lossy] skipping 1215
54424 Sep 22 23:16:42.462 INFO [lossy] skipping 1216
54425 Sep 22 23:16:42.462 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54426 Sep 22 23:16:42.462 WARN 1218 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54427 Sep 22 23:16:42.462 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54428 Sep 22 23:16:42.462 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54429 Sep 22 23:16:42.462 INFO [lossy] skipping 1222
54430 Sep 22 23:16:42.462 INFO [lossy] skipping 1223
54431 Sep 22 23:16:42.462 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54432 Sep 22 23:16:42.462 INFO [lossy] skipping 1225
54433 Sep 22 23:16:42.462 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54434 Sep 22 23:16:42.462 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54435 Sep 22 23:16:42.462 INFO [lossy] skipping 1229
54436 Sep 22 23:16:42.462 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54437 Sep 22 23:16:42.462 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
54438 Sep 22 23:16:42.462 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
54439 Sep 22 23:16:42.462 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 15 deps, role: work
54440 Sep 22 23:16:42.462 INFO [lossy] skipping 1237
54441 Sep 22 23:16:42.462 INFO [lossy] skipping 1247
54442 Sep 22 23:16:42.462 INFO [lossy] skipping 1190
54443 Sep 22 23:16:42.462 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54444 Sep 22 23:16:42.462 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54445 Sep 22 23:16:42.463 WARN 1209 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54446 Sep 22 23:16:42.463 INFO [lossy] skipping 1213
54447 Sep 22 23:16:42.463 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54448 Sep 22 23:16:42.463 INFO [lossy] skipping 1223
54449 Sep 22 23:16:42.463 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54450 Sep 22 23:16:42.463 INFO [lossy] skipping 1190
54451 Sep 22 23:16:42.463 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54452 Sep 22 23:16:42.493 DEBG Write :1190 deps:[JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54453 Sep 22 23:16:42.494 INFO [lossy] sleeping 1 second
54454 Sep 22 23:16:42.494 INFO [lossy] skipping 1247
54455 Sep 22 23:16:42.494 INFO [lossy] skipping 1248
54456 Sep 22 23:16:42.494 INFO [lossy] skipping 1247
54457 Sep 22 23:16:42.494 INFO [lossy] skipping 1248
54458 Sep 22 23:16:42.500 DEBG Read :1247 deps:[JobId(1246)] res:true
54459 Sep 22 23:16:42.524 DEBG [rc] retire 1189 clears [JobId(1187), JobId(1188), JobId(1189)], : downstairs
54460 Sep 22 23:16:42.526 WARN returning error on flush!
54461 Sep 22 23:16:42.526 DEBG Flush :1248 extent_limit None deps:[JobId(1247), JobId(1246)] res:false f:102 g:1
54462 Sep 22 23:16:42.526 DEBG Flush :1248 extent_limit None deps:[JobId(1247), JobId(1246)] res:true f:102 g:1
54463 Sep 22 23:16:42.526 INFO [lossy] sleeping 1 second
54464 Sep 22 23:16:42.901 DEBG [1] Read AckReady 1247, : downstairs
54465 Sep 22 23:16:42.902 ERRO [1] job id 1248 saw error GenericError("test error")
54466 Sep 22 23:16:42.902 DEBG up_ds_listen was notified
54467 Sep 22 23:16:42.902 DEBG up_ds_listen process 1247
54468 Sep 22 23:16:42.902 DEBG [A] ack job 1247:248, : downstairs
54469 Sep 22 23:16:42.955 DEBG up_ds_listen checked 1 jobs, back to waiting
54470 Sep 22 23:16:42.961 DEBG Read :1247 deps:[JobId(1246)] res:true
54471 Sep 22 23:16:42.983 DEBG IO Read 1249 has deps [JobId(1248)]
54472 Sep 22 23:16:42.996 DEBG Flush :1248 extent_limit None deps:[JobId(1247), JobId(1246)] res:true f:102 g:1
54473 Sep 22 23:16:42.996 INFO [lossy] sleeping 1 second
54474 Sep 22 23:16:43.373 DEBG up_ds_listen was notified
54475 Sep 22 23:16:43.373 DEBG up_ds_listen process 1248
54476 Sep 22 23:16:43.373 DEBG [A] ack job 1248:249, : downstairs
54477 Sep 22 23:16:43.373 DEBG up_ds_listen checked 1 jobs, back to waiting
54478 Sep 22 23:16:43.456 DEBG IO Flush 1250 has deps [JobId(1249), JobId(1248)]
54479 Sep 22 23:16:43.495 WARN returning error on flush!
54480 Sep 22 23:16:43.495 DEBG Flush :1192 extent_limit None deps:[JobId(1191), JobId(1190), JobId(1189)] res:false f:81 g:1
54481 Sep 22 23:16:43.495 INFO [lossy] skipping 1193
54482 Sep 22 23:16:43.496 INFO [lossy] skipping 1196
54483 Sep 22 23:16:43.496 INFO [lossy] skipping 1198
54484 Sep 22 23:16:43.496 INFO [lossy] skipping 1203
54485 Sep 22 23:16:43.496 INFO [lossy] skipping 1204
54486 Sep 22 23:16:43.496 INFO [lossy] skipping 1212
54487 Sep 22 23:16:43.496 INFO [lossy] skipping 1216
54488 Sep 22 23:16:43.496 INFO [lossy] skipping 1217
54489 Sep 22 23:16:43.496 INFO [lossy] skipping 1219
54490 Sep 22 23:16:43.496 INFO [lossy] skipping 1223
54491 Sep 22 23:16:43.496 INFO [lossy] skipping 1228
54492 Sep 22 23:16:43.496 INFO [lossy] skipping 1231
54493 Sep 22 23:16:43.496 INFO [lossy] skipping 1233
54494 Sep 22 23:16:43.496 INFO [lossy] skipping 1240
54495 Sep 22 23:16:43.504 DEBG Flush :1192 extent_limit None deps:[JobId(1191), JobId(1190), JobId(1189)] res:true f:81 g:1
54496 Sep 22 23:16:43.504 INFO [lossy] skipping 1193
54497 Sep 22 23:16:43.504 INFO [lossy] skipping 1196
54498 Sep 22 23:16:43.504 INFO [lossy] skipping 1203
54499 Sep 22 23:16:43.504 INFO [lossy] skipping 1212
54500 Sep 22 23:16:43.504 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54501 Sep 22 23:16:43.504 INFO [lossy] skipping 1223
54502 Sep 22 23:16:43.504 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54503 Sep 22 23:16:43.535 DEBG Write :1193 deps:[JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54504 Sep 22 23:16:43.536 WARN 1196 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54505 Sep 22 23:16:43.536 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54506 Sep 22 23:16:43.536 WARN 1212 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54507 Sep 22 23:16:43.536 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54508 Sep 22 23:16:43.567 DEBG Write :1194 deps:[JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54509 Sep 22 23:16:43.568 INFO [lossy] skipping 1195
54510 Sep 22 23:16:43.568 WARN 1197 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54511 Sep 22 23:16:43.568 WARN 1199 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54512 Sep 22 23:16:43.568 WARN 1200 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54513 Sep 22 23:16:43.568 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54514 Sep 22 23:16:43.568 INFO [lossy] skipping 1205
54515 Sep 22 23:16:43.568 WARN 1206 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54516 Sep 22 23:16:43.568 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54517 Sep 22 23:16:43.568 WARN 1209 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54518 Sep 22 23:16:43.568 INFO [lossy] skipping 1210
54519 Sep 22 23:16:43.568 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54520 Sep 22 23:16:43.568 INFO [lossy] skipping 1212
54521 Sep 22 23:16:43.568 INFO [lossy] skipping 1214
54522 Sep 22 23:16:43.568 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54523 Sep 22 23:16:43.568 INFO [lossy] skipping 1217
54524 Sep 22 23:16:43.568 WARN 1218 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54525 Sep 22 23:16:43.568 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54526 Sep 22 23:16:43.568 INFO [lossy] skipping 1221
54527 Sep 22 23:16:43.568 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54528 Sep 22 23:16:43.568 INFO [lossy] skipping 1226
54529 Sep 22 23:16:43.568 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54530 Sep 22 23:16:43.568 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54531 Sep 22 23:16:43.568 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54532 Sep 22 23:16:43.568 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54533 Sep 22 23:16:43.568 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 14 deps, role: work
54534 Sep 22 23:16:43.568 INFO [lossy] skipping 1239
54535 Sep 22 23:16:43.568 INFO [lossy] skipping 1242
54536 Sep 22 23:16:43.568 INFO [lossy] skipping 1249
54537 Sep 22 23:16:43.568 WARN returning error on flush!
54538 Sep 22 23:16:43.568 DEBG Flush :1195 extent_limit None deps:[JobId(1194), JobId(1193), JobId(1192)] res:false f:82 g:1
54539 Sep 22 23:16:43.568 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54540 Sep 22 23:16:43.568 INFO [lossy] skipping 1212
54541 Sep 22 23:16:43.568 INFO [lossy] skipping 1214
54542 Sep 22 23:16:43.568 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54543 Sep 22 23:16:43.568 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54544 Sep 22 23:16:43.576 DEBG Flush :1195 extent_limit None deps:[JobId(1194), JobId(1193), JobId(1192)] res:true f:82 g:1
54545 Sep 22 23:16:43.576 INFO [lossy] skipping 1212
54546 Sep 22 23:16:43.576 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54547 Sep 22 23:16:43.576 INFO [lossy] skipping 1212
54548 Sep 22 23:16:43.576 WARN 1212 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54549 Sep 22 23:16:43.577 INFO [lossy] skipping 1196
54550 Sep 22 23:16:43.607 DEBG Write :1197 deps:[JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54551 Sep 22 23:16:43.608 WARN 1198 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54552 Sep 22 23:16:43.608 WARN 1199 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54553 Sep 22 23:16:43.608 WARN 1200 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54554 Sep 22 23:16:43.608 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54555 Sep 22 23:16:43.608 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54556 Sep 22 23:16:43.608 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54557 Sep 22 23:16:43.608 INFO [lossy] skipping 1206
54558 Sep 22 23:16:43.608 INFO [lossy] skipping 1207
54559 Sep 22 23:16:43.608 INFO [lossy] skipping 1208
54560 Sep 22 23:16:43.608 WARN 1209 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54561 Sep 22 23:16:43.608 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54562 Sep 22 23:16:43.608 INFO [lossy] skipping 1212
54563 Sep 22 23:16:43.608 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54564 Sep 22 23:16:43.608 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54565 Sep 22 23:16:43.608 INFO [lossy] skipping 1218
54566 Sep 22 23:16:43.608 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54567 Sep 22 23:16:43.608 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54568 Sep 22 23:16:43.608 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54569 Sep 22 23:16:43.608 INFO [lossy] skipping 1224
54570 Sep 22 23:16:43.608 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54571 Sep 22 23:16:43.608 INFO [lossy] skipping 1227
54572 Sep 22 23:16:43.608 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54573 Sep 22 23:16:43.608 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54574 Sep 22 23:16:43.608 INFO [lossy] skipping 1232
54575 Sep 22 23:16:43.608 INFO [lossy] skipping 1233
54576 Sep 22 23:16:43.608 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 13 deps, role: work
54577 Sep 22 23:16:43.608 INFO [lossy] skipping 1240
54578 Sep 22 23:16:43.609 INFO [lossy] skipping 1249
54579 Sep 22 23:16:43.609 INFO [lossy] skipping 1250
54580 Sep 22 23:16:43.639 DEBG Write :1196 deps:[JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54581 Sep 22 23:16:43.640 WARN 1206 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54582 Sep 22 23:16:43.640 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54583 Sep 22 23:16:43.640 WARN 1218 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54584 Sep 22 23:16:43.640 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54585 Sep 22 23:16:43.640 INFO [lossy] skipping 1227
54586 Sep 22 23:16:43.640 INFO [lossy] skipping 1232
54587 Sep 22 23:16:43.640 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54588 Sep 22 23:16:43.640 INFO [lossy] skipping 1240
54589 Sep 22 23:16:43.640 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54590 Sep 22 23:16:43.640 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54591 Sep 22 23:16:43.648 DEBG Flush :1198 extent_limit None deps:[JobId(1197), JobId(1196), JobId(1195)] res:true f:83 g:1
54592 Sep 22 23:16:43.649 WARN returning error on write!
54593 Sep 22 23:16:43.649 DEBG Write :1199 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
54594 Sep 22 23:16:43.680 DEBG Write :1200 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54595 Sep 22 23:16:43.681 WARN 1201 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54596 Sep 22 23:16:43.681 WARN 1202 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54597 Sep 22 23:16:43.681 WARN 1203 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54598 Sep 22 23:16:43.681 INFO [lossy] skipping 1204
54599 Sep 22 23:16:43.681 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54600 Sep 22 23:16:43.681 WARN 1206 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54601 Sep 22 23:16:43.681 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54602 Sep 22 23:16:43.681 INFO [lossy] skipping 1209
54603 Sep 22 23:16:43.681 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54604 Sep 22 23:16:43.681 WARN 1212 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54605 Sep 22 23:16:43.681 INFO [lossy] skipping 1214
54606 Sep 22 23:16:43.681 INFO [lossy] skipping 1215
54607 Sep 22 23:16:43.681 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54608 Sep 22 23:16:43.681 WARN 1218 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54609 Sep 22 23:16:43.681 INFO [lossy] skipping 1219
54610 Sep 22 23:16:43.681 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54611 Sep 22 23:16:43.681 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54612 Sep 22 23:16:43.681 INFO [lossy] skipping 1222
54613 Sep 22 23:16:43.681 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54614 Sep 22 23:16:43.681 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54615 Sep 22 23:16:43.681 INFO [lossy] skipping 1226
54616 Sep 22 23:16:43.681 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54617 Sep 22 23:16:43.682 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54618 Sep 22 23:16:43.682 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54619 Sep 22 23:16:43.682 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54620 Sep 22 23:16:43.682 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54621 Sep 22 23:16:43.682 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 12 deps, role: work
54622 Sep 22 23:16:43.682 INFO [lossy] skipping 1238
54623 Sep 22 23:16:43.682 INFO [lossy] skipping 1240
54624 Sep 22 23:16:43.682 INFO [lossy] skipping 1241
54625 Sep 22 23:16:43.682 INFO [lossy] skipping 1243
54626 Sep 22 23:16:43.682 INFO [lossy] skipping 1247
54627 Sep 22 23:16:43.682 INFO [lossy] skipping 1249
54628 Sep 22 23:16:43.682 INFO [lossy] skipping 1199
54629 Sep 22 23:16:43.682 INFO [lossy] skipping 1209
54630 Sep 22 23:16:43.682 INFO [lossy] skipping 1214
54631 Sep 22 23:16:43.682 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54632 Sep 22 23:16:43.682 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54633 Sep 22 23:16:43.682 INFO [lossy] skipping 1241
54634 Sep 22 23:16:43.682 INFO [lossy] skipping 1249
54635 Sep 22 23:16:43.712 DEBG Write :1199 deps:[JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54636 Sep 22 23:16:43.713 WARN 1209 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54637 Sep 22 23:16:43.713 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54638 Sep 22 23:16:43.713 INFO [lossy] skipping 1241
54639 Sep 22 23:16:43.713 INFO [lossy] sleeping 1 second
54640 Sep 22 23:16:43.719 DEBG Read :1249 deps:[JobId(1248)] res:true
54641 Sep 22 23:16:43.740 ERRO [2] job id 1192 saw error GenericError("test error")
54642 Sep 22 23:16:43.743 DEBG [rc] retire 1192 clears [JobId(1190), JobId(1191), JobId(1192)], : downstairs
54643 Sep 22 23:16:43.743 ERRO [2] job id 1195 saw error GenericError("test error")
54644 Sep 22 23:16:43.746 DEBG [rc] retire 1195 clears [JobId(1193), JobId(1194), JobId(1195)], : downstairs
54645 Sep 22 23:16:43.749 DEBG [rc] retire 1198 clears [JobId(1196), JobId(1197), JobId(1198)], : downstairs
54646 Sep 22 23:16:43.749 ERRO [2] job id 1199 saw error GenericError("test error")
54647 Sep 22 23:16:43.751 DEBG Flush :1250 extent_limit None deps:[JobId(1249), JobId(1248)] res:true f:103 g:1
54648 Sep 22 23:16:43.751 INFO [lossy] sleeping 1 second
54649 Sep 22 23:16:44.126 DEBG [1] Read AckReady 1249, : downstairs
54650 Sep 22 23:16:44.127 DEBG up_ds_listen was notified
54651 Sep 22 23:16:44.127 DEBG up_ds_listen process 1249
54652 Sep 22 23:16:44.127 DEBG [A] ack job 1249:250, : downstairs
54653 Sep 22 23:16:44.180 DEBG up_ds_listen checked 1 jobs, back to waiting
54654 Sep 22 23:16:44.181 WARN returning error on read!
54655 Sep 22 23:16:44.181 DEBG Read :1249 deps:[JobId(1248)] res:false
54656 Sep 22 23:16:44.187 DEBG Read :1249 deps:[JobId(1248)] res:true
54657 Sep 22 23:16:44.209 DEBG IO Read 1251 has deps [JobId(1250)]
54658 Sep 22 23:16:44.209 ERRO [0] job id 1249 saw error GenericError("test error")
54659 Sep 22 23:16:44.222 INFO [lossy] sleeping 1 second
54660 Sep 22 23:16:44.682 DEBG IO Flush 1252 has deps [JobId(1251), JobId(1250)]
54661 Sep 22 23:16:44.722 DEBG Flush :1201 extent_limit None deps:[JobId(1200), JobId(1199), JobId(1198)] res:true f:84 g:1
54662 Sep 22 23:16:44.722 INFO [lossy] skipping 1202
54663 Sep 22 23:16:44.723 WARN returning error on write!
54664 Sep 22 23:16:44.723 DEBG Write :1203 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
54665 Sep 22 23:16:44.723 WARN 1204 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54666 Sep 22 23:16:44.723 WARN 1205 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54667 Sep 22 23:16:44.724 WARN 1206 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54668 Sep 22 23:16:44.724 WARN 1208 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54669 Sep 22 23:16:44.724 WARN 1209 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54670 Sep 22 23:16:44.724 WARN 1211 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54671 Sep 22 23:16:44.724 WARN 1212 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54672 Sep 22 23:16:44.724 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54673 Sep 22 23:16:44.724 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54674 Sep 22 23:16:44.724 INFO [lossy] skipping 1216
54675 Sep 22 23:16:44.724 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54676 Sep 22 23:16:44.724 WARN 1218 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54677 Sep 22 23:16:44.724 INFO [lossy] skipping 1220
54678 Sep 22 23:16:44.724 INFO [lossy] skipping 1221
54679 Sep 22 23:16:44.724 INFO [lossy] skipping 1223
54680 Sep 22 23:16:44.724 INFO [lossy] skipping 1224
54681 Sep 22 23:16:44.724 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54682 Sep 22 23:16:44.724 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54683 Sep 22 23:16:44.724 INFO [lossy] skipping 1229
54684 Sep 22 23:16:44.724 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54685 Sep 22 23:16:44.724 INFO [lossy] skipping 1232
54686 Sep 22 23:16:44.724 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54687 Sep 22 23:16:44.724 INFO [lossy] skipping 1234
54688 Sep 22 23:16:44.724 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 11 deps, role: work
54689 Sep 22 23:16:44.724 INFO [lossy] skipping 1236
54690 Sep 22 23:16:44.724 INFO [lossy] skipping 1237
54691 Sep 22 23:16:44.724 INFO [lossy] skipping 1240
54692 Sep 22 23:16:44.724 INFO [lossy] skipping 1243
54693 Sep 22 23:16:44.724 INFO [lossy] skipping 1244
54694 Sep 22 23:16:44.724 INFO [lossy] skipping 1250
54695 Sep 22 23:16:44.725 WARN returning error on write!
54696 Sep 22 23:16:44.725 DEBG Write :1202 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
54697 Sep 22 23:16:44.726 WARN returning error on write!
54698 Sep 22 23:16:44.726 DEBG Write :1203 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
54699 Sep 22 23:16:44.726 INFO [lossy] skipping 1216
54700 Sep 22 23:16:44.726 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54701 Sep 22 23:16:44.726 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54702 Sep 22 23:16:44.726 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54703 Sep 22 23:16:44.726 INFO [lossy] skipping 1224
54704 Sep 22 23:16:44.726 INFO [lossy] skipping 1229
54705 Sep 22 23:16:44.726 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 10 deps, role: work
54706 Sep 22 23:16:44.726 INFO [lossy] skipping 1234
54707 Sep 22 23:16:44.726 INFO [lossy] skipping 1236
54708 Sep 22 23:16:44.726 INFO [lossy] skipping 1237
54709 Sep 22 23:16:44.726 INFO [lossy] skipping 1240
54710 Sep 22 23:16:44.756 DEBG Write :1202 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54711 Sep 22 23:16:44.757 INFO [lossy] skipping 1203
54712 Sep 22 23:16:44.757 INFO [lossy] skipping 1224
54713 Sep 22 23:16:44.757 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 9 deps, role: work
54714 Sep 22 23:16:44.757 INFO [lossy] skipping 1234
54715 Sep 22 23:16:44.757 INFO [lossy] skipping 1237
54716 Sep 22 23:16:44.757 INFO [lossy] skipping 1240
54717 Sep 22 23:16:44.757 INFO [lossy] skipping 1203
54718 Sep 22 23:16:44.757 INFO [lossy] skipping 1224
54719 Sep 22 23:16:44.757 INFO [lossy] skipping 1234
54720 Sep 22 23:16:44.758 WARN returning error on write!
54721 Sep 22 23:16:44.758 DEBG Write :1203 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
54722 Sep 22 23:16:44.758 INFO [lossy] skipping 1224
54723 Sep 22 23:16:44.759 INFO [lossy] skipping 1234
54724 Sep 22 23:16:44.759 INFO [lossy] skipping 1203
54725 Sep 22 23:16:44.759 INFO [lossy] skipping 1224
54726 Sep 22 23:16:44.788 DEBG Write :1203 deps:[JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54727 Sep 22 23:16:44.789 INFO [lossy] skipping 1224
54728 Sep 22 23:16:44.789 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54729 Sep 22 23:16:44.797 DEBG Flush :1204 extent_limit None deps:[JobId(1203), JobId(1202), JobId(1201)] res:true f:85 g:1
54730 Sep 22 23:16:44.828 DEBG Write :1205 deps:[JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54731 Sep 22 23:16:44.859 DEBG Write :1206 deps:[JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54732 Sep 22 23:16:44.868 DEBG Flush :1207 extent_limit None deps:[JobId(1206), JobId(1205), JobId(1204)] res:true f:86 g:1
54733 Sep 22 23:16:44.898 DEBG Write :1208 deps:[JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54734 Sep 22 23:16:44.930 DEBG Write :1209 deps:[JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54735 Sep 22 23:16:44.939 DEBG Flush :1210 extent_limit None deps:[JobId(1209), JobId(1208), JobId(1207)] res:true f:87 g:1
54736 Sep 22 23:16:44.939 INFO [lossy] skipping 1211
54737 Sep 22 23:16:44.969 DEBG Write :1212 deps:[JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54738 Sep 22 23:16:44.971 WARN 1213 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54739 Sep 22 23:16:44.971 INFO [lossy] skipping 1214
54740 Sep 22 23:16:44.971 WARN 1215 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54741 Sep 22 23:16:44.971 WARN 1217 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54742 Sep 22 23:16:44.971 WARN 1218 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54743 Sep 22 23:16:44.971 INFO [lossy] skipping 1219
54744 Sep 22 23:16:44.971 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54745 Sep 22 23:16:44.971 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54746 Sep 22 23:16:44.971 INFO [lossy] skipping 1222
54747 Sep 22 23:16:44.971 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54748 Sep 22 23:16:44.971 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54749 Sep 22 23:16:44.971 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54750 Sep 22 23:16:44.971 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54751 Sep 22 23:16:44.971 INFO [lossy] skipping 1229
54752 Sep 22 23:16:44.971 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54753 Sep 22 23:16:44.971 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54754 Sep 22 23:16:44.971 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54755 Sep 22 23:16:44.971 INFO [lossy] skipping 1234
54756 Sep 22 23:16:44.971 INFO [lossy] skipping 1235
54757 Sep 22 23:16:44.971 INFO [lossy] skipping 1238
54758 Sep 22 23:16:44.971 INFO [lossy] skipping 1243
54759 Sep 22 23:16:44.971 INFO [lossy] skipping 1244
54760 Sep 22 23:16:44.971 INFO [lossy] skipping 1247
54761 Sep 22 23:16:45.001 DEBG Write :1211 deps:[JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54762 Sep 22 23:16:45.003 INFO [lossy] skipping 1214
54763 Sep 22 23:16:45.003 INFO [lossy] skipping 1222
54764 Sep 22 23:16:45.003 INFO [lossy] skipping 1229
54765 Sep 22 23:16:45.003 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
54766 Sep 22 23:16:45.003 INFO [lossy] skipping 1244
54767 Sep 22 23:16:45.003 WARN 1214 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54768 Sep 22 23:16:45.003 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54769 Sep 22 23:16:45.003 WARN returning error on flush!
54770 Sep 22 23:16:45.003 DEBG Flush :1213 extent_limit None deps:[JobId(1212), JobId(1211), JobId(1210)] res:false f:88 g:1
54771 Sep 22 23:16:45.003 INFO [lossy] skipping 1215
54772 Sep 22 23:16:45.003 INFO [lossy] skipping 1226
54773 Sep 22 23:16:45.003 INFO [lossy] skipping 1232
54774 Sep 22 23:16:45.003 INFO [lossy] skipping 1235
54775 Sep 22 23:16:45.003 INFO [lossy] skipping 1238
54776 Sep 22 23:16:45.003 INFO [lossy] skipping 1241
54777 Sep 22 23:16:45.003 INFO [lossy] skipping 1245
54778 Sep 22 23:16:45.003 INFO [lossy] skipping 1249
54779 Sep 22 23:16:45.003 INFO [lossy] skipping 1250
54780 Sep 22 23:16:45.011 DEBG Flush :1213 extent_limit None deps:[JobId(1212), JobId(1211), JobId(1210)] res:true f:88 g:1
54781 Sep 22 23:16:45.042 DEBG Write :1215 deps:[JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54782 Sep 22 23:16:45.043 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54783 Sep 22 23:16:45.043 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54784 Sep 22 23:16:45.043 INFO [lossy] skipping 1235
54785 Sep 22 23:16:45.043 INFO [lossy] skipping 1238
54786 Sep 22 23:16:45.043 INFO [lossy] skipping 1249
54787 Sep 22 23:16:45.043 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
54788 Sep 22 23:16:45.074 DEBG Write :1214 deps:[JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54789 Sep 22 23:16:45.083 DEBG Flush :1216 extent_limit None deps:[JobId(1215), JobId(1214), JobId(1213)] res:true f:89 g:1
54790 Sep 22 23:16:45.113 DEBG Write :1217 deps:[JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54791 Sep 22 23:16:45.145 DEBG Write :1218 deps:[JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54792 Sep 22 23:16:45.146 WARN returning error on flush!
54793 Sep 22 23:16:45.146 DEBG Flush :1219 extent_limit None deps:[JobId(1218), JobId(1217), JobId(1216)] res:false f:90 g:1
54794 Sep 22 23:16:45.146 WARN 1220 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54795 Sep 22 23:16:45.146 WARN 1221 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54796 Sep 22 23:16:45.146 WARN 1223 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54797 Sep 22 23:16:45.146 WARN 1224 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54798 Sep 22 23:16:45.146 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54799 Sep 22 23:16:45.146 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54800 Sep 22 23:16:45.146 INFO [lossy] skipping 1228
54801 Sep 22 23:16:45.146 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54802 Sep 22 23:16:45.146 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54803 Sep 22 23:16:45.146 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54804 Sep 22 23:16:45.146 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
54805 Sep 22 23:16:45.146 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
54806 Sep 22 23:16:45.146 INFO [lossy] skipping 1244
54807 Sep 22 23:16:45.154 DEBG Flush :1219 extent_limit None deps:[JobId(1218), JobId(1217), JobId(1216)] res:true f:90 g:1
54808 Sep 22 23:16:45.154 INFO [lossy] skipping 1228
54809 Sep 22 23:16:45.154 INFO [lossy] sleeping 1 second
54810 Sep 22 23:16:45.155 WARN returning error on read!
54811 Sep 22 23:16:45.155 DEBG Read :1251 deps:[JobId(1250)] res:false
54812 Sep 22 23:16:45.161 DEBG Read :1251 deps:[JobId(1250)] res:true
54813 Sep 22 23:16:45.185 DEBG [rc] retire 1201 clears [JobId(1199), JobId(1200), JobId(1201)], : downstairs
54814 Sep 22 23:16:45.185 ERRO [2] job id 1203 saw error GenericError("test error")
54815 Sep 22 23:16:45.185 ERRO [2] job id 1202 saw error GenericError("test error")
54816 Sep 22 23:16:45.185 ERRO [2] job id 1203 saw error GenericError("test error")
54817 Sep 22 23:16:45.185 ERRO [2] job id 1203 saw error GenericError("test error")
54818 Sep 22 23:16:45.188 DEBG [rc] retire 1204 clears [JobId(1202), JobId(1203), JobId(1204)], : downstairs
54819 Sep 22 23:16:45.191 DEBG [rc] retire 1207 clears [JobId(1205), JobId(1206), JobId(1207)], : downstairs
54820 Sep 22 23:16:45.194 DEBG [rc] retire 1210 clears [JobId(1208), JobId(1209), JobId(1210)], : downstairs
54821 Sep 22 23:16:45.194 ERRO [2] job id 1213 saw error GenericError("test error")
54822 Sep 22 23:16:45.197 DEBG [rc] retire 1213 clears [JobId(1211), JobId(1212), JobId(1213)], : downstairs
54823 Sep 22 23:16:45.200 DEBG [rc] retire 1216 clears [JobId(1214), JobId(1215), JobId(1216)], : downstairs
54824 Sep 22 23:16:45.200 ERRO [2] job id 1219 saw error GenericError("test error")
54825 Sep 22 23:16:45.203 DEBG [rc] retire 1219 clears [JobId(1217), JobId(1218), JobId(1219)], : downstairs
54826 Sep 22 23:16:45.203 ERRO [1] job id 1251 saw error GenericError("test error")
54827 Sep 22 23:16:45.205 DEBG Flush :1252 extent_limit None deps:[JobId(1251), JobId(1250)] res:true f:104 g:1
54828 Sep 22 23:16:45.205 INFO [lossy] sleeping 1 second
54829 Sep 22 23:16:45.581 DEBG [1] Read AckReady 1251, : downstairs
54830 Sep 22 23:16:45.582 DEBG up_ds_listen was notified
54831 Sep 22 23:16:45.582 DEBG up_ds_listen process 1251
54832 Sep 22 23:16:45.582 DEBG [A] ack job 1251:252, : downstairs
54833 Sep 22 23:16:45.635 DEBG up_ds_listen checked 1 jobs, back to waiting
54834 Sep 22 23:16:45.636 DEBG Flush :1250 extent_limit None deps:[JobId(1249), JobId(1248)] res:true f:103 g:1
54835 Sep 22 23:16:45.641 DEBG Read :1251 deps:[JobId(1250)] res:true
54836 Sep 22 23:16:45.663 DEBG IO Read 1253 has deps [JobId(1252)]
54837 Sep 22 23:16:45.668 DEBG up_ds_listen was notified
54838 Sep 22 23:16:45.668 DEBG up_ds_listen process 1250
54839 Sep 22 23:16:45.668 DEBG [A] ack job 1250:251, : downstairs
54840 Sep 22 23:16:45.668 DEBG up_ds_listen checked 1 jobs, back to waiting
54841 Sep 22 23:16:45.676 DEBG Flush :1252 extent_limit None deps:[JobId(1251), JobId(1250)] res:true f:104 g:1
54842 Sep 22 23:16:45.682 DEBG Read :1253 deps:[JobId(1252)] res:true
54843 Sep 22 23:16:46.082 DEBG up_ds_listen was notified
54844 Sep 22 23:16:46.082 DEBG up_ds_listen process 1252
54845 Sep 22 23:16:46.082 DEBG [A] ack job 1252:253, : downstairs
54846 Sep 22 23:16:46.082 DEBG up_ds_listen checked 1 jobs, back to waiting
54847 Sep 22 23:16:46.082 DEBG IO Flush 1254 has deps [JobId(1253), JobId(1252)]
54848 Sep 22 23:16:46.084 INFO [lossy] sleeping 1 second
54849 Sep 22 23:16:46.460 DEBG [0] Read AckReady 1253, : downstairs
54850 Sep 22 23:16:46.460 DEBG up_ds_listen was notified
54851 Sep 22 23:16:46.460 DEBG up_ds_listen process 1253
54852 Sep 22 23:16:46.460 DEBG [A] ack job 1253:254, : downstairs
54853 Sep 22 23:16:46.513 DEBG up_ds_listen checked 1 jobs, back to waiting
54854 Sep 22 23:16:46.544 DEBG Write :1220 deps:[JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54855 Sep 22 23:16:46.575 DEBG Write :1221 deps:[JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54856 Sep 22 23:16:46.582 DEBG Read :1253 deps:[JobId(1252)] res:true
54857 Sep 22 23:16:46.611 DEBG Flush :1222 extent_limit None deps:[JobId(1221), JobId(1220), JobId(1219)] res:true f:91 g:1
54858 Sep 22 23:16:46.641 DEBG Write :1223 deps:[JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54859 Sep 22 23:16:46.643 WARN returning error on write!
54860 Sep 22 23:16:46.644 DEBG Write :1224 deps:[JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
54861 Sep 22 23:16:46.644 WARN 1225 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54862 Sep 22 23:16:46.644 INFO [lossy] skipping 1226
54863 Sep 22 23:16:46.644 WARN 1227 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54864 Sep 22 23:16:46.644 INFO [lossy] skipping 1229
54865 Sep 22 23:16:46.644 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54866 Sep 22 23:16:46.644 INFO [lossy] skipping 1231
54867 Sep 22 23:16:46.644 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54868 Sep 22 23:16:46.644 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54869 Sep 22 23:16:46.644 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
54870 Sep 22 23:16:46.644 INFO [lossy] skipping 1236
54871 Sep 22 23:16:46.644 INFO [lossy] skipping 1240
54872 Sep 22 23:16:46.644 INFO [lossy] skipping 1242
54873 Sep 22 23:16:46.644 INFO [lossy] skipping 1248
54874 Sep 22 23:16:46.674 DEBG Write :1224 deps:[JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54875 Sep 22 23:16:46.675 WARN 1226 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54876 Sep 22 23:16:46.675 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54877 Sep 22 23:16:46.675 INFO [lossy] skipping 1236
54878 Sep 22 23:16:46.675 INFO [lossy] skipping 1248
54879 Sep 22 23:16:46.675 INFO [lossy] skipping 1225
54880 Sep 22 23:16:46.675 INFO [lossy] skipping 1228
54881 Sep 22 23:16:46.675 INFO [lossy] skipping 1238
54882 Sep 22 23:16:46.675 INFO [lossy] skipping 1249
54883 Sep 22 23:16:46.675 INFO [lossy] skipping 1251
54884 Sep 22 23:16:46.675 INFO [lossy] skipping 1225
54885 Sep 22 23:16:46.675 INFO [lossy] skipping 1251
54886 Sep 22 23:16:46.675 WARN returning error on flush!
54887 Sep 22 23:16:46.675 DEBG Flush :1225 extent_limit None deps:[JobId(1224), JobId(1223), JobId(1222)] res:false f:92 g:1
54888 Sep 22 23:16:46.675 INFO [lossy] skipping 1251
54889 Sep 22 23:16:46.683 DEBG Flush :1225 extent_limit None deps:[JobId(1224), JobId(1223), JobId(1222)] res:true f:92 g:1
54890 Sep 22 23:16:46.683 INFO [lossy] skipping 1251
54891 Sep 22 23:16:46.683 INFO [lossy] skipping 1251
54892 Sep 22 23:16:46.714 DEBG Write :1226 deps:[JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54893 Sep 22 23:16:46.715 WARN returning error on write!
54894 Sep 22 23:16:46.716 DEBG Write :1227 deps:[JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
54895 Sep 22 23:16:46.716 WARN 1228 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54896 Sep 22 23:16:46.716 WARN 1229 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54897 Sep 22 23:16:46.716 WARN 1230 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54898 Sep 22 23:16:46.716 INFO [lossy] skipping 1232
54899 Sep 22 23:16:46.716 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54900 Sep 22 23:16:46.716 INFO [lossy] skipping 1235
54901 Sep 22 23:16:46.716 INFO [lossy] skipping 1238
54902 Sep 22 23:16:46.716 INFO [lossy] skipping 1239
54903 Sep 22 23:16:46.716 INFO [lossy] skipping 1243
54904 Sep 22 23:16:46.716 INFO [lossy] skipping 1244
54905 Sep 22 23:16:46.716 INFO [lossy] skipping 1249
54906 Sep 22 23:16:46.716 INFO [lossy] skipping 1250
54907 Sep 22 23:16:46.746 DEBG Write :1227 deps:[JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54908 Sep 22 23:16:46.747 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54909 Sep 22 23:16:46.747 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
54910 Sep 22 23:16:46.747 INFO [lossy] skipping 1239
54911 Sep 22 23:16:46.748 INFO [lossy] skipping 1228
54912 Sep 22 23:16:46.748 INFO [lossy] skipping 1230
54913 Sep 22 23:16:46.748 INFO [lossy] skipping 1231
54914 Sep 22 23:16:46.748 INFO [lossy] skipping 1232
54915 Sep 22 23:16:46.748 INFO [lossy] skipping 1238
54916 Sep 22 23:16:46.748 INFO [lossy] skipping 1239
54917 Sep 22 23:16:46.748 INFO [lossy] skipping 1243
54918 Sep 22 23:16:46.748 INFO [lossy] skipping 1244
54919 Sep 22 23:16:46.748 INFO [lossy] skipping 1247
54920 Sep 22 23:16:46.748 INFO [lossy] skipping 1248
54921 Sep 22 23:16:46.748 INFO [lossy] skipping 1253
54922 Sep 22 23:16:46.756 DEBG Flush :1228 extent_limit None deps:[JobId(1227), JobId(1226), JobId(1225)] res:true f:93 g:1
54923 Sep 22 23:16:46.757 WARN returning error on write!
54924 Sep 22 23:16:46.757 DEBG Write :1230 deps:[JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:false
54925 Sep 22 23:16:46.757 WARN 1231 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54926 Sep 22 23:16:46.757 WARN 1232 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54927 Sep 22 23:16:46.757 INFO [lossy] skipping 1239
54928 Sep 22 23:16:46.757 INFO [lossy] skipping 1244
54929 Sep 22 23:16:46.757 INFO [lossy] skipping 1247
54930 Sep 22 23:16:46.757 INFO [lossy] skipping 1248
54931 Sep 22 23:16:46.787 DEBG Write :1230 deps:[JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54932 Sep 22 23:16:46.819 DEBG Write :1229 deps:[JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54933 Sep 22 23:16:46.820 INFO [lossy] skipping 1231
54934 Sep 22 23:16:46.820 WARN 1233 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54935 Sep 22 23:16:46.820 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
54936 Sep 22 23:16:46.820 INFO [lossy] skipping 1236
54937 Sep 22 23:16:46.820 INFO [lossy] skipping 1237
54938 Sep 22 23:16:46.820 INFO [lossy] skipping 1242
54939 Sep 22 23:16:46.820 INFO [lossy] skipping 1243
54940 Sep 22 23:16:46.820 INFO [lossy] skipping 1244
54941 Sep 22 23:16:46.820 INFO [lossy] skipping 1248
54942 Sep 22 23:16:46.820 INFO [lossy] skipping 1249
54943 Sep 22 23:16:46.820 INFO [lossy] skipping 1250
54944 Sep 22 23:16:46.828 DEBG Flush :1231 extent_limit None deps:[JobId(1230), JobId(1229), JobId(1228)] res:true f:94 g:1
54945 Sep 22 23:16:46.828 INFO [lossy] skipping 1236
54946 Sep 22 23:16:46.828 INFO [lossy] skipping 1237
54947 Sep 22 23:16:46.828 INFO [lossy] skipping 1236
54948 Sep 22 23:16:46.828 INFO [lossy] skipping 1237
54949 Sep 22 23:16:46.828 INFO [lossy] skipping 1236
54950 Sep 22 23:16:46.858 DEBG Write :1232 deps:[JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163)] res:true
54951 Sep 22 23:16:46.861 WARN returning error on write!
54952 Sep 22 23:16:46.861 DEBG Write :1233 deps:[JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163), JobId(1162)] res:false
54953 Sep 22 23:16:46.861 WARN 1234 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54954 Sep 22 23:16:46.861 WARN 1235 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54955 Sep 22 23:16:46.861 INFO [lossy] skipping 1240
54956 Sep 22 23:16:46.861 INFO [lossy] skipping 1245
54957 Sep 22 23:16:46.861 INFO [lossy] skipping 1253
54958 Sep 22 23:16:46.861 INFO [lossy] skipping 1254
54959 Sep 22 23:16:46.891 DEBG Write :1233 deps:[JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1163), JobId(1162)] res:true
54960 Sep 22 23:16:46.900 DEBG Flush :1234 extent_limit None deps:[JobId(1233), JobId(1232), JobId(1231)] res:true f:95 g:1
54961 Sep 22 23:16:46.931 DEBG Write :1235 deps:[JobId(1234), JobId(1231), JobId(1228), JobId(1225), JobId(1222), JobId(1219), JobId(1216), JobId(1213), JobId(1210), JobId(1207), JobId(1204), JobId(1201), JobId(1198), JobId(1195), JobId(1192), JobId(1189), JobId(1186), JobId(1183), JobId(1180), JobId(1177), JobId(1174), JobId(1171), JobId(1168), JobId(1165), JobId(1164), JobId(1163)] res:true
54962 Sep 22 23:16:46.936 DEBG Flush :1236 extent_limit None deps:[JobId(1235), JobId(1234)] res:true f:96 g:1
54963 Sep 22 23:16:46.942 DEBG Read :1237 deps:[JobId(1236)] res:true
54964 Sep 22 23:16:46.964 DEBG IO Read 1255 has deps [JobId(1254)]
54965 Sep 22 23:16:46.964 DEBG IO Flush 1256 has deps [JobId(1255), JobId(1254)]
54966 Sep 22 23:16:46.966 DEBG [rc] retire 1222 clears [JobId(1220), JobId(1221), JobId(1222)], : downstairs
54967 Sep 22 23:16:46.967 ERRO [2] job id 1224 saw error GenericError("test error")
54968 Sep 22 23:16:46.967 ERRO [2] job id 1225 saw error GenericError("test error")
54969 Sep 22 23:16:46.970 DEBG [rc] retire 1225 clears [JobId(1223), JobId(1224), JobId(1225)], : downstairs
54970 Sep 22 23:16:46.970 ERRO [2] job id 1227 saw error GenericError("test error")
54971 Sep 22 23:16:46.973 DEBG [rc] retire 1228 clears [JobId(1226), JobId(1227), JobId(1228)], : downstairs
54972 Sep 22 23:16:46.973 ERRO [2] job id 1230 saw error GenericError("test error")
54973 Sep 22 23:16:46.977 DEBG [rc] retire 1231 clears [JobId(1229), JobId(1230), JobId(1231)], : downstairs
54974 Sep 22 23:16:46.977 ERRO [2] job id 1233 saw error GenericError("test error")
54975 Sep 22 23:16:46.980 DEBG [rc] retire 1234 clears [JobId(1232), JobId(1233), JobId(1234)], : downstairs
54976 Sep 22 23:16:46.981 DEBG [rc] retire 1236 clears [JobId(1235), JobId(1236)], : downstairs
54977 Sep 22 23:16:46.996 DEBG Flush :1254 extent_limit None deps:[JobId(1253), JobId(1252)] res:true f:105 g:1
54978 Sep 22 23:16:46.996 INFO [lossy] skipping 1255
54979 Sep 22 23:16:46.997 WARN returning error on read!
54980 Sep 22 23:16:46.997 DEBG Read :1255 deps:[JobId(1254)] res:false
54981 Sep 22 23:16:47.002 DEBG Read :1255 deps:[JobId(1254)] res:true
54982 Sep 22 23:16:47.025 WARN returning error on flush!
54983 Sep 22 23:16:47.025 DEBG Flush :1238 extent_limit None deps:[JobId(1237), JobId(1236)] res:false f:97 g:1
54984 Sep 22 23:16:47.025 INFO [lossy] skipping 1246
54985 Sep 22 23:16:47.025 INFO [lossy] skipping 1247
54986 Sep 22 23:16:47.025 INFO [lossy] skipping 1248
54987 Sep 22 23:16:47.025 INFO [lossy] skipping 1251
54988 Sep 22 23:16:47.025 DEBG Flush :1238 extent_limit None deps:[JobId(1237), JobId(1236)] res:true f:97 g:1
54989 Sep 22 23:16:47.025 INFO [lossy] skipping 1246
54990 Sep 22 23:16:47.025 INFO [lossy] skipping 1248
54991 Sep 22 23:16:47.025 INFO [lossy] skipping 1239
54992 Sep 22 23:16:47.025 WARN 1240 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
54993 Sep 22 23:16:47.025 INFO [lossy] skipping 1243
54994 Sep 22 23:16:47.025 INFO [lossy] skipping 1248
54995 Sep 22 23:16:47.025 INFO [lossy] skipping 1251
54996 Sep 22 23:16:47.031 DEBG Read :1239 deps:[JobId(1238)] res:true
54997 Sep 22 23:16:47.480 ERRO [2] job id 1238 saw error GenericError("test error")
54998 Sep 22 23:16:47.480 DEBG [rc] retire 1238 clears [JobId(1237), JobId(1238)], : downstairs
54999 Sep 22 23:16:47.813 ERRO [1] job id 1255 saw error GenericError("test error")
55000 Sep 22 23:16:47.813 INFO [lossy] skipping 1254
55001 Sep 22 23:16:47.813 DEBG Flush :1254 extent_limit None deps:[JobId(1253), JobId(1252)] res:true f:105 g:1
55002 Sep 22 23:16:47.819 DEBG Read :1255 deps:[JobId(1254)] res:true
55003 Sep 22 23:16:47.840 DEBG up_ds_listen was notified
55004 Sep 22 23:16:47.840 DEBG up_ds_listen process 1254
55005 Sep 22 23:16:47.840 DEBG [A] ack job 1254:255, : downstairs
55006 Sep 22 23:16:47.840 DEBG up_ds_listen checked 1 jobs, back to waiting
55007 Sep 22 23:16:47.843 DEBG Flush :1256 extent_limit None deps:[JobId(1255), JobId(1254)] res:true f:106 g:1
55008 Sep 22 23:16:47.843 INFO [lossy] sleeping 1 second
55009 Sep 22 23:16:47.845 INFO [lossy] skipping 1243
55010 Sep 22 23:16:47.845 INFO [lossy] skipping 1248
55011 Sep 22 23:16:47.845 DEBG Flush :1240 extent_limit None deps:[JobId(1239), JobId(1238)] res:true f:98 g:1
55012 Sep 22 23:16:47.851 DEBG Read :1241 deps:[JobId(1240)] res:true
55013 Sep 22 23:16:48.302 DEBG [rc] retire 1240 clears [JobId(1239), JobId(1240)], : downstairs
55014 Sep 22 23:16:48.632 DEBG [1] Read AckReady 1255, : downstairs
55015 Sep 22 23:16:48.633 DEBG up_ds_listen was notified
55016 Sep 22 23:16:48.633 DEBG up_ds_listen process 1255
55017 Sep 22 23:16:48.633 DEBG [A] ack job 1255:256, : downstairs
55018 Sep 22 23:16:48.686 DEBG up_ds_listen checked 1 jobs, back to waiting
55019 Sep 22 23:16:48.688 DEBG Flush :1256 extent_limit None deps:[JobId(1255), JobId(1254)] res:true f:106 g:1
55020 Sep 22 23:16:48.688 INFO [lossy] sleeping 1 second
55021 Sep 22 23:16:48.736 DEBG IO Read 1257 has deps [JobId(1256)]
55022 Sep 22 23:16:49.074 DEBG up_ds_listen was notified
55023 Sep 22 23:16:49.074 DEBG up_ds_listen process 1256
55024 Sep 22 23:16:49.074 DEBG [A] ack job 1256:257, : downstairs
55025 Sep 22 23:16:49.074 DEBG up_ds_listen checked 1 jobs, back to waiting
55026 Sep 22 23:16:49.081 INFO [lossy] sleeping 1 second
55027 Sep 22 23:16:49.083 WARN returning error on flush!
55028 Sep 22 23:16:49.083 DEBG Flush :1242 extent_limit None deps:[JobId(1241), JobId(1240)] res:false f:99 g:1
55029 Sep 22 23:16:49.083 INFO [lossy] skipping 1244
55030 Sep 22 23:16:49.083 INFO [lossy] skipping 1247
55031 Sep 22 23:16:49.083 INFO [lossy] skipping 1252
55032 Sep 22 23:16:49.083 INFO [lossy] skipping 1253
55033 Sep 22 23:16:49.083 INFO [lossy] skipping 1255
55034 Sep 22 23:16:49.083 DEBG Flush :1242 extent_limit None deps:[JobId(1241), JobId(1240)] res:true f:99 g:1
55035 Sep 22 23:16:49.083 WARN 1244 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
55036 Sep 22 23:16:49.083 INFO [lossy] skipping 1253
55037 Sep 22 23:16:49.084 INFO [lossy] skipping 1255
55038 Sep 22 23:16:49.084 INFO [lossy] skipping 1253
55039 Sep 22 23:16:49.084 INFO [lossy] skipping 1255
55040 Sep 22 23:16:49.084 INFO [lossy] skipping 1253
55041 Sep 22 23:16:49.090 DEBG Read :1243 deps:[JobId(1242)] res:true
55042 Sep 22 23:16:49.490 ERRO [2] job id 1242 saw error GenericError("test error")
55043 Sep 22 23:16:49.490 DEBG [rc] retire 1242 clears [JobId(1241), JobId(1242)], : downstairs
55044 Sep 22 23:16:49.490 DEBG IO Flush 1258 has deps [JobId(1257), JobId(1256)]
55045 Sep 22 23:16:49.493 WARN returning error on flush!
55046 Sep 22 23:16:49.493 DEBG Flush :1244 extent_limit None deps:[JobId(1243), JobId(1242)] res:false f:100 g:1
55047 Sep 22 23:16:49.493 INFO [lossy] skipping 1245
55048 Sep 22 23:16:49.493 INFO [lossy] skipping 1246
55049 Sep 22 23:16:49.493 INFO [lossy] skipping 1249
55050 Sep 22 23:16:49.493 INFO [lossy] skipping 1254
55051 Sep 22 23:16:49.493 DEBG Flush :1244 extent_limit None deps:[JobId(1243), JobId(1242)] res:true f:100 g:1
55052 Sep 22 23:16:49.499 DEBG Read :1245 deps:[JobId(1244)] res:true
55053 Sep 22 23:16:49.899 ERRO [2] job id 1244 saw error GenericError("test error")
55054 Sep 22 23:16:49.899 DEBG [rc] retire 1244 clears [JobId(1243), JobId(1244)], : downstairs
55055 Sep 22 23:16:49.905 DEBG Read :1257 deps:[JobId(1256)] res:true
55056 Sep 22 23:16:49.929 WARN returning error on flush!
55057 Sep 22 23:16:49.929 DEBG Flush :1246 extent_limit None deps:[JobId(1245), JobId(1244)] res:false f:101 g:1
55058 Sep 22 23:16:49.929 INFO [lossy] skipping 1254
55059 Sep 22 23:16:49.929 WARN returning error on flush!
55060 Sep 22 23:16:49.929 DEBG Flush :1246 extent_limit None deps:[JobId(1245), JobId(1244)] res:false f:101 g:1
55061 Sep 22 23:16:49.929 DEBG Flush :1246 extent_limit None deps:[JobId(1245), JobId(1244)] res:true f:101 g:1
55062 Sep 22 23:16:49.929 INFO [lossy] skipping 1247
55063 Sep 22 23:16:49.929 WARN 1248 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
55064 Sep 22 23:16:49.929 INFO [lossy] skipping 1257
55065 Sep 22 23:16:49.929 INFO [lossy] skipping 1258
55066 Sep 22 23:16:49.935 DEBG Read :1247 deps:[JobId(1246)] res:true
55067 Sep 22 23:16:50.336 ERRO [2] job id 1246 saw error GenericError("test error")
55068 Sep 22 23:16:50.336 ERRO [2] job id 1246 saw error GenericError("test error")
55069 Sep 22 23:16:50.336 DEBG [rc] retire 1246 clears [JobId(1245), JobId(1246)], : downstairs
55070 Sep 22 23:16:50.338 DEBG Flush :1258 extent_limit None deps:[JobId(1257), JobId(1256)] res:true f:107 g:1
55071 Sep 22 23:16:50.338 INFO [lossy] sleeping 1 second
55072 Sep 22 23:16:50.344 DEBG Read :1257 deps:[JobId(1256)] res:true
55073 Sep 22 23:16:50.743 DEBG [0] Read AckReady 1257, : downstairs
55074 Sep 22 23:16:50.744 DEBG up_ds_listen was notified
55075 Sep 22 23:16:50.744 DEBG up_ds_listen process 1257
55076 Sep 22 23:16:50.744 DEBG [A] ack job 1257:258, : downstairs
55077 Sep 22 23:16:50.797 DEBG up_ds_listen checked 1 jobs, back to waiting
55078 Sep 22 23:16:50.798 DEBG IO Read 1259 has deps [JobId(1258)]
55079 Sep 22 23:16:50.813 INFO [lossy] skipping 1258
55080 Sep 22 23:16:50.813 INFO [lossy] skipping 1258
55081 Sep 22 23:16:50.813 DEBG Flush :1258 extent_limit None deps:[JobId(1257), JobId(1256)] res:true f:107 g:1
55082 Sep 22 23:16:50.819 DEBG Read :1259 deps:[JobId(1258)] res:true
55083 Sep 22 23:16:50.842 WARN returning error on flush!
55084 Sep 22 23:16:50.842 DEBG Flush :1248 extent_limit None deps:[JobId(1247), JobId(1246)] res:false f:102 g:1
55085 Sep 22 23:16:50.842 INFO [lossy] skipping 1249
55086 Sep 22 23:16:50.842 INFO [lossy] skipping 1252
55087 Sep 22 23:16:50.842 INFO [lossy] skipping 1258
55088 Sep 22 23:16:50.842 DEBG Flush :1248 extent_limit None deps:[JobId(1247), JobId(1246)] res:true f:102 g:1
55089 Sep 22 23:16:50.842 INFO [lossy] skipping 1249
55090 Sep 22 23:16:50.842 INFO [lossy] skipping 1252
55091 Sep 22 23:16:50.842 INFO [lossy] skipping 1249
55092 Sep 22 23:16:50.842 INFO [lossy] skipping 1252
55093 Sep 22 23:16:50.848 DEBG Read :1249 deps:[JobId(1248)] res:true
55094 Sep 22 23:16:50.965 DEBG IO Flush 1260 has deps [JobId(1259), JobId(1258)]
55095 Sep 22 23:16:51.297 ERRO [2] job id 1248 saw error GenericError("test error")
55096 Sep 22 23:16:51.298 DEBG [rc] retire 1248 clears [JobId(1247), JobId(1248)], : downstairs
55097 Sep 22 23:16:51.629 DEBG up_ds_listen was notified
55098 Sep 22 23:16:51.630 DEBG up_ds_listen process 1258
55099 Sep 22 23:16:51.630 DEBG [A] ack job 1258:259, : downstairs
55100 Sep 22 23:16:51.630 DEBG up_ds_listen checked 1 jobs, back to waiting
55101 Sep 22 23:16:51.636 DEBG Read :1259 deps:[JobId(1258)] res:true
55102 Sep 22 23:16:51.660 INFO [lossy] sleeping 1 second
55103 Sep 22 23:16:51.662 DEBG Flush :1250 extent_limit None deps:[JobId(1249), JobId(1248)] res:true f:103 g:1
55104 Sep 22 23:16:51.662 WARN returning error on read!
55105 Sep 22 23:16:51.662 DEBG Read :1251 deps:[JobId(1250)] res:false
55106 Sep 22 23:16:51.662 WARN 1252 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
55107 Sep 22 23:16:51.662 INFO [lossy] skipping 1253
55108 Sep 22 23:16:51.662 WARN returning error on read!
55109 Sep 22 23:16:51.662 DEBG Read :1251 deps:[JobId(1250)] res:false
55110 Sep 22 23:16:51.662 INFO [lossy] skipping 1251
55111 Sep 22 23:16:51.663 INFO [lossy] skipping 1251
55112 Sep 22 23:16:51.663 INFO [lossy] skipping 1251
55113 Sep 22 23:16:51.663 WARN returning error on read!
55114 Sep 22 23:16:51.663 DEBG Read :1251 deps:[JobId(1250)] res:false
55115 Sep 22 23:16:51.669 DEBG Read :1251 deps:[JobId(1250)] res:true
55116 Sep 22 23:16:52.118 DEBG [rc] retire 1250 clears [JobId(1249), JobId(1250)], : downstairs
55117 Sep 22 23:16:52.118 ERRO [2] job id 1251 saw error GenericError("test error")
55118 Sep 22 23:16:52.118 ERRO [2] job id 1251 saw error GenericError("test error")
55119 Sep 22 23:16:52.118 ERRO [2] job id 1251 saw error GenericError("test error")
55120 Sep 22 23:16:52.448 DEBG [1] Read AckReady 1259, : downstairs
55121 Sep 22 23:16:52.449 DEBG up_ds_listen was notified
55122 Sep 22 23:16:52.449 DEBG up_ds_listen process 1259
55123 Sep 22 23:16:52.449 DEBG [A] ack job 1259:260, : downstairs
55124 Sep 22 23:16:52.503 DEBG up_ds_listen checked 1 jobs, back to waiting
55125 Sep 22 23:16:52.505 INFO [lossy] sleeping 1 second
55126 Sep 22 23:16:52.553 DEBG IO Read 1261 has deps [JobId(1260)]
55127 Sep 22 23:16:52.896 DEBG Flush :1260 extent_limit None deps:[JobId(1259), JobId(1258)] res:true f:108 g:1
55128 Sep 22 23:16:52.896 INFO [lossy] sleeping 1 second
55129 Sep 22 23:16:52.899 DEBG Flush :1252 extent_limit None deps:[JobId(1251), JobId(1250)] res:true f:104 g:1
55130 Sep 22 23:16:52.905 DEBG Read :1253 deps:[JobId(1252)] res:true
55131 Sep 22 23:16:53.305 DEBG [rc] retire 1252 clears [JobId(1251), JobId(1252)], : downstairs
55132 Sep 22 23:16:53.305 DEBG IO Flush 1262 has deps [JobId(1261), JobId(1260)]
55133 Sep 22 23:16:53.308 DEBG Flush :1254 extent_limit None deps:[JobId(1253), JobId(1252)] res:true f:105 g:1
55134 Sep 22 23:16:53.309 WARN returning error on read!
55135 Sep 22 23:16:53.309 DEBG Read :1255 deps:[JobId(1254)] res:false
55136 Sep 22 23:16:53.309 WARN 1256 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
55137 Sep 22 23:16:53.309 INFO [lossy] skipping 1257
55138 Sep 22 23:16:53.309 INFO [lossy] skipping 1258
55139 Sep 22 23:16:53.309 INFO [lossy] skipping 1260
55140 Sep 22 23:16:53.315 DEBG Read :1255 deps:[JobId(1254)] res:true
55141 Sep 22 23:16:53.717 DEBG [rc] retire 1254 clears [JobId(1253), JobId(1254)], : downstairs
55142 Sep 22 23:16:53.717 ERRO [2] job id 1255 saw error GenericError("test error")
55143 Sep 22 23:16:53.717 DEBG Flush :1260 extent_limit None deps:[JobId(1259), JobId(1258)] res:true f:108 g:1
55144 Sep 22 23:16:53.723 DEBG Read :1261 deps:[JobId(1260)] res:true
55145 Sep 22 23:16:53.744 DEBG up_ds_listen was notified
55146 Sep 22 23:16:53.745 DEBG up_ds_listen process 1260
55147 Sep 22 23:16:53.745 DEBG [A] ack job 1260:261, : downstairs
55148 Sep 22 23:16:53.745 DEBG up_ds_listen checked 1 jobs, back to waiting
55149 Sep 22 23:16:53.747 INFO [lossy] skipping 1256
55150 Sep 22 23:16:53.747 INFO [lossy] skipping 1257
55151 Sep 22 23:16:53.747 INFO [lossy] skipping 1262
55152 Sep 22 23:16:53.747 INFO [lossy] skipping 1256
55153 Sep 22 23:16:53.747 INFO [lossy] skipping 1257
55154 Sep 22 23:16:53.747 DEBG Flush :1256 extent_limit None deps:[JobId(1255), JobId(1254)] res:true f:106 g:1
55155 Sep 22 23:16:53.747 INFO [lossy] skipping 1257
55156 Sep 22 23:16:53.747 INFO [lossy] skipping 1257
55157 Sep 22 23:16:53.753 DEBG Read :1257 deps:[JobId(1256)] res:true
55158 Sep 22 23:16:54.153 DEBG [rc] retire 1256 clears [JobId(1255), JobId(1256)], : downstairs
55159 Sep 22 23:16:54.155 DEBG Flush :1262 extent_limit None deps:[JobId(1261), JobId(1260)] res:true f:109 g:1
55160 Sep 22 23:16:54.155 INFO [lossy] sleeping 1 second
55161 Sep 22 23:16:54.161 DEBG Read :1261 deps:[JobId(1260)] res:true
55162 Sep 22 23:16:54.560 DEBG [0] Read AckReady 1261, : downstairs
55163 Sep 22 23:16:54.561 DEBG up_ds_listen was notified
55164 Sep 22 23:16:54.561 DEBG up_ds_listen process 1261
55165 Sep 22 23:16:54.561 DEBG [A] ack job 1261:262, : downstairs
55166 Sep 22 23:16:54.614 DEBG up_ds_listen checked 1 jobs, back to waiting
55167 Sep 22 23:16:54.615 DEBG IO Read 1263 has deps [JobId(1262)]
55168 Sep 22 23:16:54.630 INFO [lossy] skipping 1258
55169 Sep 22 23:16:54.630 INFO [lossy] skipping 1259
55170 Sep 22 23:16:54.630 INFO [lossy] skipping 1263
55171 Sep 22 23:16:54.630 DEBG Flush :1258 extent_limit None deps:[JobId(1257), JobId(1256)] res:true f:107 g:1
55172 Sep 22 23:16:54.636 DEBG Read :1259 deps:[JobId(1258)] res:true
55173 Sep 22 23:16:54.705 DEBG IO Flush 1264 has deps [JobId(1263), JobId(1262)]
55174 Sep 22 23:16:55.037 DEBG [rc] retire 1258 clears [JobId(1257), JobId(1258)], : downstairs
55175 Sep 22 23:16:55.039 DEBG Flush :1262 extent_limit None deps:[JobId(1261), JobId(1260)] res:true f:109 g:1
55176 Sep 22 23:16:55.039 INFO [lossy] skipping 1263
55177 Sep 22 23:16:55.039 WARN returning error on read!
55178 Sep 22 23:16:55.039 DEBG Read :1263 deps:[JobId(1262)] res:false
55179 Sep 22 23:16:55.045 DEBG Read :1263 deps:[JobId(1262)] res:true
55180 Sep 22 23:16:55.445 ERRO [1] job id 1263 saw error GenericError("test error")
55181 Sep 22 23:16:55.445 DEBG up_ds_listen was notified
55182 Sep 22 23:16:55.445 DEBG up_ds_listen process 1262
55183 Sep 22 23:16:55.445 DEBG [A] ack job 1262:263, : downstairs
55184 Sep 22 23:16:55.445 DEBG up_ds_listen checked 1 jobs, back to waiting
55185 Sep 22 23:16:55.446 INFO [lossy] skipping 1263
55186 Sep 22 23:16:55.446 WARN returning error on read!
55187 Sep 22 23:16:55.446 DEBG Read :1263 deps:[JobId(1262)] res:false
55188 Sep 22 23:16:55.446 WARN returning error on read!
55189 Sep 22 23:16:55.446 DEBG Read :1263 deps:[JobId(1262)] res:false
55190 Sep 22 23:16:55.452 DEBG Read :1263 deps:[JobId(1262)] res:true
55191 Sep 22 23:16:55.474 ERRO [0] job id 1263 saw error GenericError("test error")
55192 Sep 22 23:16:55.474 ERRO [0] job id 1263 saw error GenericError("test error")
55193 Sep 22 23:16:55.476 INFO [lossy] sleeping 1 second
55194 Sep 22 23:16:55.477 INFO [lossy] skipping 1260
55195 Sep 22 23:16:55.477 INFO [lossy] skipping 1262
55196 Sep 22 23:16:55.478 DEBG Flush :1260 extent_limit None deps:[JobId(1259), JobId(1258)] res:true f:108 g:1
55197 Sep 22 23:16:55.478 INFO [lossy] skipping 1262
55198 Sep 22 23:16:55.478 WARN 1262 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
55199 Sep 22 23:16:55.478 INFO [lossy] skipping 1261
55200 Sep 22 23:16:55.478 INFO [lossy] skipping 1264
55201 Sep 22 23:16:55.483 DEBG Read :1261 deps:[JobId(1260)] res:true
55202 Sep 22 23:16:55.935 DEBG [rc] retire 1260 clears [JobId(1259), JobId(1260)], : downstairs
55203 Sep 22 23:16:56.269 DEBG [1] Read AckReady 1263, : downstairs
55204 Sep 22 23:16:56.270 DEBG up_ds_listen was notified
55205 Sep 22 23:16:56.270 DEBG up_ds_listen process 1263
55206 Sep 22 23:16:56.270 DEBG [A] ack job 1263:264, : downstairs
55207 Sep 22 23:16:56.324 DEBG up_ds_listen checked 1 jobs, back to waiting
55208 Sep 22 23:16:56.327 DEBG Flush :1264 extent_limit None deps:[JobId(1263), JobId(1262)] res:true f:110 g:1
55209 Sep 22 23:16:56.327 INFO [lossy] sleeping 1 second
55210 Sep 22 23:16:56.375 DEBG IO Read 1265 has deps [JobId(1264)]
55211 Sep 22 23:16:56.722 WARN returning error on flush!
55212 Sep 22 23:16:56.722 DEBG Flush :1264 extent_limit None deps:[JobId(1263), JobId(1262)] res:false f:110 g:1
55213 Sep 22 23:16:56.722 DEBG Flush :1264 extent_limit None deps:[JobId(1263), JobId(1262)] res:true f:110 g:1
55214 Sep 22 23:16:56.722 INFO [lossy] sleeping 1 second
55215 Sep 22 23:16:56.723 ERRO [1] job id 1264 saw error GenericError("test error")
55216 Sep 22 23:16:56.723 DEBG up_ds_listen was notified
55217 Sep 22 23:16:56.723 DEBG up_ds_listen process 1264
55218 Sep 22 23:16:56.723 DEBG [A] ack job 1264:265, : downstairs
55219 Sep 22 23:16:56.723 DEBG up_ds_listen checked 1 jobs, back to waiting
55220 Sep 22 23:16:56.725 DEBG Flush :1262 extent_limit None deps:[JobId(1261), JobId(1260)] res:true f:109 g:1
55221 Sep 22 23:16:56.731 DEBG Read :1263 deps:[JobId(1262)] res:true
55222 Sep 22 23:16:57.135 DEBG [rc] retire 1262 clears [JobId(1261), JobId(1262)], : downstairs
55223 Sep 22 23:16:57.135 DEBG IO Flush 1266 has deps [JobId(1265), JobId(1264)]
55224 Sep 22 23:16:57.138 INFO [lossy] skipping 1264
55225 Sep 22 23:16:57.138 INFO [lossy] skipping 1264
55226 Sep 22 23:16:57.138 INFO [lossy] skipping 1264
55227 Sep 22 23:16:57.138 WARN returning error on flush!
55228 Sep 22 23:16:57.138 DEBG Flush :1264 extent_limit None deps:[JobId(1263), JobId(1262)] res:false f:110 g:1
55229 Sep 22 23:16:57.138 INFO [lossy] skipping 1264
55230 Sep 22 23:16:57.138 DEBG Flush :1264 extent_limit None deps:[JobId(1263), JobId(1262)] res:true f:110 g:1
55231 Sep 22 23:16:57.144 DEBG Read :1265 deps:[JobId(1264)] res:true
55232 Sep 22 23:16:57.547 ERRO [2] job id 1264 saw error GenericError("test error")
55233 Sep 22 23:16:57.547 DEBG [rc] retire 1264 clears [JobId(1263), JobId(1264)], : downstairs
55234 Sep 22 23:16:57.553 DEBG Read :1265 deps:[JobId(1264)] res:true
55235 Sep 22 23:16:57.577 DEBG Flush :1266 extent_limit None deps:[JobId(1265), JobId(1264)] res:true f:111 g:1
55236 Sep 22 23:16:57.577 INFO [lossy] sleeping 1 second
55237 Sep 22 23:16:57.579 INFO [lossy] skipping 1266
55238 Sep 22 23:16:57.579 WARN returning error on flush!
55239 Sep 22 23:16:57.579 DEBG Flush :1266 extent_limit None deps:[JobId(1265), JobId(1264)] res:false f:111 g:1
55240 Sep 22 23:16:57.579 DEBG Flush :1266 extent_limit None deps:[JobId(1265), JobId(1264)] res:true f:111 g:1
55241 Sep 22 23:16:57.579 INFO [lossy] sleeping 1 second
55242 Sep 22 23:16:58.009 DEBG [0] Read AckReady 1265, : downstairs
55243 Sep 22 23:16:58.010 ERRO [0] job id 1266 saw error GenericError("test error")
55244 Sep 22 23:16:58.343 DEBG [2] Read already AckReady 1265, : downstairs
55245 Sep 22 23:16:58.345 DEBG up_ds_listen was notified
55246 Sep 22 23:16:58.345 DEBG up_ds_listen process 1265
55247 Sep 22 23:16:58.345 DEBG [A] ack job 1265:266, : downstairs
55248 Sep 22 23:16:58.399 DEBG up_ds_listen process 1266
55249 Sep 22 23:16:58.399 DEBG [A] ack job 1266:267, : downstairs
55250 Sep 22 23:16:58.399 DEBG up_ds_listen checked 2 jobs, back to waiting
55251 Sep 22 23:16:58.399 DEBG up_ds_listen was notified
55252 Sep 22 23:16:58.399 DEBG up_ds_listen checked 0 jobs, back to waiting
55253 Sep 22 23:16:58.406 DEBG Read :1265 deps:[JobId(1264)] res:true
55254 Sep 22 23:16:58.428 DEBG IO Read 1267 has deps [JobId(1266)]
55255 Sep 22 23:16:58.441 DEBG Flush :1266 extent_limit None deps:[JobId(1265), JobId(1264)] res:true f:111 g:1
55256 Sep 22 23:16:58.447 DEBG Read :1267 deps:[JobId(1266)] res:true
55257 Sep 22 23:16:58.852 DEBG [rc] retire 1266 clears [JobId(1265), JobId(1266)], : downstairs
55258 Sep 22 23:16:58.852 INFO [lossy] skipping 1267
55259 Sep 22 23:16:58.852 WARN returning error on read!
55260 Sep 22 23:16:58.852 DEBG Read :1267 deps:[JobId(1266)] res:false
55261 Sep 22 23:16:58.858 DEBG Read :1267 deps:[JobId(1266)] res:true
55262 Sep 22 23:16:58.880 INFO [lossy] skipping 1267
55263 Sep 22 23:16:58.880 INFO [lossy] skipping 1267
55264 Sep 22 23:16:58.880 INFO [lossy] skipping 1267
55265 Sep 22 23:16:58.886 DEBG Read :1267 deps:[JobId(1266)] res:true
55266 Sep 22 23:16:58.907 DEBG IO Flush 1268 has deps [JobId(1267)]
55267 Sep 22 23:16:58.908 ERRO [2] job id 1267 saw error GenericError("test error")
55268 Sep 22 23:16:58.910 INFO [lossy] sleeping 1 second
55269 Sep 22 23:16:59.291 DEBG [1] Read AckReady 1267, : downstairs
55270 Sep 22 23:16:59.292 DEBG up_ds_listen was notified
55271 Sep 22 23:16:59.292 DEBG up_ds_listen process 1267
55272 Sep 22 23:16:59.292 DEBG [A] ack job 1267:268, : downstairs
55273 Sep 22 23:16:59.345 DEBG up_ds_listen checked 1 jobs, back to waiting
55274 Sep 22 23:16:59.347 INFO [lossy] sleeping 1 second
55275 Sep 22 23:16:59.349 WARN returning error on flush!
55276 Sep 22 23:16:59.349 DEBG Flush :1268 extent_limit None deps:[JobId(1267)] res:false f:112 g:1
55277 Sep 22 23:16:59.349 DEBG Flush :1268 extent_limit None deps:[JobId(1267)] res:true f:112 g:1
55278 Sep 22 23:16:59.349 INFO [lossy] sleeping 1 second
55279 Sep 22 23:16:59.445 DEBG IO Read 1269 has deps [JobId(1268)]
55280 Sep 22 23:16:59.778 ERRO [0] job id 1268 saw error GenericError("test error")
55281 Sep 22 23:17:00.126 DEBG IO Flush 1270 has deps [JobId(1269), JobId(1268)]
55282 Sep 22 23:17:00.126 DEBG Flush :1268 extent_limit None deps:[JobId(1267)] res:true f:112 g:1
55283 Sep 22 23:17:00.126 INFO [lossy] sleeping 1 second
55284 Sep 22 23:17:00.127 DEBG up_ds_listen was notified
55285 Sep 22 23:17:00.127 DEBG up_ds_listen process 1268
55286 Sep 22 23:17:00.127 DEBG [A] ack job 1268:269, : downstairs
55287 Sep 22 23:17:00.127 DEBG up_ds_listen checked 1 jobs, back to waiting
55288 Sep 22 23:17:00.348 DEBG Flush :1268 extent_limit None deps:[JobId(1267)] res:true f:112 g:1
55289 Sep 22 23:17:00.348 INFO [lossy] skipping 1269
55290 Sep 22 23:17:00.355 DEBG Read :1269 deps:[JobId(1268)] res:true
55291 Sep 22 23:17:00.377 WARN returning error on read!
55292 Sep 22 23:17:00.377 DEBG Read :1269 deps:[JobId(1268)] res:false
55293 Sep 22 23:17:00.383 DEBG Read :1269 deps:[JobId(1268)] res:true
55294 Sep 22 23:17:00.405 DEBG [rc] retire 1268 clears [JobId(1267), JobId(1268)], : downstairs
55295 Sep 22 23:17:00.405 ERRO [0] job id 1269 saw error GenericError("test error")
55296 Sep 22 23:17:00.407 WARN returning error on flush!
55297 Sep 22 23:17:00.407 DEBG Flush :1270 extent_limit None deps:[JobId(1269), JobId(1268)] res:false f:113 g:1
55298 Sep 22 23:17:00.408 DEBG Flush :1270 extent_limit None deps:[JobId(1269), JobId(1268)] res:true f:113 g:1
55299 Sep 22 23:17:00.408 INFO [lossy] sleeping 1 second
55300 Sep 22 23:17:00.457 DEBG Flush :1270 extent_limit None deps:[JobId(1269), JobId(1268)] res:true f:113 g:1
55301 Sep 22 23:17:00.457 INFO [lossy] sleeping 1 second
55302 Sep 22 23:17:00.789 DEBG [2] Read AckReady 1269, : downstairs
55303 Sep 22 23:17:00.790 ERRO [2] job id 1270 saw error GenericError("test error")
55304 Sep 22 23:17:00.790 DEBG up_ds_listen was notified
55305 Sep 22 23:17:00.790 DEBG up_ds_listen process 1269
55306 Sep 22 23:17:00.790 DEBG [A] ack job 1269:270, : downstairs
55307 Sep 22 23:17:00.844 DEBG up_ds_listen checked 1 jobs, back to waiting
55308 Sep 22 23:17:01.226 DEBG up_ds_listen was notified
55309 Sep 22 23:17:01.226 DEBG up_ds_listen process 1270
55310 Sep 22 23:17:01.226 DEBG [A] ack job 1270:271, : downstairs
55311 Sep 22 23:17:01.226 DEBG up_ds_listen checked 1 jobs, back to waiting
55312 Sep 22 23:17:01.233 DEBG Read :1269 deps:[JobId(1268)] res:true
55313 Sep 22 23:17:01.254 DEBG IO Read 1271 has deps [JobId(1270)]
55314 Sep 22 23:17:01.268 INFO [lossy] skipping 1270
55315 Sep 22 23:17:01.268 DEBG Flush :1270 extent_limit None deps:[JobId(1269), JobId(1268)] res:true f:113 g:1
55316 Sep 22 23:17:01.274 DEBG Read :1271 deps:[JobId(1270)] res:true
55317 Sep 22 23:17:01.678 DEBG [rc] retire 1270 clears [JobId(1269), JobId(1270)], : downstairs
55318 Sep 22 23:17:01.678 DEBG IO Flush 1272 has deps [JobId(1271)]
55319 Sep 22 23:17:01.678 WARN returning error on read!
55320 Sep 22 23:17:01.678 DEBG Read :1271 deps:[JobId(1270)] res:false
55321 Sep 22 23:17:01.684 DEBG Read :1271 deps:[JobId(1270)] res:true
55322 Sep 22 23:17:01.712 DEBG Read :1271 deps:[JobId(1270)] res:true
55323 Sep 22 23:17:01.734 ERRO [2] job id 1271 saw error GenericError("test error")
55324 Sep 22 23:17:01.737 INFO [lossy] sleeping 1 second
55325 Sep 22 23:17:02.119 DEBG [1] Read AckReady 1271, : downstairs
55326 Sep 22 23:17:02.120 DEBG up_ds_listen was notified
55327 Sep 22 23:17:02.120 DEBG up_ds_listen process 1271
55328 Sep 22 23:17:02.120 DEBG [A] ack job 1271:272, : downstairs
55329 Sep 22 23:17:02.174 DEBG up_ds_listen checked 1 jobs, back to waiting
55330 Sep 22 23:17:02.177 DEBG Flush :1272 extent_limit None deps:[JobId(1271)] res:true f:114 g:1
55331 Sep 22 23:17:02.177 INFO [lossy] sleeping 1 second
55332 Sep 22 23:17:02.178 INFO [lossy] skipping 1272
55333 Sep 22 23:17:02.178 INFO [lossy] skipping 1272
55334 Sep 22 23:17:02.178 INFO [lossy] skipping 1272
55335 Sep 22 23:17:02.178 DEBG Flush :1272 extent_limit None deps:[JobId(1271)] res:true f:114 g:1
55336 Sep 22 23:17:02.178 INFO [lossy] sleeping 1 second
55337 Sep 22 23:17:02.276 DEBG IO Read 1273 has deps [JobId(1272)]
55338 Sep 22 23:17:02.952 DEBG up_ds_listen was notified
55339 Sep 22 23:17:02.952 DEBG up_ds_listen process 1272
55340 Sep 22 23:17:02.952 DEBG [A] ack job 1272:273, : downstairs
55341 Sep 22 23:17:02.952 DEBG up_ds_listen checked 1 jobs, back to waiting
55342 Sep 22 23:17:02.959 DEBG IO Flush 1274 has deps [JobId(1273), JobId(1272)]
55343 Sep 22 23:17:02.959 DEBG Flush :1272 extent_limit None deps:[JobId(1271)] res:true f:114 g:1
55344 Sep 22 23:17:02.959 INFO [lossy] sleeping 1 second
55345 Sep 22 23:17:02.960 DEBG [rc] retire 1272 clears [JobId(1271), JobId(1272)], : downstairs
55346 Sep 22 23:17:03.178 WARN returning error on read!
55347 Sep 22 23:17:03.178 DEBG Read :1273 deps:[JobId(1272)] res:false
55348 Sep 22 23:17:03.178 INFO [lossy] skipping 1274
55349 Sep 22 23:17:03.179 WARN returning error on read!
55350 Sep 22 23:17:03.179 DEBG Read :1273 deps:[JobId(1272)] res:false
55351 Sep 22 23:17:03.185 DEBG Read :1273 deps:[JobId(1272)] res:true
55352 Sep 22 23:17:03.214 DEBG Read :1273 deps:[JobId(1272)] res:true
55353 Sep 22 23:17:03.236 ERRO [2] job id 1273 saw error GenericError("test error")
55354 Sep 22 23:17:03.236 ERRO [2] job id 1273 saw error GenericError("test error")
55355 Sep 22 23:17:03.239 DEBG Flush :1274 extent_limit None deps:[JobId(1273), JobId(1272)] res:true f:115 g:1
55356 Sep 22 23:17:03.239 INFO [lossy] sleeping 1 second
55357 Sep 22 23:17:03.288 DEBG Flush :1274 extent_limit None deps:[JobId(1273), JobId(1272)] res:true f:115 g:1
55358 Sep 22 23:17:03.288 INFO [lossy] sleeping 1 second
55359 Sep 22 23:17:03.621 DEBG [2] Read AckReady 1273, : downstairs
55360 Sep 22 23:17:03.622 DEBG up_ds_listen was notified
55361 Sep 22 23:17:03.622 DEBG up_ds_listen process 1273
55362 Sep 22 23:17:03.622 DEBG [A] ack job 1273:274, : downstairs
55363 Sep 22 23:17:03.675 DEBG up_ds_listen checked 1 jobs, back to waiting
55364 Sep 22 23:17:04.060 DEBG up_ds_listen was notified
55365 Sep 22 23:17:04.061 DEBG up_ds_listen process 1274
55366 Sep 22 23:17:04.061 DEBG [A] ack job 1274:275, : downstairs
55367 Sep 22 23:17:04.061 DEBG up_ds_listen checked 1 jobs, back to waiting
55368 Sep 22 23:17:04.061 INFO [lossy] skipping 1273
55369 Sep 22 23:17:04.067 DEBG Read :1273 deps:[JobId(1272)] res:true
55370 Sep 22 23:17:04.088 DEBG IO Read 1275 has deps [JobId(1274)]
55371 Sep 22 23:17:04.102 INFO [lossy] skipping 1274
55372 Sep 22 23:17:04.102 DEBG Flush :1274 extent_limit None deps:[JobId(1273), JobId(1272)] res:true f:115 g:1
55373 Sep 22 23:17:04.102 INFO [lossy] sleeping 1 second
55374 Sep 22 23:17:04.484 DEBG [rc] retire 1274 clears [JobId(1273), JobId(1274)], : downstairs
55375 Sep 22 23:17:04.484 DEBG IO Flush 1276 has deps [JobId(1275)]
55376 Sep 22 23:17:04.485 INFO [lossy] skipping 1275
55377 Sep 22 23:17:04.490 DEBG Read :1275 deps:[JobId(1274)] res:true
55378 Sep 22 23:17:04.512 INFO [lossy] skipping 1275
55379 Sep 22 23:17:04.518 DEBG Read :1275 deps:[JobId(1274)] res:true
55380 Sep 22 23:17:04.542 DEBG Flush :1276 extent_limit None deps:[JobId(1275)] res:true f:116 g:1
55381 Sep 22 23:17:04.542 INFO [lossy] sleeping 1 second
55382 Sep 22 23:17:04.543 INFO [lossy] sleeping 1 second
55383 Sep 22 23:17:04.972 DEBG [0] Read AckReady 1275, : downstairs
55384 Sep 22 23:17:05.305 DEBG [2] Read already AckReady 1275, : downstairs
55385 Sep 22 23:17:05.307 DEBG up_ds_listen was notified
55386 Sep 22 23:17:05.307 DEBG up_ds_listen process 1275
55387 Sep 22 23:17:05.307 DEBG [A] ack job 1275:276, : downstairs
55388 Sep 22 23:17:05.361 DEBG up_ds_listen checked 1 jobs, back to waiting
55389 Sep 22 23:17:05.367 DEBG Read :1275 deps:[JobId(1274)] res:true
55390 Sep 22 23:17:05.389 DEBG IO Read 1277 has deps [JobId(1276)]
55391 Sep 22 23:17:05.403 DEBG Flush :1276 extent_limit None deps:[JobId(1275)] res:true f:116 g:1
55392 Sep 22 23:17:05.408 DEBG Read :1277 deps:[JobId(1276)] res:true
55393 Sep 22 23:17:05.812 DEBG up_ds_listen was notified
55394 Sep 22 23:17:05.813 DEBG up_ds_listen process 1276
55395 Sep 22 23:17:05.813 DEBG [A] ack job 1276:277, : downstairs
55396 Sep 22 23:17:05.813 DEBG up_ds_listen checked 1 jobs, back to waiting
55397 Sep 22 23:17:05.813 INFO [lossy] skipping 1277
55398 Sep 22 23:17:05.813 INFO [lossy] skipping 1277
55399 Sep 22 23:17:05.819 DEBG Read :1277 deps:[JobId(1276)] res:true
55400 Sep 22 23:17:05.841 DEBG Flush :1276 extent_limit None deps:[JobId(1275)] res:true f:116 g:1
55401 Sep 22 23:17:05.848 DEBG Read :1277 deps:[JobId(1276)] res:true
55402 Sep 22 23:17:05.870 DEBG IO Flush 1278 has deps [JobId(1277), JobId(1276)]
55403 Sep 22 23:17:05.870 DEBG [rc] retire 1276 clears [JobId(1275), JobId(1276)], : downstairs
55404 Sep 22 23:17:05.873 INFO [lossy] sleeping 1 second
55405 Sep 22 23:17:06.254 DEBG [1] Read AckReady 1277, : downstairs
55406 Sep 22 23:17:06.255 DEBG up_ds_listen was notified
55407 Sep 22 23:17:06.255 DEBG up_ds_listen process 1277
55408 Sep 22 23:17:06.255 DEBG [A] ack job 1277:278, : downstairs
55409 Sep 22 23:17:06.308 DEBG up_ds_listen checked 1 jobs, back to waiting
55410 Sep 22 23:17:06.311 INFO [lossy] sleeping 1 second
55411 Sep 22 23:17:06.312 INFO [lossy] sleeping 1 second
55412 Sep 22 23:17:06.409 DEBG IO Read 1279 has deps [JobId(1278)]
55413 Sep 22 23:17:07.091 DEBG IO Flush 1280 has deps [JobId(1279), JobId(1278)]
55414 Sep 22 23:17:07.091 DEBG Flush :1278 extent_limit None deps:[JobId(1277), JobId(1276)] res:true f:117 g:1
55415 Sep 22 23:17:07.091 INFO [lossy] sleeping 1 second
55416 Sep 22 23:17:07.312 DEBG Flush :1278 extent_limit None deps:[JobId(1277), JobId(1276)] res:true f:117 g:1
55417 Sep 22 23:17:07.312 INFO [lossy] skipping 1279
55418 Sep 22 23:17:07.312 INFO [lossy] skipping 1280
55419 Sep 22 23:17:07.312 INFO [lossy] skipping 1279
55420 Sep 22 23:17:07.319 DEBG Read :1279 deps:[JobId(1278)] res:true
55421 Sep 22 23:17:07.340 DEBG Flush :1278 extent_limit None deps:[JobId(1277), JobId(1276)] res:true f:117 g:1
55422 Sep 22 23:17:07.340 INFO [lossy] skipping 1279
55423 Sep 22 23:17:07.341 WARN returning error on read!
55424 Sep 22 23:17:07.341 DEBG Read :1279 deps:[JobId(1278)] res:false
55425 Sep 22 23:17:07.341 INFO [lossy] skipping 1279
55426 Sep 22 23:17:07.347 DEBG Read :1279 deps:[JobId(1278)] res:true
55427 Sep 22 23:17:07.368 DEBG up_ds_listen was notified
55428 Sep 22 23:17:07.368 DEBG up_ds_listen process 1278
55429 Sep 22 23:17:07.368 DEBG [A] ack job 1278:279, : downstairs
55430 Sep 22 23:17:07.368 DEBG up_ds_listen checked 1 jobs, back to waiting
55431 Sep 22 23:17:07.369 DEBG [rc] retire 1278 clears [JobId(1277), JobId(1278)], : downstairs
55432 Sep 22 23:17:07.369 ERRO [0] job id 1279 saw error GenericError("test error")
55433 Sep 22 23:17:07.371 INFO [lossy] sleeping 1 second
55434 Sep 22 23:17:07.419 INFO [lossy] skipping 1280
55435 Sep 22 23:17:07.419 DEBG Flush :1280 extent_limit None deps:[JobId(1279), JobId(1278)] res:true f:118 g:1
55436 Sep 22 23:17:07.420 INFO [lossy] sleeping 1 second
55437 Sep 22 23:17:07.752 DEBG [2] Read AckReady 1279, : downstairs
55438 Sep 22 23:17:07.753 DEBG up_ds_listen was notified
55439 Sep 22 23:17:07.753 DEBG up_ds_listen process 1279
55440 Sep 22 23:17:07.753 DEBG [A] ack job 1279:280, : downstairs
55441 Sep 22 23:17:07.806 DEBG up_ds_listen checked 1 jobs, back to waiting
55442 Sep 22 23:17:08.191 INFO [lossy] skipping 1279
55443 Sep 22 23:17:08.191 INFO [lossy] skipping 1279
55444 Sep 22 23:17:08.197 DEBG Read :1279 deps:[JobId(1278)] res:true
55445 Sep 22 23:17:08.219 DEBG IO Read 1281 has deps [JobId(1280)]
55446 Sep 22 23:17:08.233 DEBG Flush :1280 extent_limit None deps:[JobId(1279), JobId(1278)] res:true f:118 g:1
55447 Sep 22 23:17:08.239 DEBG Read :1281 deps:[JobId(1280)] res:true
55448 Sep 22 23:17:08.643 DEBG up_ds_listen was notified
55449 Sep 22 23:17:08.643 DEBG up_ds_listen process 1280
55450 Sep 22 23:17:08.643 DEBG [A] ack job 1280:281, : downstairs
55451 Sep 22 23:17:08.643 DEBG up_ds_listen checked 1 jobs, back to waiting
55452 Sep 22 23:17:08.643 DEBG IO Flush 1282 has deps [JobId(1281), JobId(1280)]
55453 Sep 22 23:17:08.644 WARN returning error on flush!
55454 Sep 22 23:17:08.644 DEBG Flush :1280 extent_limit None deps:[JobId(1279), JobId(1278)] res:false f:118 g:1
55455 Sep 22 23:17:08.644 INFO [lossy] skipping 1281
55456 Sep 22 23:17:08.644 INFO [lossy] skipping 1280
55457 Sep 22 23:17:08.644 INFO [lossy] skipping 1280
55458 Sep 22 23:17:08.644 INFO [lossy] skipping 1280
55459 Sep 22 23:17:08.644 INFO [lossy] skipping 1280
55460 Sep 22 23:17:08.644 DEBG Flush :1280 extent_limit None deps:[JobId(1279), JobId(1278)] res:true f:118 g:1
55461 Sep 22 23:17:08.650 DEBG Read :1281 deps:[JobId(1280)] res:true
55462 Sep 22 23:17:08.678 DEBG Read :1281 deps:[JobId(1280)] res:true
55463 Sep 22 23:17:08.700 ERRO [2] job id 1280 saw error GenericError("test error")
55464 Sep 22 23:17:08.700 DEBG [rc] retire 1280 clears [JobId(1279), JobId(1280)], : downstairs
55465 Sep 22 23:17:08.703 INFO [lossy] sleeping 1 second
55466 Sep 22 23:17:09.086 DEBG [1] Read AckReady 1281, : downstairs
55467 Sep 22 23:17:09.087 DEBG up_ds_listen was notified
55468 Sep 22 23:17:09.087 DEBG up_ds_listen process 1281
55469 Sep 22 23:17:09.087 DEBG [A] ack job 1281:282, : downstairs
55470 Sep 22 23:17:09.141 DEBG up_ds_listen checked 1 jobs, back to waiting
55471 Sep 22 23:17:09.143 INFO [lossy] sleeping 1 second
55472 Sep 22 23:17:09.144 INFO [lossy] sleeping 1 second
55473 Sep 22 23:17:09.241 DEBG IO Read 1283 has deps [JobId(1282)]
55474 Sep 22 23:17:09.924 DEBG IO Flush 1284 has deps [JobId(1283), JobId(1282)]
55475 Sep 22 23:17:09.924 DEBG Flush :1282 extent_limit None deps:[JobId(1281), JobId(1280)] res:true f:119 g:1
55476 Sep 22 23:17:09.924 INFO [lossy] sleeping 1 second
55477 Sep 22 23:17:10.143 WARN returning error on flush!
55478 Sep 22 23:17:10.143 DEBG Flush :1282 extent_limit None deps:[JobId(1281), JobId(1280)] res:false f:119 g:1
55479 Sep 22 23:17:10.143 DEBG Flush :1282 extent_limit None deps:[JobId(1281), JobId(1280)] res:true f:119 g:1
55480 Sep 22 23:17:10.150 DEBG Read :1283 deps:[JobId(1282)] res:true
55481 Sep 22 23:17:10.172 WARN returning error on flush!
55482 Sep 22 23:17:10.172 DEBG Flush :1282 extent_limit None deps:[JobId(1281), JobId(1280)] res:false f:119 g:1
55483 Sep 22 23:17:10.172 INFO [lossy] skipping 1284
55484 Sep 22 23:17:10.172 INFO [lossy] skipping 1282
55485 Sep 22 23:17:10.172 DEBG Flush :1282 extent_limit None deps:[JobId(1281), JobId(1280)] res:true f:119 g:1
55486 Sep 22 23:17:10.173 WARN returning error on read!
55487 Sep 22 23:17:10.173 DEBG Read :1283 deps:[JobId(1282)] res:false
55488 Sep 22 23:17:10.173 WARN 1284 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
55489 Sep 22 23:17:10.173 INFO [lossy] skipping 1283
55490 Sep 22 23:17:10.179 DEBG Read :1283 deps:[JobId(1282)] res:true
55491 Sep 22 23:17:10.200 ERRO [2] job id 1282 saw error GenericError("test error")
55492 Sep 22 23:17:10.200 DEBG up_ds_listen was notified
55493 Sep 22 23:17:10.200 DEBG up_ds_listen process 1282
55494 Sep 22 23:17:10.200 DEBG [A] ack job 1282:283, : downstairs
55495 Sep 22 23:17:10.200 DEBG up_ds_listen checked 1 jobs, back to waiting
55496 Sep 22 23:17:10.200 ERRO [0] job id 1282 saw error GenericError("test error")
55497 Sep 22 23:17:10.201 DEBG [rc] retire 1282 clears [JobId(1281), JobId(1282)], : downstairs
55498 Sep 22 23:17:10.201 ERRO [0] job id 1283 saw error GenericError("test error")
55499 Sep 22 23:17:10.203 DEBG Flush :1284 extent_limit None deps:[JobId(1283), JobId(1282)] res:true f:120 g:1
55500 Sep 22 23:17:10.203 INFO [lossy] sleeping 1 second
55501 Sep 22 23:17:10.252 INFO [lossy] skipping 1284
55502 Sep 22 23:17:10.252 INFO [lossy] skipping 1284
55503 Sep 22 23:17:10.252 INFO [lossy] skipping 1284
55504 Sep 22 23:17:10.252 DEBG Flush :1284 extent_limit None deps:[JobId(1283), JobId(1282)] res:true f:120 g:1
55505 Sep 22 23:17:10.252 INFO [lossy] sleeping 1 second
55506 Sep 22 23:17:10.585 DEBG [2] Read AckReady 1283, : downstairs
55507 Sep 22 23:17:10.586 DEBG up_ds_listen was notified
55508 Sep 22 23:17:10.586 DEBG up_ds_listen process 1283
55509 Sep 22 23:17:10.586 DEBG [A] ack job 1283:284, : downstairs
55510 Sep 22 23:17:10.639 DEBG up_ds_listen checked 1 jobs, back to waiting
55511 Sep 22 23:17:11.023 DEBG up_ds_listen was notified
55512 Sep 22 23:17:11.023 DEBG up_ds_listen process 1284
55513 Sep 22 23:17:11.023 DEBG [A] ack job 1284:285, : downstairs
55514 Sep 22 23:17:11.023 DEBG up_ds_listen checked 1 jobs, back to waiting
55515 Sep 22 23:17:11.023 INFO [lossy] skipping 1283
55516 Sep 22 23:17:11.029 DEBG Read :1283 deps:[JobId(1282)] res:true
55517 Sep 22 23:17:11.051 DEBG IO Read 1285 has deps [JobId(1284)]
55518 Sep 22 23:17:11.065 INFO [lossy] sleeping 1 second
55519 Sep 22 23:17:11.447 DEBG IO Flush 1286 has deps [JobId(1285), JobId(1284)]
55520 Sep 22 23:17:11.454 DEBG Read :1285 deps:[JobId(1284)] res:true
55521 Sep 22 23:17:11.481 DEBG Read :1285 deps:[JobId(1284)] res:true
55522 Sep 22 23:17:11.505 WARN returning error on flush!
55523 Sep 22 23:17:11.505 DEBG Flush :1286 extent_limit None deps:[JobId(1285), JobId(1284)] res:false f:121 g:1
55524 Sep 22 23:17:11.505 INFO [lossy] skipping 1286
55525 Sep 22 23:17:11.505 DEBG Flush :1286 extent_limit None deps:[JobId(1285), JobId(1284)] res:true f:121 g:1
55526 Sep 22 23:17:11.505 INFO [lossy] sleeping 1 second
55527 Sep 22 23:17:11.506 WARN returning error on flush!
55528 Sep 22 23:17:11.506 DEBG Flush :1286 extent_limit None deps:[JobId(1285), JobId(1284)] res:false f:121 g:1
55529 Sep 22 23:17:11.506 WARN returning error on flush!
55530 Sep 22 23:17:11.506 DEBG Flush :1286 extent_limit None deps:[JobId(1285), JobId(1284)] res:false f:121 g:1
55531 Sep 22 23:17:11.506 DEBG Flush :1286 extent_limit None deps:[JobId(1285), JobId(1284)] res:true f:121 g:1
55532 Sep 22 23:17:11.506 INFO [lossy] sleeping 1 second
55533 Sep 22 23:17:11.934 DEBG [0] Read AckReady 1285, : downstairs
55534 Sep 22 23:17:11.934 ERRO [0] job id 1286 saw error GenericError("test error")
55535 Sep 22 23:17:11.934 ERRO [0] job id 1286 saw error GenericError("test error")
55536 Sep 22 23:17:12.266 DEBG [2] Read already AckReady 1285, : downstairs
55537 Sep 22 23:17:12.268 ERRO [2] job id 1286 saw error GenericError("test error")
55538 Sep 22 23:17:12.268 DEBG up_ds_listen was notified
55539 Sep 22 23:17:12.268 DEBG up_ds_listen process 1285
55540 Sep 22 23:17:12.268 DEBG [A] ack job 1285:286, : downstairs
55541 Sep 22 23:17:12.322 DEBG up_ds_listen process 1286
55542 Sep 22 23:17:12.322 DEBG [A] ack job 1286:287, : downstairs
55543 Sep 22 23:17:12.322 DEBG up_ds_listen checked 2 jobs, back to waiting
55544 Sep 22 23:17:12.322 DEBG up_ds_listen was notified
55545 Sep 22 23:17:12.322 DEBG up_ds_listen checked 0 jobs, back to waiting
55546 Sep 22 23:17:12.323 WARN returning error on flush!
55547 Sep 22 23:17:12.323 DEBG Flush :1284 extent_limit None deps:[JobId(1283), JobId(1282)] res:false f:120 g:1
55548 Sep 22 23:17:12.323 INFO [lossy] skipping 1286
55549 Sep 22 23:17:12.323 INFO [lossy] skipping 1284
55550 Sep 22 23:17:12.323 INFO [lossy] skipping 1284
55551 Sep 22 23:17:12.323 INFO [lossy] skipping 1284
55552 Sep 22 23:17:12.323 DEBG Flush :1284 extent_limit None deps:[JobId(1283), JobId(1282)] res:true f:120 g:1
55553 Sep 22 23:17:12.329 DEBG Read :1285 deps:[JobId(1284)] res:true
55554 Sep 22 23:17:12.351 DEBG IO Read 1287 has deps [JobId(1286)]
55555 Sep 22 23:17:12.351 ERRO [1] job id 1284 saw error GenericError("test error")
55556 Sep 22 23:17:12.351 DEBG [rc] retire 1284 clears [JobId(1283), JobId(1284)], : downstairs
55557 Sep 22 23:17:12.364 DEBG Flush :1286 extent_limit None deps:[JobId(1285), JobId(1284)] res:true f:121 g:1
55558 Sep 22 23:17:12.365 INFO [lossy] skipping 1287
55559 Sep 22 23:17:12.365 INFO [lossy] skipping 1287
55560 Sep 22 23:17:12.370 DEBG Read :1287 deps:[JobId(1286)] res:true
55561 Sep 22 23:17:12.773 DEBG [rc] retire 1286 clears [JobId(1285), JobId(1286)], : downstairs
55562 Sep 22 23:17:12.773 WARN returning error on read!
55563 Sep 22 23:17:12.774 DEBG Read :1287 deps:[JobId(1286)] res:false
55564 Sep 22 23:17:12.779 DEBG Read :1287 deps:[JobId(1286)] res:true
55565 Sep 22 23:17:12.801 WARN returning error on read!
55566 Sep 22 23:17:12.801 DEBG Read :1287 deps:[JobId(1286)] res:false
55567 Sep 22 23:17:12.806 DEBG Read :1287 deps:[JobId(1286)] res:true
55568 Sep 22 23:17:12.828 DEBG IO Flush 1288 has deps [JobId(1287)]
55569 Sep 22 23:17:12.828 ERRO [0] job id 1287 saw error GenericError("test error")
55570 Sep 22 23:17:12.828 ERRO [2] job id 1287 saw error GenericError("test error")
55571 Sep 22 23:17:12.831 DEBG Flush :1288 extent_limit None deps:[JobId(1287)] res:true f:122 g:1
55572 Sep 22 23:17:12.831 INFO [lossy] sleeping 1 second
55573 Sep 22 23:17:13.211 DEBG [1] Read AckReady 1287, : downstairs
55574 Sep 22 23:17:13.212 DEBG up_ds_listen was notified
55575 Sep 22 23:17:13.212 DEBG up_ds_listen process 1287
55576 Sep 22 23:17:13.212 DEBG [A] ack job 1287:288, : downstairs
55577 Sep 22 23:17:13.265 DEBG up_ds_listen checked 1 jobs, back to waiting
55578 Sep 22 23:17:13.267 WARN returning error on flush!
55579 Sep 22 23:17:13.267 DEBG Flush :1288 extent_limit None deps:[JobId(1287)] res:false f:122 g:1
55580 Sep 22 23:17:13.267 DEBG Flush :1288 extent_limit None deps:[JobId(1287)] res:true f:122 g:1
55581 Sep 22 23:17:13.267 INFO [lossy] sleeping 1 second
55582 Sep 22 23:17:13.269 DEBG Flush :1288 extent_limit None deps:[JobId(1287)] res:true f:122 g:1
55583 Sep 22 23:17:13.269 INFO [lossy] sleeping 1 second
55584 Sep 22 23:17:13.364 DEBG IO Read 1289 has deps [JobId(1288)]
55585 Sep 22 23:17:14.032 ERRO [2] job id 1288 saw error GenericError("test error")
55586 Sep 22 23:17:14.037 DEBG up_ds_listen was notified
55587 Sep 22 23:17:14.037 DEBG up_ds_listen process 1288
55588 Sep 22 23:17:14.037 DEBG [A] ack job 1288:289, : downstairs
55589 Sep 22 23:17:14.037 DEBG [rc] retire 1288 clears [JobId(1287), JobId(1288)], : downstairs
55590 Sep 22 23:17:14.037 DEBG up_ds_listen checked 1 jobs, back to waiting
55591 Sep 22 23:17:14.044 DEBG IO Flush 1290 has deps [JobId(1289)]
55592 Sep 22 23:17:14.050 DEBG Read :1289 deps:[JobId(1288)] res:true
55593 Sep 22 23:17:14.073 INFO [lossy] skipping 1290
55594 Sep 22 23:17:14.073 DEBG Flush :1290 extent_limit None deps:[JobId(1289)] res:true f:123 g:1
55595 Sep 22 23:17:14.452 DEBG [1] Read AckReady 1289, : downstairs
55596 Sep 22 23:17:14.453 DEBG up_ds_listen was notified
55597 Sep 22 23:17:14.453 DEBG up_ds_listen process 1289
55598 Sep 22 23:17:14.453 DEBG [A] ack job 1289:290, : downstairs
55599 Sep 22 23:17:14.506 DEBG up_ds_listen checked 1 jobs, back to waiting
55600 Sep 22 23:17:14.507 INFO [lossy] skipping 1289
55601 Sep 22 23:17:14.513 DEBG Read :1289 deps:[JobId(1288)] res:true
55602 Sep 22 23:17:14.540 DEBG Read :1289 deps:[JobId(1288)] res:true
55603 Sep 22 23:17:14.562 DEBG IO Read 1291 has deps [JobId(1290)]
55604 Sep 22 23:17:14.562 DEBG IO Flush 1292 has deps [JobId(1291), JobId(1290)]
55605 Sep 22 23:17:14.574 INFO [lossy] skipping 1291
55606 Sep 22 23:17:14.581 DEBG Read :1291 deps:[JobId(1290)] res:true
55607 Sep 22 23:17:14.605 INFO [lossy] sleeping 1 second
55608 Sep 22 23:17:14.605 INFO [lossy] skipping 1290
55609 Sep 22 23:17:14.605 DEBG Flush :1290 extent_limit None deps:[JobId(1289)] res:true f:123 g:1
55610 Sep 22 23:17:14.606 INFO [lossy] sleeping 1 second
55611 Sep 22 23:17:15.368 DEBG up_ds_listen was notified
55612 Sep 22 23:17:15.368 DEBG up_ds_listen process 1290
55613 Sep 22 23:17:15.368 DEBG [A] ack job 1290:291, : downstairs
55614 Sep 22 23:17:15.368 DEBG up_ds_listen checked 1 jobs, back to waiting
55615 Sep 22 23:17:15.370 INFO [lossy] sleeping 1 second
55616 Sep 22 23:17:15.749 DEBG [1] Read AckReady 1291, : downstairs
55617 Sep 22 23:17:15.750 DEBG up_ds_listen was notified
55618 Sep 22 23:17:15.750 DEBG up_ds_listen process 1291
55619 Sep 22 23:17:15.750 DEBG [A] ack job 1291:292, : downstairs
55620 Sep 22 23:17:15.803 DEBG up_ds_listen checked 1 jobs, back to waiting
55621 Sep 22 23:17:15.804 WARN returning error on flush!
55622 Sep 22 23:17:15.804 DEBG Flush :1290 extent_limit None deps:[JobId(1289)] res:false f:123 g:1
55623 Sep 22 23:17:15.804 INFO [lossy] skipping 1291
55624 Sep 22 23:17:15.804 INFO [lossy] skipping 1292
55625 Sep 22 23:17:15.804 DEBG Flush :1290 extent_limit None deps:[JobId(1289)] res:true f:123 g:1
55626 Sep 22 23:17:15.810 DEBG Read :1291 deps:[JobId(1290)] res:true
55627 Sep 22 23:17:15.837 DEBG Read :1291 deps:[JobId(1290)] res:true
55628 Sep 22 23:17:15.859 DEBG IO Read 1293 has deps [JobId(1292)]
55629 Sep 22 23:17:15.859 ERRO [2] job id 1290 saw error GenericError("test error")
55630 Sep 22 23:17:15.859 DEBG [rc] retire 1290 clears [JobId(1289), JobId(1290)], : downstairs
55631 Sep 22 23:17:15.872 DEBG IO Flush 1294 has deps [JobId(1293), JobId(1292)]
55632 Sep 22 23:17:15.874 DEBG Flush :1292 extent_limit None deps:[JobId(1291), JobId(1290)] res:true f:124 g:1
55633 Sep 22 23:17:15.874 WARN returning error on read!
55634 Sep 22 23:17:15.874 DEBG Read :1293 deps:[JobId(1292)] res:false
55635 Sep 22 23:17:15.880 DEBG Read :1293 deps:[JobId(1292)] res:true
55636 Sep 22 23:17:15.902 DEBG Flush :1292 extent_limit None deps:[JobId(1291), JobId(1290)] res:true f:124 g:1
55637 Sep 22 23:17:15.908 DEBG Read :1293 deps:[JobId(1292)] res:true
55638 Sep 22 23:17:16.693 ERRO [2] job id 1293 saw error GenericError("test error")
55639 Sep 22 23:17:16.693 DEBG up_ds_listen was notified
55640 Sep 22 23:17:16.693 DEBG up_ds_listen process 1292
55641 Sep 22 23:17:16.693 DEBG [A] ack job 1292:293, : downstairs
55642 Sep 22 23:17:16.693 DEBG up_ds_listen checked 1 jobs, back to waiting
55643 Sep 22 23:17:16.693 DEBG Flush :1292 extent_limit None deps:[JobId(1291), JobId(1290)] res:true f:124 g:1
55644 Sep 22 23:17:16.699 DEBG Read :1293 deps:[JobId(1292)] res:true
55645 Sep 22 23:17:16.721 DEBG [rc] retire 1292 clears [JobId(1291), JobId(1292)], : downstairs
55646 Sep 22 23:17:16.724 WARN returning error on flush!
55647 Sep 22 23:17:16.724 DEBG Flush :1294 extent_limit None deps:[JobId(1293), JobId(1292)] res:false f:125 g:1
55648 Sep 22 23:17:16.724 DEBG Flush :1294 extent_limit None deps:[JobId(1293), JobId(1292)] res:true f:125 g:1
55649 Sep 22 23:17:16.724 INFO [lossy] sleeping 1 second
55650 Sep 22 23:17:16.725 DEBG Flush :1294 extent_limit None deps:[JobId(1293), JobId(1292)] res:true f:125 g:1
55651 Sep 22 23:17:16.725 INFO [lossy] sleeping 1 second
55652 Sep 22 23:17:17.154 DEBG [0] Read AckReady 1293, : downstairs
55653 Sep 22 23:17:17.487 DEBG [2] Read already AckReady 1293, : downstairs
55654 Sep 22 23:17:17.488 ERRO [2] job id 1294 saw error GenericError("test error")
55655 Sep 22 23:17:17.488 DEBG up_ds_listen was notified
55656 Sep 22 23:17:17.488 DEBG up_ds_listen process 1293
55657 Sep 22 23:17:17.488 DEBG [A] ack job 1293:294, : downstairs
55658 Sep 22 23:17:17.542 DEBG up_ds_listen process 1294
55659 Sep 22 23:17:17.542 DEBG [A] ack job 1294:295, : downstairs
55660 Sep 22 23:17:17.542 DEBG up_ds_listen checked 2 jobs, back to waiting
55661 Sep 22 23:17:17.542 DEBG up_ds_listen was notified
55662 Sep 22 23:17:17.542 DEBG up_ds_listen checked 0 jobs, back to waiting
55663 Sep 22 23:17:17.544 DEBG Flush :1294 extent_limit None deps:[JobId(1293), JobId(1292)] res:true f:125 g:1
55664 Sep 22 23:17:17.544 INFO [lossy] sleeping 1 second
55665 Sep 22 23:17:17.592 DEBG IO Read 1295 has deps [JobId(1294)]
55666 Sep 22 23:17:17.926 DEBG [rc] retire 1294 clears [JobId(1293), JobId(1294)], : downstairs
55667 Sep 22 23:17:17.938 INFO [lossy] sleeping 1 second
55668 Sep 22 23:17:17.938 INFO [lossy] sleeping 1 second
55669 Sep 22 23:17:18.045 DEBG IO Flush 1296 has deps [JobId(1295)]
55670 Sep 22 23:17:18.552 DEBG Read :1295 deps:[JobId(1294)] res:true
55671 Sep 22 23:17:18.575 DEBG Flush :1296 extent_limit None deps:[JobId(1295)] res:true f:126 g:1
55672 Sep 22 23:17:18.575 INFO [lossy] sleeping 1 second
55673 Sep 22 23:17:18.951 DEBG [1] Read AckReady 1295, : downstairs
55674 Sep 22 23:17:18.952 DEBG up_ds_listen was notified
55675 Sep 22 23:17:18.952 DEBG up_ds_listen process 1295
55676 Sep 22 23:17:18.952 DEBG [A] ack job 1295:296, : downstairs
55677 Sep 22 23:17:19.004 DEBG up_ds_listen checked 1 jobs, back to waiting
55678 Sep 22 23:17:19.011 DEBG Read :1295 deps:[JobId(1294)] res:true
55679 Sep 22 23:17:19.039 DEBG Read :1295 deps:[JobId(1294)] res:true
55680 Sep 22 23:17:19.061 DEBG IO Read 1297 has deps [JobId(1296)]
55681 Sep 22 23:17:19.074 DEBG IO Flush 1298 has deps [JobId(1297), JobId(1296)]
55682 Sep 22 23:17:19.075 DEBG Flush :1296 extent_limit None deps:[JobId(1295)] res:true f:126 g:1
55683 Sep 22 23:17:19.081 DEBG Read :1297 deps:[JobId(1296)] res:true
55684 Sep 22 23:17:19.103 DEBG Flush :1296 extent_limit None deps:[JobId(1295)] res:true f:126 g:1
55685 Sep 22 23:17:19.109 DEBG Read :1297 deps:[JobId(1296)] res:true
55686 Sep 22 23:17:19.886 DEBG up_ds_listen was notified
55687 Sep 22 23:17:19.886 DEBG up_ds_listen process 1296
55688 Sep 22 23:17:19.887 DEBG [A] ack job 1296:297, : downstairs
55689 Sep 22 23:17:19.887 DEBG [rc] retire 1296 clears [JobId(1295), JobId(1296)], : downstairs
55690 Sep 22 23:17:19.887 DEBG up_ds_listen checked 1 jobs, back to waiting
55691 Sep 22 23:17:19.887 INFO [lossy] skipping 1297
55692 Sep 22 23:17:19.893 DEBG Read :1297 deps:[JobId(1296)] res:true
55693 Sep 22 23:17:19.917 DEBG Flush :1298 extent_limit None deps:[JobId(1297), JobId(1296)] res:true f:127 g:1
55694 Sep 22 23:17:19.917 INFO [lossy] sleeping 1 second
55695 Sep 22 23:17:19.919 INFO [lossy] skipping 1298
55696 Sep 22 23:17:19.919 INFO [lossy] skipping 1298
55697 Sep 22 23:17:19.919 DEBG Flush :1298 extent_limit None deps:[JobId(1297), JobId(1296)] res:true f:127 g:1
55698 Sep 22 23:17:20.345 DEBG [0] Read AckReady 1297, : downstairs
55699 Sep 22 23:17:20.674 DEBG [2] Read already AckReady 1297, : downstairs
55700 Sep 22 23:17:20.676 DEBG up_ds_listen was notified
55701 Sep 22 23:17:20.676 DEBG up_ds_listen process 1297
55702 Sep 22 23:17:20.676 DEBG [A] ack job 1297:298, : downstairs
55703 Sep 22 23:17:20.730 DEBG up_ds_listen process 1298
55704 Sep 22 23:17:20.730 DEBG [A] ack job 1298:299, : downstairs
55705 Sep 22 23:17:20.730 DEBG up_ds_listen checked 2 jobs, back to waiting
55706 Sep 22 23:17:20.730 DEBG up_ds_listen was notified
55707 Sep 22 23:17:20.730 DEBG up_ds_listen checked 0 jobs, back to waiting
55708 Sep 22 23:17:20.732 INFO [lossy] skipping 1298
55709 Sep 22 23:17:20.732 WARN returning error on flush!
55710 Sep 22 23:17:20.732 DEBG Flush :1298 extent_limit None deps:[JobId(1297), JobId(1296)] res:false f:127 g:1
55711 Sep 22 23:17:20.732 DEBG Flush :1298 extent_limit None deps:[JobId(1297), JobId(1296)] res:true f:127 g:1
55712 Sep 22 23:17:20.780 DEBG IO Read 1299 has deps [JobId(1298)]
55713 Sep 22 23:17:21.111 ERRO [1] job id 1298 saw error GenericError("test error")
55714 Sep 22 23:17:21.111 DEBG [rc] retire 1298 clears [JobId(1297), JobId(1298)], : downstairs
55715 Sep 22 23:17:21.123 INFO [lossy] sleeping 1 second
55716 Sep 22 23:17:21.129 DEBG Read :1299 deps:[JobId(1298)] res:true
55717 Sep 22 23:17:21.156 DEBG Read :1299 deps:[JobId(1298)] res:true
55718 Sep 22 23:17:21.604 DEBG [0] Read AckReady 1299, : downstairs
55719 Sep 22 23:17:21.934 DEBG [1] Read already AckReady 1299, : downstairs
55720 Sep 22 23:17:21.936 DEBG up_ds_listen was notified
55721 Sep 22 23:17:21.936 DEBG up_ds_listen process 1299
55722 Sep 22 23:17:21.936 DEBG [A] ack job 1299:300, : downstairs
55723 Sep 22 23:17:21.989 DEBG up_ds_listen checked 1 jobs, back to waiting
55724 Sep 22 23:17:21.990 DEBG IO Flush 1300 has deps [JobId(1299)]
55725 Sep 22 23:17:21.990 INFO [lossy] sleeping 1 second
55726 Sep 22 23:17:21.991 DEBG IO Read 1301 has deps [JobId(1300)]
55727 Sep 22 23:17:21.996 INFO [lossy] sleeping 1 second
55728 Sep 22 23:17:22.130 DEBG Read :1299 deps:[JobId(1298)] res:true
55729 Sep 22 23:17:22.153 DEBG Flush :1300 extent_limit None deps:[JobId(1299)] res:true f:128 g:1
55730 Sep 22 23:17:22.158 DEBG Read :1301 deps:[JobId(1300)] res:true
55731 Sep 22 23:17:22.559 DEBG IO Flush 1302 has deps [JobId(1301), JobId(1300)]
55732 Sep 22 23:17:22.561 DEBG Flush :1302 extent_limit None deps:[JobId(1301), JobId(1300)] res:true f:129 g:1
55733 Sep 22 23:17:22.562 INFO [lossy] sleeping 1 second
55734 Sep 22 23:17:22.937 DEBG [2] Read AckReady 1301, : downstairs
55735 Sep 22 23:17:22.938 DEBG up_ds_listen was notified
55736 Sep 22 23:17:22.938 DEBG up_ds_listen process 1301
55737 Sep 22 23:17:22.938 DEBG [A] ack job 1301:302, : downstairs
55738 Sep 22 23:17:22.991 DEBG up_ds_listen checked 1 jobs, back to waiting
55739 Sep 22 23:17:22.992 INFO [lossy] skipping 1300
55740 Sep 22 23:17:22.992 DEBG Flush :1300 extent_limit None deps:[JobId(1299)] res:true f:128 g:1
55741 Sep 22 23:17:22.992 WARN returning error on read!
55742 Sep 22 23:17:22.992 DEBG Read :1301 deps:[JobId(1300)] res:false
55743 Sep 22 23:17:22.992 INFO [lossy] skipping 1302
55744 Sep 22 23:17:22.992 INFO [lossy] skipping 1301
55745 Sep 22 23:17:22.992 WARN 1302 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
55746 Sep 22 23:17:22.992 INFO [lossy] skipping 1301
55747 Sep 22 23:17:22.992 WARN returning error on read!
55748 Sep 22 23:17:22.992 DEBG Read :1301 deps:[JobId(1300)] res:false
55749 Sep 22 23:17:22.992 INFO [lossy] skipping 1301
55750 Sep 22 23:17:22.992 INFO [lossy] skipping 1301
55751 Sep 22 23:17:22.993 WARN returning error on read!
55752 Sep 22 23:17:22.993 DEBG Read :1301 deps:[JobId(1300)] res:false
55753 Sep 22 23:17:22.999 DEBG Read :1301 deps:[JobId(1300)] res:true
55754 Sep 22 23:17:23.020 DEBG Flush :1300 extent_limit None deps:[JobId(1299)] res:true f:128 g:1
55755 Sep 22 23:17:23.020 WARN returning error on read!
55756 Sep 22 23:17:23.020 DEBG Read :1301 deps:[JobId(1300)] res:false
55757 Sep 22 23:17:23.027 DEBG Read :1301 deps:[JobId(1300)] res:true
55758 Sep 22 23:17:23.049 DEBG IO Read 1303 has deps [JobId(1302)]
55759 Sep 22 23:17:23.049 ERRO [1] job id 1301 saw error GenericError("test error")
55760 Sep 22 23:17:23.049 ERRO [1] job id 1301 saw error GenericError("test error")
55761 Sep 22 23:17:23.049 ERRO [1] job id 1301 saw error GenericError("test error")
55762 Sep 22 23:17:23.054 DEBG up_ds_listen was notified
55763 Sep 22 23:17:23.054 DEBG up_ds_listen process 1300
55764 Sep 22 23:17:23.054 DEBG [A] ack job 1300:301, : downstairs
55765 Sep 22 23:17:23.054 DEBG up_ds_listen checked 1 jobs, back to waiting
55766 Sep 22 23:17:23.061 DEBG [rc] retire 1300 clears [JobId(1299), JobId(1300)], : downstairs
55767 Sep 22 23:17:23.061 ERRO [0] job id 1301 saw error GenericError("test error")
55768 Sep 22 23:17:23.061 DEBG IO Flush 1304 has deps [JobId(1303), JobId(1302)]
55769 Sep 22 23:17:23.064 DEBG Flush :1302 extent_limit None deps:[JobId(1301), JobId(1300)] res:true f:129 g:1
55770 Sep 22 23:17:23.064 WARN returning error on read!
55771 Sep 22 23:17:23.064 DEBG Read :1303 deps:[JobId(1302)] res:false
55772 Sep 22 23:17:23.064 WARN returning error on read!
55773 Sep 22 23:17:23.064 DEBG Read :1303 deps:[JobId(1302)] res:false
55774 Sep 22 23:17:23.064 WARN returning error on read!
55775 Sep 22 23:17:23.064 DEBG Read :1303 deps:[JobId(1302)] res:false
55776 Sep 22 23:17:23.070 DEBG Read :1303 deps:[JobId(1302)] res:true
55777 Sep 22 23:17:23.140 DEBG Flush :1302 extent_limit None deps:[JobId(1301), JobId(1300)] res:true f:129 g:1
55778 Sep 22 23:17:23.140 INFO [lossy] skipping 1303
55779 Sep 22 23:17:23.140 WARN returning error on read!
55780 Sep 22 23:17:23.140 DEBG Read :1303 deps:[JobId(1302)] res:false
55781 Sep 22 23:17:23.140 INFO [lossy] skipping 1303
55782 Sep 22 23:17:23.146 DEBG Read :1303 deps:[JobId(1302)] res:true
55783 Sep 22 23:17:23.498 ERRO [1] job id 1303 saw error GenericError("test error")
55784 Sep 22 23:17:23.499 ERRO [1] job id 1303 saw error GenericError("test error")
55785 Sep 22 23:17:23.499 ERRO [1] job id 1303 saw error GenericError("test error")
55786 Sep 22 23:17:23.499 DEBG up_ds_listen was notified
55787 Sep 22 23:17:23.499 DEBG up_ds_listen process 1302
55788 Sep 22 23:17:23.499 DEBG [A] ack job 1302:303, : downstairs
55789 Sep 22 23:17:23.499 DEBG up_ds_listen checked 1 jobs, back to waiting
55790 Sep 22 23:17:23.877 DEBG [rc] retire 1302 clears [JobId(1301), JobId(1302)], : downstairs
55791 Sep 22 23:17:23.877 ERRO [0] job id 1303 saw error GenericError("test error")
55792 Sep 22 23:17:23.883 DEBG Read :1303 deps:[JobId(1302)] res:true
55793 Sep 22 23:17:23.907 INFO [lossy] skipping 1304
55794 Sep 22 23:17:23.907 DEBG Flush :1304 extent_limit None deps:[JobId(1303), JobId(1302)] res:true f:130 g:1
55795 Sep 22 23:17:23.955 INFO [lossy] skipping 1304
55796 Sep 22 23:17:23.955 WARN returning error on flush!
55797 Sep 22 23:17:23.955 DEBG Flush :1304 extent_limit None deps:[JobId(1303), JobId(1302)] res:false f:130 g:1
55798 Sep 22 23:17:23.955 DEBG Flush :1304 extent_limit None deps:[JobId(1303), JobId(1302)] res:true f:130 g:1
55799 Sep 22 23:17:23.956 INFO [lossy] sleeping 1 second
55800 Sep 22 23:17:24.284 DEBG [1] Read AckReady 1303, : downstairs
55801 Sep 22 23:17:24.285 DEBG up_ds_listen was notified
55802 Sep 22 23:17:24.285 DEBG up_ds_listen process 1303
55803 Sep 22 23:17:24.286 DEBG [A] ack job 1303:304, : downstairs
55804 Sep 22 23:17:24.339 DEBG up_ds_listen checked 1 jobs, back to waiting
55805 Sep 22 23:17:24.718 ERRO [0] job id 1304 saw error GenericError("test error")
55806 Sep 22 23:17:24.719 DEBG up_ds_listen was notified
55807 Sep 22 23:17:24.719 DEBG up_ds_listen process 1304
55808 Sep 22 23:17:24.719 DEBG [A] ack job 1304:305, : downstairs
55809 Sep 22 23:17:24.719 DEBG up_ds_listen checked 1 jobs, back to waiting
55810 Sep 22 23:17:24.720 DEBG Flush :1304 extent_limit None deps:[JobId(1303), JobId(1302)] res:true f:130 g:1
55811 Sep 22 23:17:24.720 INFO [lossy] sleeping 1 second
55812 Sep 22 23:17:24.720 DEBG IO Read 1305 has deps [JobId(1304)]
55813 Sep 22 23:17:25.111 DEBG [rc] retire 1304 clears [JobId(1303), JobId(1304)], : downstairs
55814 Sep 22 23:17:25.117 DEBG Read :1305 deps:[JobId(1304)] res:true
55815 Sep 22 23:17:25.139 INFO [lossy] skipping 1305
55816 Sep 22 23:17:25.139 INFO [lossy] skipping 1305
55817 Sep 22 23:17:25.144 DEBG Read :1305 deps:[JobId(1304)] res:true
55818 Sep 22 23:17:25.545 DEBG [1] Read AckReady 1305, : downstairs
55819 Sep 22 23:17:25.545 DEBG up_ds_listen was notified
55820 Sep 22 23:17:25.545 DEBG up_ds_listen process 1305
55821 Sep 22 23:17:25.545 DEBG [A] ack job 1305:306, : downstairs
55822 Sep 22 23:17:25.599 DEBG up_ds_listen checked 1 jobs, back to waiting
55823 Sep 22 23:17:25.647 DEBG IO Flush 1306 has deps [JobId(1305)]
55824 Sep 22 23:17:25.984 DEBG Read :1305 deps:[JobId(1304)] res:true
55825 Sep 22 23:17:26.006 DEBG IO Read 1307 has deps [JobId(1306)]
55826 Sep 22 23:17:26.011 DEBG Flush :1306 extent_limit None deps:[JobId(1305)] res:true f:131 g:1
55827 Sep 22 23:17:26.011 DEBG Flush :1306 extent_limit None deps:[JobId(1305)] res:true f:131 g:1
55828 Sep 22 23:17:26.018 DEBG up_ds_listen was notified
55829 Sep 22 23:17:26.018 DEBG up_ds_listen process 1306
55830 Sep 22 23:17:26.018 DEBG [A] ack job 1306:307, : downstairs
55831 Sep 22 23:17:26.018 DEBG up_ds_listen checked 1 jobs, back to waiting
55832 Sep 22 23:17:26.018 INFO [lossy] sleeping 1 second
55833 Sep 22 23:17:26.024 DEBG Read :1307 deps:[JobId(1306)] res:true
55834 Sep 22 23:17:26.048 WARN returning error on flush!
55835 Sep 22 23:17:26.048 DEBG Flush :1306 extent_limit None deps:[JobId(1305)] res:false f:131 g:1
55836 Sep 22 23:17:26.048 INFO [lossy] skipping 1307
55837 Sep 22 23:17:26.048 DEBG Flush :1306 extent_limit None deps:[JobId(1305)] res:true f:131 g:1
55838 Sep 22 23:17:26.054 DEBG Read :1307 deps:[JobId(1306)] res:true
55839 Sep 22 23:17:26.454 ERRO [2] job id 1306 saw error GenericError("test error")
55840 Sep 22 23:17:26.454 DEBG [rc] retire 1306 clears [JobId(1305), JobId(1306)], : downstairs
55841 Sep 22 23:17:26.501 DEBG IO Flush 1308 has deps [JobId(1307)]
55842 Sep 22 23:17:26.829 DEBG [0] Read AckReady 1307, : downstairs
55843 Sep 22 23:17:26.830 DEBG up_ds_listen was notified
55844 Sep 22 23:17:26.830 DEBG up_ds_listen process 1307
55845 Sep 22 23:17:26.831 DEBG [A] ack job 1307:308, : downstairs
55846 Sep 22 23:17:26.883 DEBG up_ds_listen checked 1 jobs, back to waiting
55847 Sep 22 23:17:26.884 DEBG Flush :1308 extent_limit None deps:[JobId(1307)] res:true f:132 g:1
55848 Sep 22 23:17:26.885 DEBG IO Read 1309 has deps [JobId(1308)]
55849 Sep 22 23:17:26.903 DEBG Read :1309 deps:[JobId(1308)] res:true
55850 Sep 22 23:17:26.926 INFO [lossy] skipping 1308
55851 Sep 22 23:17:26.926 INFO [lossy] skipping 1309
55852 Sep 22 23:17:26.926 DEBG Flush :1308 extent_limit None deps:[JobId(1307)] res:true f:132 g:1
55853 Sep 22 23:17:26.932 DEBG Read :1309 deps:[JobId(1308)] res:true
55854 Sep 22 23:17:27.333 DEBG up_ds_listen was notified
55855 Sep 22 23:17:27.333 DEBG up_ds_listen process 1308
55856 Sep 22 23:17:27.333 DEBG [A] ack job 1308:309, : downstairs
55857 Sep 22 23:17:27.333 DEBG up_ds_listen checked 1 jobs, back to waiting
55858 Sep 22 23:17:27.333 DEBG IO Flush 1310 has deps [JobId(1309), JobId(1308)]
55859 Sep 22 23:17:27.339 DEBG Read :1307 deps:[JobId(1306)] res:true
55860 Sep 22 23:17:27.362 DEBG Flush :1310 extent_limit None deps:[JobId(1309), JobId(1308)] res:true f:133 g:1
55861 Sep 22 23:17:27.738 DEBG [0] Read AckReady 1309, : downstairs
55862 Sep 22 23:17:27.739 DEBG up_ds_listen was notified
55863 Sep 22 23:17:27.739 DEBG up_ds_listen process 1309
55864 Sep 22 23:17:27.739 DEBG [A] ack job 1309:310, : downstairs
55865 Sep 22 23:17:27.791 DEBG up_ds_listen checked 1 jobs, back to waiting
55866 Sep 22 23:17:27.793 INFO [lossy] sleeping 1 second
55867 Sep 22 23:17:27.793 DEBG IO Read 1311 has deps [JobId(1310)]
55868 Sep 22 23:17:27.807 INFO [lossy] skipping 1308
55869 Sep 22 23:17:27.807 WARN returning error on flush!
55870 Sep 22 23:17:27.807 DEBG Flush :1308 extent_limit None deps:[JobId(1307)] res:false f:132 g:1
55871 Sep 22 23:17:27.807 INFO [lossy] skipping 1308
55872 Sep 22 23:17:27.807 DEBG Flush :1308 extent_limit None deps:[JobId(1307)] res:true f:132 g:1
55873 Sep 22 23:17:27.807 INFO [lossy] skipping 1309
55874 Sep 22 23:17:27.807 INFO [lossy] skipping 1309
55875 Sep 22 23:17:27.807 INFO [lossy] skipping 1309
55876 Sep 22 23:17:27.814 DEBG Read :1309 deps:[JobId(1308)] res:true
55877 Sep 22 23:17:27.836 INFO [lossy] sleeping 1 second
55878 Sep 22 23:17:27.931 DEBG IO Flush 1312 has deps [JobId(1311), JobId(1310)]
55879 Sep 22 23:17:28.594 ERRO [1] job id 1308 saw error GenericError("test error")
55880 Sep 22 23:17:28.594 DEBG [rc] retire 1308 clears [JobId(1307), JobId(1308)], : downstairs
55881 Sep 22 23:17:28.597 INFO [lossy] skipping 1310
55882 Sep 22 23:17:28.597 INFO [lossy] skipping 1312
55883 Sep 22 23:17:28.597 DEBG Flush :1310 extent_limit None deps:[JobId(1309), JobId(1308)] res:true f:133 g:1
55884 Sep 22 23:17:28.597 INFO [lossy] skipping 1312
55885 Sep 22 23:17:28.597 INFO [lossy] sleeping 1 second
55886 Sep 22 23:17:28.975 DEBG up_ds_listen was notified
55887 Sep 22 23:17:28.975 DEBG up_ds_listen process 1310
55888 Sep 22 23:17:28.975 DEBG [A] ack job 1310:311, : downstairs
55889 Sep 22 23:17:28.975 DEBG up_ds_listen checked 1 jobs, back to waiting
55890 Sep 22 23:17:28.981 DEBG Read :1311 deps:[JobId(1310)] res:true
55891 Sep 22 23:17:29.002 DEBG Flush :1310 extent_limit None deps:[JobId(1309), JobId(1308)] res:true f:133 g:1
55892 Sep 22 23:17:29.008 DEBG Read :1311 deps:[JobId(1310)] res:true
55893 Sep 22 23:17:29.030 DEBG [rc] retire 1310 clears [JobId(1309), JobId(1310)], : downstairs
55894 Sep 22 23:17:29.032 WARN returning error on flush!
55895 Sep 22 23:17:29.032 DEBG Flush :1312 extent_limit None deps:[JobId(1311), JobId(1310)] res:false f:134 g:1
55896 Sep 22 23:17:29.032 INFO [lossy] skipping 1312
55897 Sep 22 23:17:29.032 INFO [lossy] skipping 1312
55898 Sep 22 23:17:29.032 WARN returning error on flush!
55899 Sep 22 23:17:29.032 DEBG Flush :1312 extent_limit None deps:[JobId(1311), JobId(1310)] res:false f:134 g:1
55900 Sep 22 23:17:29.032 DEBG Flush :1312 extent_limit None deps:[JobId(1311), JobId(1310)] res:true f:134 g:1
55901 Sep 22 23:17:29.032 INFO [lossy] sleeping 1 second
55902 Sep 22 23:17:29.033 DEBG Flush :1312 extent_limit None deps:[JobId(1311), JobId(1310)] res:true f:134 g:1
55903 Sep 22 23:17:29.456 DEBG [0] Read AckReady 1311, : downstairs
55904 Sep 22 23:17:29.785 DEBG [2] Read already AckReady 1311, : downstairs
55905 Sep 22 23:17:29.786 ERRO [2] job id 1312 saw error GenericError("test error")
55906 Sep 22 23:17:29.786 ERRO [2] job id 1312 saw error GenericError("test error")
55907 Sep 22 23:17:29.786 DEBG up_ds_listen was notified
55908 Sep 22 23:17:29.786 DEBG up_ds_listen process 1311
55909 Sep 22 23:17:29.786 DEBG [A] ack job 1311:312, : downstairs
55910 Sep 22 23:17:29.839 DEBG up_ds_listen process 1312
55911 Sep 22 23:17:29.839 DEBG [A] ack job 1312:313, : downstairs
55912 Sep 22 23:17:29.839 DEBG up_ds_listen checked 2 jobs, back to waiting
55913 Sep 22 23:17:29.839 DEBG up_ds_listen was notified
55914 Sep 22 23:17:29.839 DEBG up_ds_listen checked 0 jobs, back to waiting
55915 Sep 22 23:17:29.846 DEBG Read :1311 deps:[JobId(1310)] res:true
55916 Sep 22 23:17:29.868 DEBG IO Read 1313 has deps [JobId(1312)]
55917 Sep 22 23:17:29.880 WARN returning error on read!
55918 Sep 22 23:17:29.880 DEBG Read :1313 deps:[JobId(1312)] res:false
55919 Sep 22 23:17:29.880 WARN returning error on read!
55920 Sep 22 23:17:29.880 DEBG Read :1313 deps:[JobId(1312)] res:false
55921 Sep 22 23:17:29.886 DEBG Read :1313 deps:[JobId(1312)] res:true
55922 Sep 22 23:17:29.907 ERRO [0] job id 1313 saw error GenericError("test error")
55923 Sep 22 23:17:29.908 ERRO [0] job id 1313 saw error GenericError("test error")
55924 Sep 22 23:17:29.909 DEBG Flush :1312 extent_limit None deps:[JobId(1311), JobId(1310)] res:true f:134 g:1
55925 Sep 22 23:17:29.915 DEBG Read :1313 deps:[JobId(1312)] res:true
55926 Sep 22 23:17:30.315 DEBG [rc] retire 1312 clears [JobId(1311), JobId(1312)], : downstairs
55927 Sep 22 23:17:30.317 WARN returning error on read!
55928 Sep 22 23:17:30.317 DEBG Read :1313 deps:[JobId(1312)] res:false
55929 Sep 22 23:17:30.317 INFO [lossy] skipping 1313
55930 Sep 22 23:17:30.323 DEBG Read :1313 deps:[JobId(1312)] res:true
55931 Sep 22 23:17:30.391 DEBG IO Flush 1314 has deps [JobId(1313)]
55932 Sep 22 23:17:30.720 DEBG [0] Read AckReady 1313, : downstairs
55933 Sep 22 23:17:30.721 ERRO [2] job id 1313 saw error GenericError("test error")
55934 Sep 22 23:17:30.721 DEBG up_ds_listen was notified
55935 Sep 22 23:17:30.721 DEBG up_ds_listen process 1313
55936 Sep 22 23:17:30.721 DEBG [A] ack job 1313:314, : downstairs
55937 Sep 22 23:17:30.774 DEBG up_ds_listen checked 1 jobs, back to waiting
55938 Sep 22 23:17:30.775 INFO [lossy] sleeping 1 second
55939 Sep 22 23:17:30.776 DEBG IO Read 1315 has deps [JobId(1314)]
55940 Sep 22 23:17:30.790 INFO [lossy] sleeping 1 second
55941 Sep 22 23:17:31.170 DEBG Flush :1314 extent_limit None deps:[JobId(1313)] res:true f:135 g:1
55942 Sep 22 23:17:31.176 DEBG Read :1315 deps:[JobId(1314)] res:true
55943 Sep 22 23:17:31.198 DEBG IO Flush 1316 has deps [JobId(1315), JobId(1314)]
55944 Sep 22 23:17:31.577 INFO [lossy] sleeping 1 second
55945 Sep 22 23:17:31.952 DEBG [2] Read AckReady 1315, : downstairs
55946 Sep 22 23:17:31.953 DEBG up_ds_listen was notified
55947 Sep 22 23:17:31.953 DEBG up_ds_listen process 1315
55948 Sep 22 23:17:31.953 DEBG [A] ack job 1315:316, : downstairs
55949 Sep 22 23:17:32.005 DEBG up_ds_listen checked 1 jobs, back to waiting
55950 Sep 22 23:17:32.007 DEBG Flush :1314 extent_limit None deps:[JobId(1313)] res:true f:135 g:1
55951 Sep 22 23:17:32.013 DEBG Read :1315 deps:[JobId(1314)] res:true
55952 Sep 22 23:17:32.034 WARN returning error on flush!
55953 Sep 22 23:17:32.034 DEBG Flush :1314 extent_limit None deps:[JobId(1313)] res:false f:135 g:1
55954 Sep 22 23:17:32.034 INFO [lossy] skipping 1316
55955 Sep 22 23:17:32.034 DEBG Flush :1314 extent_limit None deps:[JobId(1313)] res:true f:135 g:1
55956 Sep 22 23:17:32.034 INFO [lossy] skipping 1316
55957 Sep 22 23:17:32.040 DEBG Read :1315 deps:[JobId(1314)] res:true
55958 Sep 22 23:17:32.062 DEBG IO Read 1317 has deps [JobId(1316)]
55959 Sep 22 23:17:32.062 ERRO [1] job id 1314 saw error GenericError("test error")
55960 Sep 22 23:17:32.067 DEBG up_ds_listen was notified
55961 Sep 22 23:17:32.067 DEBG up_ds_listen process 1314
55962 Sep 22 23:17:32.067 DEBG [A] ack job 1314:315, : downstairs
55963 Sep 22 23:17:32.068 DEBG [rc] retire 1314 clears [JobId(1313), JobId(1314)], : downstairs
55964 Sep 22 23:17:32.068 DEBG up_ds_listen checked 1 jobs, back to waiting
55965 Sep 22 23:17:32.076 DEBG Flush :1316 extent_limit None deps:[JobId(1315), JobId(1314)] res:true f:136 g:1
55966 Sep 22 23:17:32.082 DEBG Read :1317 deps:[JobId(1316)] res:true
55967 Sep 22 23:17:32.103 DEBG Flush :1316 extent_limit None deps:[JobId(1315), JobId(1314)] res:true f:136 g:1
55968 Sep 22 23:17:32.104 WARN returning error on read!
55969 Sep 22 23:17:32.104 DEBG Read :1317 deps:[JobId(1316)] res:false
55970 Sep 22 23:17:32.109 DEBG Read :1317 deps:[JobId(1316)] res:true
55971 Sep 22 23:17:32.557 ERRO [0] job id 1317 saw error GenericError("test error")
55972 Sep 22 23:17:32.886 DEBG up_ds_listen was notified
55973 Sep 22 23:17:32.886 DEBG up_ds_listen process 1316
55974 Sep 22 23:17:32.886 DEBG [A] ack job 1316:317, : downstairs
55975 Sep 22 23:17:32.886 DEBG up_ds_listen checked 1 jobs, back to waiting
55976 Sep 22 23:17:32.886 DEBG IO Flush 1318 has deps [JobId(1317), JobId(1316)]
55977 Sep 22 23:17:32.887 DEBG Flush :1316 extent_limit None deps:[JobId(1315), JobId(1314)] res:true f:136 g:1
55978 Sep 22 23:17:32.892 DEBG Read :1317 deps:[JobId(1316)] res:true
55979 Sep 22 23:17:32.914 DEBG [rc] retire 1316 clears [JobId(1315), JobId(1316)], : downstairs
55980 Sep 22 23:17:32.917 INFO [lossy] skipping 1318
55981 Sep 22 23:17:32.917 DEBG Flush :1318 extent_limit None deps:[JobId(1317), JobId(1316)] res:true f:137 g:1
55982 Sep 22 23:17:32.917 INFO [lossy] sleeping 1 second
55983 Sep 22 23:17:32.918 WARN returning error on flush!
55984 Sep 22 23:17:32.918 DEBG Flush :1318 extent_limit None deps:[JobId(1317), JobId(1316)] res:false f:137 g:1
55985 Sep 22 23:17:32.918 INFO [lossy] skipping 1318
55986 Sep 22 23:17:32.918 DEBG Flush :1318 extent_limit None deps:[JobId(1317), JobId(1316)] res:true f:137 g:1
55987 Sep 22 23:17:32.918 INFO [lossy] sleeping 1 second
55988 Sep 22 23:17:33.342 DEBG [0] Read AckReady 1317, : downstairs
55989 Sep 22 23:17:33.342 ERRO [0] job id 1318 saw error GenericError("test error")
55990 Sep 22 23:17:33.670 DEBG [1] Read already AckReady 1317, : downstairs
55991 Sep 22 23:17:33.672 DEBG up_ds_listen was notified
55992 Sep 22 23:17:33.672 DEBG up_ds_listen process 1317
55993 Sep 22 23:17:33.672 DEBG [A] ack job 1317:318, : downstairs
55994 Sep 22 23:17:33.725 DEBG up_ds_listen process 1318
55995 Sep 22 23:17:33.725 DEBG [A] ack job 1318:319, : downstairs
55996 Sep 22 23:17:33.725 DEBG up_ds_listen checked 2 jobs, back to waiting
55997 Sep 22 23:17:33.725 DEBG up_ds_listen was notified
55998 Sep 22 23:17:33.725 DEBG up_ds_listen checked 0 jobs, back to waiting
55999 Sep 22 23:17:33.727 INFO [lossy] sleeping 1 second
56000 Sep 22 23:17:33.775 DEBG IO Read 1319 has deps [JobId(1318)]
56001 Sep 22 23:17:34.123 DEBG Read :1319 deps:[JobId(1318)] res:true
56002 Sep 22 23:17:34.144 INFO [lossy] sleeping 1 second
56003 Sep 22 23:17:34.521 DEBG [1] Read AckReady 1319, : downstairs
56004 Sep 22 23:17:34.521 DEBG up_ds_listen was notified
56005 Sep 22 23:17:34.521 DEBG up_ds_listen process 1319
56006 Sep 22 23:17:34.521 DEBG [A] ack job 1319:320, : downstairs
56007 Sep 22 23:17:34.574 DEBG up_ds_listen checked 1 jobs, back to waiting
56008 Sep 22 23:17:34.575 DEBG IO Flush 1320 has deps [JobId(1319), JobId(1318)]
56009 Sep 22 23:17:34.575 DEBG IO Read 1321 has deps [JobId(1320)]
56010 Sep 22 23:17:34.581 INFO [lossy] sleeping 1 second
56011 Sep 22 23:17:34.728 DEBG Flush :1318 extent_limit None deps:[JobId(1317), JobId(1316)] res:true f:137 g:1
56012 Sep 22 23:17:34.729 WARN returning error on read!
56013 Sep 22 23:17:34.729 DEBG Read :1319 deps:[JobId(1318)] res:false
56014 Sep 22 23:17:34.729 INFO [lossy] skipping 1319
56015 Sep 22 23:17:34.734 DEBG Read :1319 deps:[JobId(1318)] res:true
56016 Sep 22 23:17:34.756 DEBG [rc] retire 1318 clears [JobId(1317), JobId(1318)], : downstairs
56017 Sep 22 23:17:34.756 ERRO [2] job id 1319 saw error GenericError("test error")
56018 Sep 22 23:17:34.757 DEBG Flush :1320 extent_limit None deps:[JobId(1319), JobId(1318)] res:true f:138 g:1
56019 Sep 22 23:17:34.763 DEBG Read :1321 deps:[JobId(1320)] res:true
56020 Sep 22 23:17:35.162 DEBG IO Flush 1322 has deps [JobId(1321), JobId(1320)]
56021 Sep 22 23:17:35.168 DEBG Read :1319 deps:[JobId(1318)] res:true
56022 Sep 22 23:17:35.192 INFO [lossy] sleeping 1 second
56023 Sep 22 23:17:35.568 DEBG [2] Read AckReady 1321, : downstairs
56024 Sep 22 23:17:35.568 DEBG up_ds_listen was notified
56025 Sep 22 23:17:35.568 DEBG up_ds_listen process 1321
56026 Sep 22 23:17:35.568 DEBG [A] ack job 1321:322, : downstairs
56027 Sep 22 23:17:35.621 DEBG up_ds_listen checked 1 jobs, back to waiting
56028 Sep 22 23:17:35.623 DEBG Flush :1320 extent_limit None deps:[JobId(1319), JobId(1318)] res:true f:138 g:1
56029 Sep 22 23:17:35.623 INFO [lossy] skipping 1321
56030 Sep 22 23:17:35.629 DEBG Read :1321 deps:[JobId(1320)] res:true
56031 Sep 22 23:17:35.650 DEBG Flush :1320 extent_limit None deps:[JobId(1319), JobId(1318)] res:true f:138 g:1
56032 Sep 22 23:17:35.650 INFO [lossy] skipping 1321
56033 Sep 22 23:17:35.650 INFO [lossy] skipping 1322
56034 Sep 22 23:17:35.650 INFO [lossy] skipping 1321
56035 Sep 22 23:17:35.656 DEBG Read :1321 deps:[JobId(1320)] res:true
56036 Sep 22 23:17:35.725 DEBG IO Read 1323 has deps [JobId(1322)]
56037 Sep 22 23:17:35.725 DEBG IO Flush 1324 has deps [JobId(1323), JobId(1322)]
56038 Sep 22 23:17:36.061 DEBG up_ds_listen was notified
56039 Sep 22 23:17:36.061 DEBG up_ds_listen process 1320
56040 Sep 22 23:17:36.061 DEBG [A] ack job 1320:321, : downstairs
56041 Sep 22 23:17:36.061 DEBG [rc] retire 1320 clears [JobId(1319), JobId(1320)], : downstairs
56042 Sep 22 23:17:36.061 DEBG up_ds_listen checked 1 jobs, back to waiting
56043 Sep 22 23:17:36.070 DEBG Flush :1322 extent_limit None deps:[JobId(1321), JobId(1320)] res:true f:139 g:1
56044 Sep 22 23:17:36.070 INFO [lossy] skipping 1323
56045 Sep 22 23:17:36.071 WARN returning error on read!
56046 Sep 22 23:17:36.071 DEBG Read :1323 deps:[JobId(1322)] res:false
56047 Sep 22 23:17:36.076 DEBG Read :1323 deps:[JobId(1322)] res:true
56048 Sep 22 23:17:36.099 DEBG Flush :1322 extent_limit None deps:[JobId(1321), JobId(1320)] res:true f:139 g:1
56049 Sep 22 23:17:36.099 WARN returning error on read!
56050 Sep 22 23:17:36.099 DEBG Read :1323 deps:[JobId(1322)] res:false
56051 Sep 22 23:17:36.105 DEBG Read :1323 deps:[JobId(1322)] res:true
56052 Sep 22 23:17:36.552 ERRO [0] job id 1323 saw error GenericError("test error")
56053 Sep 22 23:17:36.882 ERRO [1] job id 1323 saw error GenericError("test error")
56054 Sep 22 23:17:36.882 DEBG up_ds_listen was notified
56055 Sep 22 23:17:36.882 DEBG up_ds_listen process 1322
56056 Sep 22 23:17:36.882 DEBG [A] ack job 1322:323, : downstairs
56057 Sep 22 23:17:36.882 DEBG up_ds_listen checked 1 jobs, back to waiting
56058 Sep 22 23:17:36.882 DEBG Flush :1322 extent_limit None deps:[JobId(1321), JobId(1320)] res:true f:139 g:1
56059 Sep 22 23:17:36.882 WARN returning error on read!
56060 Sep 22 23:17:36.882 DEBG Read :1323 deps:[JobId(1322)] res:false
56061 Sep 22 23:17:36.882 INFO [lossy] skipping 1323
56062 Sep 22 23:17:36.888 DEBG Read :1323 deps:[JobId(1322)] res:true
56063 Sep 22 23:17:36.910 DEBG [rc] retire 1322 clears [JobId(1321), JobId(1322)], : downstairs
56064 Sep 22 23:17:36.910 ERRO [2] job id 1323 saw error GenericError("test error")
56065 Sep 22 23:17:36.912 INFO [lossy] skipping 1324
56066 Sep 22 23:17:36.912 WARN returning error on flush!
56067 Sep 22 23:17:36.912 DEBG Flush :1324 extent_limit None deps:[JobId(1323), JobId(1322)] res:false f:140 g:1
56068 Sep 22 23:17:36.912 INFO [lossy] skipping 1324
56069 Sep 22 23:17:36.912 WARN returning error on flush!
56070 Sep 22 23:17:36.912 DEBG Flush :1324 extent_limit None deps:[JobId(1323), JobId(1322)] res:false f:140 g:1
56071 Sep 22 23:17:36.912 DEBG Flush :1324 extent_limit None deps:[JobId(1323), JobId(1322)] res:true f:140 g:1
56072 Sep 22 23:17:36.912 INFO [lossy] sleeping 1 second
56073 Sep 22 23:17:37.288 DEBG [1] Read AckReady 1323, : downstairs
56074 Sep 22 23:17:37.289 ERRO [1] job id 1324 saw error GenericError("test error")
56075 Sep 22 23:17:37.289 ERRO [1] job id 1324 saw error GenericError("test error")
56076 Sep 22 23:17:37.289 DEBG up_ds_listen was notified
56077 Sep 22 23:17:37.289 DEBG up_ds_listen process 1323
56078 Sep 22 23:17:37.289 DEBG [A] ack job 1323:324, : downstairs
56079 Sep 22 23:17:37.342 DEBG up_ds_listen checked 1 jobs, back to waiting
56080 Sep 22 23:17:37.344 INFO [lossy] sleeping 1 second
56081 Sep 22 23:17:37.346 INFO [lossy] sleeping 1 second
56082 Sep 22 23:17:37.441 DEBG IO Read 1325 has deps [JobId(1324)]
56083 Sep 22 23:17:38.113 DEBG IO Flush 1326 has deps [JobId(1325), JobId(1324)]
56084 Sep 22 23:17:38.113 INFO [lossy] sleeping 1 second
56085 Sep 22 23:17:38.346 WARN returning error on flush!
56086 Sep 22 23:17:38.346 DEBG Flush :1324 extent_limit None deps:[JobId(1323), JobId(1322)] res:false f:140 g:1
56087 Sep 22 23:17:38.346 DEBG Flush :1324 extent_limit None deps:[JobId(1323), JobId(1322)] res:true f:140 g:1
56088 Sep 22 23:17:38.352 DEBG Read :1325 deps:[JobId(1324)] res:true
56089 Sep 22 23:17:38.373 WARN returning error on flush!
56090 Sep 22 23:17:38.373 DEBG Flush :1324 extent_limit None deps:[JobId(1323), JobId(1322)] res:false f:140 g:1
56091 Sep 22 23:17:38.373 INFO [lossy] skipping 1325
56092 Sep 22 23:17:38.373 DEBG Flush :1324 extent_limit None deps:[JobId(1323), JobId(1322)] res:true f:140 g:1
56093 Sep 22 23:17:38.379 DEBG Read :1325 deps:[JobId(1324)] res:true
56094 Sep 22 23:17:38.401 ERRO [2] job id 1324 saw error GenericError("test error")
56095 Sep 22 23:17:38.401 DEBG up_ds_listen was notified
56096 Sep 22 23:17:38.401 DEBG up_ds_listen process 1324
56097 Sep 22 23:17:38.401 DEBG [A] ack job 1324:325, : downstairs
56098 Sep 22 23:17:38.401 DEBG up_ds_listen checked 1 jobs, back to waiting
56099 Sep 22 23:17:38.401 ERRO [0] job id 1324 saw error GenericError("test error")
56100 Sep 22 23:17:38.401 DEBG [rc] retire 1324 clears [JobId(1323), JobId(1324)], : downstairs
56101 Sep 22 23:17:38.403 DEBG Flush :1326 extent_limit None deps:[JobId(1325), JobId(1324)] res:true f:141 g:1
56102 Sep 22 23:17:38.403 INFO [lossy] sleeping 1 second
56103 Sep 22 23:17:38.451 DEBG Flush :1326 extent_limit None deps:[JobId(1325), JobId(1324)] res:true f:141 g:1
56104 Sep 22 23:17:38.779 DEBG [2] Read AckReady 1325, : downstairs
56105 Sep 22 23:17:38.780 DEBG up_ds_listen was notified
56106 Sep 22 23:17:38.780 DEBG up_ds_listen process 1325
56107 Sep 22 23:17:38.780 DEBG [A] ack job 1325:326, : downstairs
56108 Sep 22 23:17:38.832 DEBG up_ds_listen checked 1 jobs, back to waiting
56109 Sep 22 23:17:39.210 DEBG up_ds_listen was notified
56110 Sep 22 23:17:39.210 DEBG up_ds_listen process 1326
56111 Sep 22 23:17:39.210 DEBG [A] ack job 1326:327, : downstairs
56112 Sep 22 23:17:39.210 DEBG up_ds_listen checked 1 jobs, back to waiting
56113 Sep 22 23:17:39.210 INFO [lossy] skipping 1325
56114 Sep 22 23:17:39.210 WARN returning error on read!
56115 Sep 22 23:17:39.211 DEBG Read :1325 deps:[JobId(1324)] res:false
56116 Sep 22 23:17:39.211 INFO [lossy] skipping 1325
56117 Sep 22 23:17:39.211 WARN returning error on read!
56118 Sep 22 23:17:39.211 DEBG Read :1325 deps:[JobId(1324)] res:false
56119 Sep 22 23:17:39.211 WARN returning error on read!
56120 Sep 22 23:17:39.211 DEBG Read :1325 deps:[JobId(1324)] res:false
56121 Sep 22 23:17:39.217 DEBG Read :1325 deps:[JobId(1324)] res:true
56122 Sep 22 23:17:39.238 DEBG IO Read 1327 has deps [JobId(1326)]
56123 Sep 22 23:17:39.250 ERRO [1] job id 1325 saw error GenericError("test error")
56124 Sep 22 23:17:39.250 ERRO [1] job id 1325 saw error GenericError("test error")
56125 Sep 22 23:17:39.250 ERRO [1] job id 1325 saw error GenericError("test error")
56126 Sep 22 23:17:39.251 INFO [lossy] skipping 1327
56127 Sep 22 23:17:39.251 INFO [lossy] skipping 1327
56128 Sep 22 23:17:39.256 DEBG Read :1327 deps:[JobId(1326)] res:true
56129 Sep 22 23:17:39.280 INFO [lossy] skipping 1326
56130 Sep 22 23:17:39.280 INFO [lossy] skipping 1326
56131 Sep 22 23:17:39.280 DEBG Flush :1326 extent_limit None deps:[JobId(1325), JobId(1324)] res:true f:141 g:1
56132 Sep 22 23:17:39.280 INFO [lossy] sleeping 1 second
56133 Sep 22 23:17:39.657 DEBG [rc] retire 1326 clears [JobId(1325), JobId(1326)], : downstairs
56134 Sep 22 23:17:39.704 DEBG IO Flush 1328 has deps [JobId(1327)]
56135 Sep 22 23:17:39.704 WARN returning error on read!
56136 Sep 22 23:17:39.704 DEBG Read :1327 deps:[JobId(1326)] res:false
56137 Sep 22 23:17:39.704 WARN returning error on read!
56138 Sep 22 23:17:39.704 DEBG Read :1327 deps:[JobId(1326)] res:false
56139 Sep 22 23:17:39.710 DEBG Read :1327 deps:[JobId(1326)] res:true
56140 Sep 22 23:17:40.061 DEBG [0] Read AckReady 1327, : downstairs
56141 Sep 22 23:17:40.062 DEBG up_ds_listen was notified
56142 Sep 22 23:17:40.062 DEBG up_ds_listen process 1327
56143 Sep 22 23:17:40.062 DEBG [A] ack job 1327:328, : downstairs
56144 Sep 22 23:17:40.115 DEBG up_ds_listen checked 1 jobs, back to waiting
56145 Sep 22 23:17:40.116 ERRO [2] job id 1327 saw error GenericError("test error")
56146 Sep 22 23:17:40.116 ERRO [2] job id 1327 saw error GenericError("test error")
56147 Sep 22 23:17:40.116 WARN returning error on flush!
56148 Sep 22 23:17:40.116 DEBG Flush :1328 extent_limit None deps:[JobId(1327)] res:false f:142 g:1
56149 Sep 22 23:17:40.116 INFO [lossy] skipping 1328
56150 Sep 22 23:17:40.116 DEBG Flush :1328 extent_limit None deps:[JobId(1327)] res:true f:142 g:1
56151 Sep 22 23:17:40.117 DEBG IO Read 1329 has deps [JobId(1328)]
56152 Sep 22 23:17:40.117 ERRO [0] job id 1328 saw error GenericError("test error")
56153 Sep 22 23:17:40.134 DEBG Read :1329 deps:[JobId(1328)] res:true
56154 Sep 22 23:17:40.158 DEBG Flush :1328 extent_limit None deps:[JobId(1327)] res:true f:142 g:1
56155 Sep 22 23:17:40.163 DEBG Read :1329 deps:[JobId(1328)] res:true
56156 Sep 22 23:17:40.563 DEBG up_ds_listen was notified
56157 Sep 22 23:17:40.563 DEBG up_ds_listen process 1328
56158 Sep 22 23:17:40.563 DEBG [A] ack job 1328:329, : downstairs
56159 Sep 22 23:17:40.563 DEBG up_ds_listen checked 1 jobs, back to waiting
56160 Sep 22 23:17:40.563 DEBG IO Flush 1330 has deps [JobId(1329), JobId(1328)]
56161 Sep 22 23:17:40.569 DEBG Read :1327 deps:[JobId(1326)] res:true
56162 Sep 22 23:17:40.592 INFO [lossy] sleeping 1 second
56163 Sep 22 23:17:40.971 DEBG [0] Read AckReady 1329, : downstairs
56164 Sep 22 23:17:40.971 DEBG up_ds_listen was notified
56165 Sep 22 23:17:40.971 DEBG up_ds_listen process 1329
56166 Sep 22 23:17:40.971 DEBG [A] ack job 1329:330, : downstairs
56167 Sep 22 23:17:41.025 DEBG up_ds_listen checked 1 jobs, back to waiting
56168 Sep 22 23:17:43.145 DEBG IO Write 1331 has deps [JobId(1330), JobId(1328)]
56169 Sep 22 23:17:43.145 DEBG IO Flush 1332 has deps [JobId(1331), JobId(1330)]
56170 Sep 22 23:17:43.145 DEBG Flush :1330 extent_limit None deps:[JobId(1329), JobId(1328)] res:true f:143 g:1
56171 Sep 22 23:17:43.146 DEBG up_ds_listen was notified
56172 Sep 22 23:17:43.146 DEBG up_ds_listen process 1331
56173 Sep 22 23:17:43.146 DEBG [A] ack job 1331:332, : downstairs
56174 Sep 22 23:17:43.146 DEBG up_ds_listen checked 1 jobs, back to waiting
56175 Sep 22 23:17:43.538 DEBG IO Write 1333 has deps [JobId(1332), JobId(1330), JobId(1328)]
56176 Sep 22 23:17:43.538 DEBG up_ds_listen was notified
56177 Sep 22 23:17:43.538 DEBG up_ds_listen process 1333
56178 Sep 22 23:17:43.538 DEBG [A] ack job 1333:334, : downstairs
56179 Sep 22 23:17:43.538 DEBG up_ds_listen checked 1 jobs, back to waiting
56180 Sep 22 23:17:43.868 DEBG IO Write 1334 has deps [JobId(1332), JobId(1330), JobId(1328)]
56181 Sep 22 23:17:43.868 DEBG up_ds_listen was notified
56182 Sep 22 23:17:43.868 DEBG up_ds_listen process 1334
56183 Sep 22 23:17:43.868 DEBG [A] ack job 1334:335, : downstairs
56184 Sep 22 23:17:43.868 DEBG up_ds_listen checked 1 jobs, back to waiting
56185 Sep 22 23:17:43.868 DEBG IO Flush 1335 has deps [JobId(1334), JobId(1333), JobId(1332)]
56186 Sep 22 23:17:44.198 DEBG IO Write 1336 has deps [JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56187 Sep 22 23:17:44.198 DEBG up_ds_listen was notified
56188 Sep 22 23:17:44.198 DEBG up_ds_listen process 1336
56189 Sep 22 23:17:44.198 DEBG [A] ack job 1336:337, : downstairs
56190 Sep 22 23:17:44.198 DEBG up_ds_listen checked 1 jobs, back to waiting
56191 Sep 22 23:17:44.528 DEBG IO Write 1337 has deps [JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56192 Sep 22 23:17:44.529 DEBG up_ds_listen was notified
56193 Sep 22 23:17:44.529 DEBG up_ds_listen process 1337
56194 Sep 22 23:17:44.529 DEBG [A] ack job 1337:338, : downstairs
56195 Sep 22 23:17:44.529 DEBG up_ds_listen checked 1 jobs, back to waiting
56196 Sep 22 23:17:44.529 DEBG IO Flush 1338 has deps [JobId(1337), JobId(1336), JobId(1335)]
56197 Sep 22 23:17:44.858 DEBG IO Write 1339 has deps [JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56198 Sep 22 23:17:44.859 DEBG up_ds_listen was notified
56199 Sep 22 23:17:44.859 DEBG up_ds_listen process 1339
56200 Sep 22 23:17:44.859 DEBG [A] ack job 1339:340, : downstairs
56201 Sep 22 23:17:44.859 DEBG up_ds_listen checked 1 jobs, back to waiting
56202 Sep 22 23:17:45.189 DEBG IO Write 1340 has deps [JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56203 Sep 22 23:17:45.189 DEBG up_ds_listen was notified
56204 Sep 22 23:17:45.189 DEBG up_ds_listen process 1340
56205 Sep 22 23:17:45.189 DEBG [A] ack job 1340:341, : downstairs
56206 Sep 22 23:17:45.189 DEBG up_ds_listen checked 1 jobs, back to waiting
56207 Sep 22 23:17:45.190 DEBG IO Flush 1341 has deps [JobId(1340), JobId(1339), JobId(1338)]
56208 Sep 22 23:17:45.519 DEBG IO Write 1342 has deps [JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56209 Sep 22 23:17:45.520 DEBG up_ds_listen was notified
56210 Sep 22 23:17:45.520 DEBG up_ds_listen process 1342
56211 Sep 22 23:17:45.520 DEBG [A] ack job 1342:343, : downstairs
56212 Sep 22 23:17:45.520 DEBG up_ds_listen checked 1 jobs, back to waiting
56213 Sep 22 23:17:45.850 DEBG IO Write 1343 has deps [JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56214 Sep 22 23:17:45.850 DEBG up_ds_listen was notified
56215 Sep 22 23:17:45.850 DEBG up_ds_listen process 1343
56216 Sep 22 23:17:45.850 DEBG [A] ack job 1343:344, : downstairs
56217 Sep 22 23:17:45.850 DEBG up_ds_listen checked 1 jobs, back to waiting
56218 Sep 22 23:17:45.850 DEBG IO Flush 1344 has deps [JobId(1343), JobId(1342), JobId(1341)]
56219 Sep 22 23:17:46.244 DEBG IO Write 1345 has deps [JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56220 Sep 22 23:17:46.244 DEBG up_ds_listen was notified
56221 Sep 22 23:17:46.244 DEBG up_ds_listen process 1345
56222 Sep 22 23:17:46.244 DEBG [A] ack job 1345:346, : downstairs
56223 Sep 22 23:17:46.244 DEBG up_ds_listen checked 1 jobs, back to waiting
56224 Sep 22 23:17:46.385 INFO [lossy] sleeping 1 second
56225 Sep 22 23:17:46.715 DEBG IO Write 1346 has deps [JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56226 Sep 22 23:17:46.715 DEBG IO Flush 1347 has deps [JobId(1346), JobId(1345), JobId(1344)]
56227 Sep 22 23:17:46.715 DEBG up_ds_listen was notified
56228 Sep 22 23:17:46.715 DEBG up_ds_listen process 1346
56229 Sep 22 23:17:46.715 DEBG [A] ack job 1346:347, : downstairs
56230 Sep 22 23:17:46.715 DEBG up_ds_listen checked 1 jobs, back to waiting
56231 Sep 22 23:17:47.045 DEBG IO Write 1348 has deps [JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56232 Sep 22 23:17:47.045 DEBG up_ds_listen was notified
56233 Sep 22 23:17:47.045 DEBG up_ds_listen process 1348
56234 Sep 22 23:17:47.045 DEBG [A] ack job 1348:349, : downstairs
56235 Sep 22 23:17:47.045 DEBG up_ds_listen checked 1 jobs, back to waiting
56236 Sep 22 23:17:47.375 DEBG IO Write 1349 has deps [JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56237 Sep 22 23:17:47.375 DEBG up_ds_listen was notified
56238 Sep 22 23:17:47.375 DEBG up_ds_listen process 1349
56239 Sep 22 23:17:47.375 DEBG [A] ack job 1349:350, : downstairs
56240 Sep 22 23:17:47.375 DEBG up_ds_listen checked 1 jobs, back to waiting
56241 Sep 22 23:17:47.375 DEBG IO Flush 1350 has deps [JobId(1349), JobId(1348), JobId(1347)]
56242 Sep 22 23:17:47.704 DEBG IO Write 1351 has deps [JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56243 Sep 22 23:17:47.704 DEBG up_ds_listen was notified
56244 Sep 22 23:17:47.704 DEBG up_ds_listen process 1351
56245 Sep 22 23:17:47.704 DEBG [A] ack job 1351:352, : downstairs
56246 Sep 22 23:17:47.705 DEBG up_ds_listen checked 1 jobs, back to waiting
56247 Sep 22 23:17:47.735 DEBG Write :1331 deps:[JobId(1330), JobId(1328)] res:true
56248 Sep 22 23:17:47.740 DEBG Flush :1332 extent_limit None deps:[JobId(1331), JobId(1330)] res:true f:144 g:1
56249 Sep 22 23:17:48.069 DEBG IO Write 1352 has deps [JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56250 Sep 22 23:17:48.069 DEBG up_ds_listen was notified
56251 Sep 22 23:17:48.069 DEBG up_ds_listen process 1352
56252 Sep 22 23:17:48.069 DEBG [A] ack job 1352:353, : downstairs
56253 Sep 22 23:17:48.069 DEBG up_ds_listen checked 1 jobs, back to waiting
56254 Sep 22 23:17:48.070 DEBG IO Flush 1353 has deps [JobId(1352), JobId(1351), JobId(1350)]
56255 Sep 22 23:17:48.398 DEBG IO Write 1354 has deps [JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56256 Sep 22 23:17:48.398 DEBG up_ds_listen was notified
56257 Sep 22 23:17:48.399 DEBG up_ds_listen process 1354
56258 Sep 22 23:17:48.399 DEBG [A] ack job 1354:355, : downstairs
56259 Sep 22 23:17:48.399 DEBG up_ds_listen checked 1 jobs, back to waiting
56260 Sep 22 23:17:48.728 DEBG IO Write 1355 has deps [JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56261 Sep 22 23:17:48.728 DEBG up_ds_listen was notified
56262 Sep 22 23:17:48.728 DEBG up_ds_listen process 1355
56263 Sep 22 23:17:48.728 DEBG [A] ack job 1355:356, : downstairs
56264 Sep 22 23:17:48.728 DEBG up_ds_listen checked 1 jobs, back to waiting
56265 Sep 22 23:17:48.729 DEBG IO Flush 1356 has deps [JobId(1355), JobId(1354), JobId(1353)]
56266 Sep 22 23:17:49.058 DEBG IO Write 1357 has deps [JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56267 Sep 22 23:17:49.058 DEBG up_ds_listen was notified
56268 Sep 22 23:17:49.058 DEBG up_ds_listen process 1357
56269 Sep 22 23:17:49.058 DEBG [A] ack job 1357:358, : downstairs
56270 Sep 22 23:17:49.058 DEBG up_ds_listen checked 1 jobs, back to waiting
56271 Sep 22 23:17:49.451 DEBG IO Write 1358 has deps [JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56272 Sep 22 23:17:49.451 DEBG up_ds_listen was notified
56273 Sep 22 23:17:49.451 DEBG up_ds_listen process 1358
56274 Sep 22 23:17:49.451 DEBG [A] ack job 1358:359, : downstairs
56275 Sep 22 23:17:49.451 DEBG up_ds_listen checked 1 jobs, back to waiting
56276 Sep 22 23:17:49.594 DEBG IO Flush 1359 has deps [JobId(1358), JobId(1357), JobId(1356)]
56277 Sep 22 23:17:49.625 DEBG Write :1333 deps:[JobId(1332), JobId(1330), JobId(1328)] res:true
56278 Sep 22 23:17:49.958 DEBG IO Write 1360 has deps [JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56279 Sep 22 23:17:49.958 DEBG up_ds_listen was notified
56280 Sep 22 23:17:49.959 DEBG up_ds_listen process 1360
56281 Sep 22 23:17:49.959 DEBG [A] ack job 1360:361, : downstairs
56282 Sep 22 23:17:49.959 DEBG up_ds_listen checked 1 jobs, back to waiting
56283 Sep 22 23:17:50.289 DEBG IO Write 1361 has deps [JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56284 Sep 22 23:17:50.289 DEBG up_ds_listen was notified
56285 Sep 22 23:17:50.289 DEBG up_ds_listen process 1361
56286 Sep 22 23:17:50.289 DEBG [A] ack job 1361:362, : downstairs
56287 Sep 22 23:17:50.289 DEBG up_ds_listen checked 1 jobs, back to waiting
56288 Sep 22 23:17:50.290 DEBG IO Flush 1362 has deps [JobId(1361), JobId(1360), JobId(1359)]
56289 Sep 22 23:17:50.619 DEBG IO Write 1363 has deps [JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56290 Sep 22 23:17:50.619 DEBG up_ds_listen was notified
56291 Sep 22 23:17:50.619 DEBG up_ds_listen process 1363
56292 Sep 22 23:17:50.619 DEBG [A] ack job 1363:364, : downstairs
56293 Sep 22 23:17:50.620 DEBG up_ds_listen checked 1 jobs, back to waiting
56294 Sep 22 23:17:50.950 DEBG IO Write 1364 has deps [JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56295 Sep 22 23:17:50.950 DEBG up_ds_listen was notified
56296 Sep 22 23:17:50.950 DEBG up_ds_listen process 1364
56297 Sep 22 23:17:50.950 DEBG [A] ack job 1364:365, : downstairs
56298 Sep 22 23:17:50.950 DEBG up_ds_listen checked 1 jobs, back to waiting
56299 Sep 22 23:17:50.951 DEBG IO Flush 1365 has deps [JobId(1364), JobId(1363), JobId(1362)]
56300 Sep 22 23:17:51.281 DEBG IO Write 1366 has deps [JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56301 Sep 22 23:17:51.281 DEBG up_ds_listen was notified
56302 Sep 22 23:17:51.281 DEBG up_ds_listen process 1366
56303 Sep 22 23:17:51.281 DEBG [A] ack job 1366:367, : downstairs
56304 Sep 22 23:17:51.281 DEBG up_ds_listen checked 1 jobs, back to waiting
56305 Sep 22 23:17:51.611 DEBG IO Write 1367 has deps [JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56306 Sep 22 23:17:51.611 DEBG up_ds_listen was notified
56307 Sep 22 23:17:51.611 DEBG up_ds_listen process 1367
56308 Sep 22 23:17:51.612 DEBG [A] ack job 1367:368, : downstairs
56309 Sep 22 23:17:51.612 DEBG up_ds_listen checked 1 jobs, back to waiting
56310 Sep 22 23:17:51.612 DEBG IO Flush 1368 has deps [JobId(1367), JobId(1366), JobId(1365)]
56311 Sep 22 23:17:51.941 DEBG IO Write 1369 has deps [JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56312 Sep 22 23:17:51.941 DEBG up_ds_listen was notified
56313 Sep 22 23:17:51.941 DEBG up_ds_listen process 1369
56314 Sep 22 23:17:51.941 DEBG [A] ack job 1369:370, : downstairs
56315 Sep 22 23:17:51.942 DEBG up_ds_listen checked 1 jobs, back to waiting
56316 Sep 22 23:17:52.271 DEBG IO Write 1370 has deps [JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56317 Sep 22 23:17:52.272 DEBG up_ds_listen was notified
56318 Sep 22 23:17:52.272 DEBG up_ds_listen process 1370
56319 Sep 22 23:17:52.272 DEBG [A] ack job 1370:371, : downstairs
56320 Sep 22 23:17:52.272 DEBG up_ds_listen checked 1 jobs, back to waiting
56321 Sep 22 23:17:52.272 DEBG IO Flush 1371 has deps [JobId(1370), JobId(1369), JobId(1368)]
56322 Sep 22 23:17:52.664 DEBG IO Write 1372 has deps [JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56323 Sep 22 23:17:52.664 DEBG up_ds_listen was notified
56324 Sep 22 23:17:52.664 DEBG up_ds_listen process 1372
56325 Sep 22 23:17:52.665 DEBG [A] ack job 1372:373, : downstairs
56326 Sep 22 23:17:52.665 DEBG up_ds_listen checked 1 jobs, back to waiting
56327 Sep 22 23:17:52.805 INFO [lossy] sleeping 1 second
56328 Sep 22 23:17:53.135 DEBG IO Write 1373 has deps [JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56329 Sep 22 23:17:53.135 DEBG IO Flush 1374 has deps [JobId(1373), JobId(1372), JobId(1371)]
56330 Sep 22 23:17:53.135 DEBG up_ds_listen was notified
56331 Sep 22 23:17:53.135 DEBG up_ds_listen process 1373
56332 Sep 22 23:17:53.135 DEBG [A] ack job 1373:374, : downstairs
56333 Sep 22 23:17:53.135 DEBG up_ds_listen checked 1 jobs, back to waiting
56334 Sep 22 23:17:53.465 DEBG IO Write 1375 has deps [JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56335 Sep 22 23:17:53.465 DEBG up_ds_listen was notified
56336 Sep 22 23:17:53.465 DEBG up_ds_listen process 1375
56337 Sep 22 23:17:53.465 DEBG [A] ack job 1375:376, : downstairs
56338 Sep 22 23:17:53.465 DEBG up_ds_listen checked 1 jobs, back to waiting
56339 Sep 22 23:17:53.795 DEBG IO Write 1376 has deps [JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56340 Sep 22 23:17:53.796 DEBG up_ds_listen was notified
56341 Sep 22 23:17:53.796 DEBG up_ds_listen process 1376
56342 Sep 22 23:17:53.796 DEBG [A] ack job 1376:377, : downstairs
56343 Sep 22 23:17:53.796 DEBG up_ds_listen checked 1 jobs, back to waiting
56344 Sep 22 23:17:53.796 DEBG IO Flush 1377 has deps [JobId(1376), JobId(1375), JobId(1374)]
56345 Sep 22 23:17:54.126 DEBG IO Write 1378 has deps [JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56346 Sep 22 23:17:54.126 DEBG up_ds_listen was notified
56347 Sep 22 23:17:54.126 DEBG up_ds_listen process 1378
56348 Sep 22 23:17:54.126 DEBG [A] ack job 1378:379, : downstairs
56349 Sep 22 23:17:54.127 DEBG up_ds_listen checked 1 jobs, back to waiting
56350 Sep 22 23:17:54.157 DEBG Write :1334 deps:[JobId(1332), JobId(1330), JobId(1328)] res:true
56351 Sep 22 23:17:54.166 DEBG Flush :1335 extent_limit None deps:[JobId(1334), JobId(1333), JobId(1332)] res:true f:145 g:1
56352 Sep 22 23:17:54.166 INFO [lossy] sleeping 1 second
56353 Sep 22 23:17:54.495 DEBG IO Write 1379 has deps [JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56354 Sep 22 23:17:54.495 DEBG up_ds_listen was notified
56355 Sep 22 23:17:54.496 DEBG up_ds_listen process 1379
56356 Sep 22 23:17:54.496 DEBG [A] ack job 1379:380, : downstairs
56357 Sep 22 23:17:54.496 DEBG up_ds_listen checked 1 jobs, back to waiting
56358 Sep 22 23:17:54.496 DEBG IO Flush 1380 has deps [JobId(1379), JobId(1378), JobId(1377)]
56359 Sep 22 23:17:54.826 DEBG IO Write 1381 has deps [JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56360 Sep 22 23:17:54.826 DEBG up_ds_listen was notified
56361 Sep 22 23:17:54.826 DEBG up_ds_listen process 1381
56362 Sep 22 23:17:54.826 DEBG [A] ack job 1381:382, : downstairs
56363 Sep 22 23:17:54.826 DEBG up_ds_listen checked 1 jobs, back to waiting
56364 Sep 22 23:17:55.156 DEBG IO Write 1382 has deps [JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56365 Sep 22 23:17:55.156 DEBG up_ds_listen was notified
56366 Sep 22 23:17:55.156 DEBG up_ds_listen process 1382
56367 Sep 22 23:17:55.156 DEBG [A] ack job 1382:383, : downstairs
56368 Sep 22 23:17:55.156 DEBG up_ds_listen checked 1 jobs, back to waiting
56369 Sep 22 23:17:55.157 DEBG IO Flush 1383 has deps [JobId(1382), JobId(1381), JobId(1380)]
56370 Sep 22 23:17:55.486 DEBG IO Write 1384 has deps [JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56371 Sep 22 23:17:55.487 DEBG up_ds_listen was notified
56372 Sep 22 23:17:55.487 DEBG up_ds_listen process 1384
56373 Sep 22 23:17:55.487 DEBG [A] ack job 1384:385, : downstairs
56374 Sep 22 23:17:55.487 DEBG up_ds_listen checked 1 jobs, back to waiting
56375 Sep 22 23:17:55.817 DEBG IO Write 1385 has deps [JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56376 Sep 22 23:17:55.817 DEBG up_ds_listen was notified
56377 Sep 22 23:17:55.817 DEBG up_ds_listen process 1385
56378 Sep 22 23:17:55.817 DEBG [A] ack job 1385:386, : downstairs
56379 Sep 22 23:17:55.817 DEBG up_ds_listen checked 1 jobs, back to waiting
56380 Sep 22 23:17:55.818 DEBG IO Flush 1386 has deps [JobId(1385), JobId(1384), JobId(1383)]
56381 Sep 22 23:17:56.210 DEBG IO Write 1387 has deps [JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56382 Sep 22 23:17:56.210 DEBG up_ds_listen was notified
56383 Sep 22 23:17:56.210 DEBG up_ds_listen process 1387
56384 Sep 22 23:17:56.210 DEBG [A] ack job 1387:388, : downstairs
56385 Sep 22 23:17:56.210 DEBG up_ds_listen checked 1 jobs, back to waiting
56386 Sep 22 23:17:56.380 DEBG Write :1336 deps:[JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56387 Sep 22 23:17:56.381 DEBG IO Flush 1388 has deps [JobId(1387), JobId(1386)]
56388 Sep 22 23:17:56.710 DEBG IO Write 1389 has deps [JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56389 Sep 22 23:17:56.710 DEBG up_ds_listen was notified
56390 Sep 22 23:17:56.710 DEBG up_ds_listen process 1389
56391 Sep 22 23:17:56.710 DEBG [A] ack job 1389:390, : downstairs
56392 Sep 22 23:17:56.710 DEBG up_ds_listen checked 1 jobs, back to waiting
56393 Sep 22 23:17:57.039 DEBG IO Write 1390 has deps [JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56394 Sep 22 23:17:57.040 DEBG up_ds_listen was notified
56395 Sep 22 23:17:57.040 DEBG up_ds_listen process 1390
56396 Sep 22 23:17:57.040 DEBG [A] ack job 1390:391, : downstairs
56397 Sep 22 23:17:57.040 DEBG up_ds_listen checked 1 jobs, back to waiting
56398 Sep 22 23:17:57.040 DEBG IO Flush 1391 has deps [JobId(1390), JobId(1389), JobId(1388)]
56399 Sep 22 23:17:57.369 DEBG IO Write 1392 has deps [JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56400 Sep 22 23:17:57.370 DEBG up_ds_listen was notified
56401 Sep 22 23:17:57.370 DEBG up_ds_listen process 1392
56402 Sep 22 23:17:57.370 DEBG [A] ack job 1392:393, : downstairs
56403 Sep 22 23:17:57.370 DEBG up_ds_listen checked 1 jobs, back to waiting
56404 Sep 22 23:17:57.699 DEBG IO Write 1393 has deps [JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56405 Sep 22 23:17:57.699 DEBG up_ds_listen was notified
56406 Sep 22 23:17:57.699 DEBG up_ds_listen process 1393
56407 Sep 22 23:17:57.699 DEBG [A] ack job 1393:394, : downstairs
56408 Sep 22 23:17:57.700 DEBG up_ds_listen checked 1 jobs, back to waiting
56409 Sep 22 23:17:57.700 DEBG IO Flush 1394 has deps [JobId(1393), JobId(1392), JobId(1391)]
56410 Sep 22 23:17:58.029 DEBG IO Write 1395 has deps [JobId(1394), JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56411 Sep 22 23:17:58.030 DEBG up_ds_listen was notified
56412 Sep 22 23:17:58.030 DEBG up_ds_listen process 1395
56413 Sep 22 23:17:58.030 DEBG [A] ack job 1395:396, : downstairs
56414 Sep 22 23:17:58.030 DEBG up_ds_listen checked 1 jobs, back to waiting
56415 Sep 22 23:17:58.359 DEBG IO Write 1396 has deps [JobId(1394), JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56416 Sep 22 23:17:58.359 DEBG up_ds_listen was notified
56417 Sep 22 23:17:58.359 DEBG up_ds_listen process 1396
56418 Sep 22 23:17:58.360 DEBG [A] ack job 1396:397, : downstairs
56419 Sep 22 23:17:58.360 DEBG up_ds_listen checked 1 jobs, back to waiting
56420 Sep 22 23:17:58.360 DEBG IO Flush 1397 has deps [JobId(1396), JobId(1395), JobId(1394)]
56421 Sep 22 23:17:58.689 DEBG IO Write 1398 has deps [JobId(1397), JobId(1394), JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)]
56422 Sep 22 23:17:58.689 DEBG up_ds_listen was notified
56423 Sep 22 23:17:58.689 DEBG up_ds_listen process 1398
56424 Sep 22 23:17:58.690 DEBG [A] ack job 1398:399, : downstairs
56425 Sep 22 23:17:58.690 DEBG up_ds_listen checked 1 jobs, back to waiting
56426 Sep 22 23:17:59.020 DEBG IO Write 1399 has deps [JobId(1397), JobId(1394), JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328), JobId(1327)]
56427 Sep 22 23:17:59.020 DEBG up_ds_listen was notified
56428 Sep 22 23:17:59.020 DEBG up_ds_listen process 1399
56429 Sep 22 23:17:59.020 DEBG [A] ack job 1399:400, : downstairs
56430 Sep 22 23:17:59.020 DEBG up_ds_listen checked 1 jobs, back to waiting
56431 Sep 22 23:17:59.021 DEBG IO Flush 1400 has deps [JobId(1399), JobId(1398), JobId(1397)]
56432 Sep 22 23:17:59.412 DEBG IO Write 1401 has deps [JobId(1400), JobId(1397), JobId(1394), JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1329), JobId(1328)]
56433 Sep 22 23:17:59.413 DEBG up_ds_listen was notified
56434 Sep 22 23:17:59.413 DEBG up_ds_listen process 1401
56435 Sep 22 23:17:59.413 DEBG [A] ack job 1401:402, : downstairs
56436 Sep 22 23:17:59.413 DEBG up_ds_listen checked 1 jobs, back to waiting
56437 Sep 22 23:17:59.584 DEBG Write :1337 deps:[JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56438 Sep 22 23:17:59.585 WARN returning error on flush!
56439 Sep 22 23:17:59.585 DEBG Flush :1338 extent_limit None deps:[JobId(1337), JobId(1336), JobId(1335)] res:false f:146 g:1
56440 Sep 22 23:17:59.593 DEBG Flush :1338 extent_limit None deps:[JobId(1337), JobId(1336), JobId(1335)] res:true f:146 g:1
56441 Sep 22 23:17:59.593 DEBG IO Read 1402 has deps [JobId(1400)]
56442 Sep 22 23:17:59.593 DEBG IO Flush 1403 has deps [JobId(1402), JobId(1401), JobId(1400)]
56443 Sep 22 23:17:59.798 INFO [lossy] skipping 1339
56444 Sep 22 23:17:59.798 INFO [lossy] skipping 1339
56445 Sep 22 23:17:59.828 DEBG Write :1339 deps:[JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56446 Sep 22 23:18:00.033 INFO [lossy] skipping 1340
56447 Sep 22 23:18:00.034 INFO [lossy] skipping 1341
56448 Sep 22 23:18:00.034 INFO [lossy] skipping 1340
56449 Sep 22 23:18:00.034 INFO [lossy] skipping 1341
56450 Sep 22 23:18:00.034 INFO [lossy] skipping 1340
56451 Sep 22 23:18:00.064 DEBG Write :1340 deps:[JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56452 Sep 22 23:18:00.065 INFO [lossy] sleeping 1 second
56453 Sep 22 23:18:00.335 ERRO [0] job id 1338 saw error GenericError("test error")
56454 Sep 22 23:18:01.091 WARN returning error on flush!
56455 Sep 22 23:18:01.091 DEBG Flush :1341 extent_limit None deps:[JobId(1340), JobId(1339), JobId(1338)] res:false f:147 g:1
56456 Sep 22 23:18:01.091 INFO [lossy] skipping 1357
56457 Sep 22 23:18:01.100 DEBG Flush :1341 extent_limit None deps:[JobId(1340), JobId(1339), JobId(1338)] res:true f:147 g:1
56458 Sep 22 23:18:01.100 INFO [lossy] skipping 1357
56459 Sep 22 23:18:01.100 INFO [lossy] skipping 1342
56460 Sep 22 23:18:01.100 INFO [lossy] skipping 1343
56461 Sep 22 23:18:01.100 WARN 1344 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56462 Sep 22 23:18:01.100 WARN 1345 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56463 Sep 22 23:18:01.100 INFO [lossy] skipping 1346
56464 Sep 22 23:18:01.100 WARN 1348 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56465 Sep 22 23:18:01.100 WARN 1349 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56466 Sep 22 23:18:01.100 WARN 1351 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56467 Sep 22 23:18:01.100 WARN 1352 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56468 Sep 22 23:18:01.100 INFO [lossy] skipping 1353
56469 Sep 22 23:18:01.100 WARN 1354 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56470 Sep 22 23:18:01.100 WARN 1355 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56471 Sep 22 23:18:01.100 INFO [lossy] skipping 1356
56472 Sep 22 23:18:01.100 INFO [lossy] skipping 1342
56473 Sep 22 23:18:01.129 DEBG Write :1343 deps:[JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56474 Sep 22 23:18:01.130 INFO [lossy] skipping 1346
56475 Sep 22 23:18:01.159 DEBG Write :1342 deps:[JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56476 Sep 22 23:18:01.160 WARN 1346 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56477 Sep 22 23:18:01.168 DEBG Flush :1344 extent_limit None deps:[JobId(1343), JobId(1342), JobId(1341)] res:true f:148 g:1
56478 Sep 22 23:18:01.197 DEBG Write :1345 deps:[JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56479 Sep 22 23:18:01.228 DEBG Write :1346 deps:[JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56480 Sep 22 23:18:01.237 DEBG Flush :1347 extent_limit None deps:[JobId(1346), JobId(1345), JobId(1344)] res:true f:149 g:1
56481 Sep 22 23:18:01.267 DEBG Write :1348 deps:[JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56482 Sep 22 23:18:01.297 DEBG Write :1349 deps:[JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56483 Sep 22 23:18:01.307 DEBG Flush :1350 extent_limit None deps:[JobId(1349), JobId(1348), JobId(1347)] res:true f:150 g:1
56484 Sep 22 23:18:01.307 INFO [lossy] skipping 1351
56485 Sep 22 23:18:01.336 DEBG Write :1352 deps:[JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56486 Sep 22 23:18:01.338 WARN 1353 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56487 Sep 22 23:18:01.338 WARN 1354 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56488 Sep 22 23:18:01.338 WARN 1355 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56489 Sep 22 23:18:01.338 WARN 1357 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56490 Sep 22 23:18:01.367 DEBG Write :1351 deps:[JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56491 Sep 22 23:18:01.376 DEBG Flush :1353 extent_limit None deps:[JobId(1352), JobId(1351), JobId(1350)] res:true f:151 g:1
56492 Sep 22 23:18:01.406 DEBG Write :1354 deps:[JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56493 Sep 22 23:18:01.437 DEBG Write :1355 deps:[JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56494 Sep 22 23:18:01.438 INFO [lossy] skipping 1356
56495 Sep 22 23:18:01.438 WARN 1357 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56496 Sep 22 23:18:01.446 DEBG Flush :1356 extent_limit None deps:[JobId(1355), JobId(1354), JobId(1353)] res:true f:152 g:1
56497 Sep 22 23:18:01.476 DEBG Write :1357 deps:[JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56498 Sep 22 23:18:01.477 INFO [lossy] sleeping 1 second
56499 Sep 22 23:18:02.511 INFO [lossy] skipping 1358
56500 Sep 22 23:18:02.511 INFO [lossy] skipping 1362
56501 Sep 22 23:18:02.511 INFO [lossy] skipping 1366
56502 Sep 22 23:18:02.511 INFO [lossy] skipping 1373
56503 Sep 22 23:18:02.511 INFO [lossy] skipping 1374
56504 Sep 22 23:18:02.511 INFO [lossy] skipping 1376
56505 Sep 22 23:18:02.511 INFO [lossy] skipping 1380
56506 Sep 22 23:18:02.542 DEBG Write :1358 deps:[JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56507 Sep 22 23:18:02.543 INFO [lossy] skipping 1362
56508 Sep 22 23:18:02.543 INFO [lossy] skipping 1366
56509 Sep 22 23:18:02.543 INFO [lossy] skipping 1374
56510 Sep 22 23:18:02.543 INFO [lossy] skipping 1366
56511 Sep 22 23:18:02.543 INFO [lossy] skipping 1359
56512 Sep 22 23:18:02.543 INFO [lossy] skipping 1367
56513 Sep 22 23:18:02.544 INFO [lossy] skipping 1371
56514 Sep 22 23:18:02.544 INFO [lossy] skipping 1372
56515 Sep 22 23:18:02.544 INFO [lossy] skipping 1375
56516 Sep 22 23:18:02.544 INFO [lossy] skipping 1376
56517 Sep 22 23:18:02.544 INFO [lossy] skipping 1377
56518 Sep 22 23:18:02.552 DEBG Flush :1359 extent_limit None deps:[JobId(1358), JobId(1357), JobId(1356)] res:true f:153 g:1
56519 Sep 22 23:18:02.552 INFO [lossy] skipping 1367
56520 Sep 22 23:18:02.552 INFO [lossy] skipping 1372
56521 Sep 22 23:18:02.552 WARN 1375 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
56522 Sep 22 23:18:02.552 INFO [lossy] skipping 1376
56523 Sep 22 23:18:02.552 INFO [lossy] skipping 1377
56524 Sep 22 23:18:02.552 WARN 1367 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56525 Sep 22 23:18:02.552 WARN 1372 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56526 Sep 22 23:18:02.552 WARN 1376 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
56527 Sep 22 23:18:02.552 INFO [lossy] skipping 1377
56528 Sep 22 23:18:02.581 DEBG Write :1360 deps:[JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56529 Sep 22 23:18:02.612 DEBG Write :1361 deps:[JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56530 Sep 22 23:18:02.622 DEBG Flush :1362 extent_limit None deps:[JobId(1361), JobId(1360), JobId(1359)] res:true f:154 g:1
56531 Sep 22 23:18:02.623 INFO [lossy] skipping 1363
56532 Sep 22 23:18:02.624 WARN returning error on write!
56533 Sep 22 23:18:02.624 DEBG Write :1364 deps:[JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:false
56534 Sep 22 23:18:02.624 WARN 1365 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56535 Sep 22 23:18:02.624 WARN 1366 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56536 Sep 22 23:18:02.624 WARN 1367 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56537 Sep 22 23:18:02.624 INFO [lossy] skipping 1368
56538 Sep 22 23:18:02.624 WARN 1369 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56539 Sep 22 23:18:02.624 WARN 1370 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56540 Sep 22 23:18:02.625 INFO [lossy] skipping 1371
56541 Sep 22 23:18:02.625 WARN 1372 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56542 Sep 22 23:18:02.625 INFO [lossy] skipping 1373
56543 Sep 22 23:18:02.625 WARN 1375 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56544 Sep 22 23:18:02.625 WARN 1376 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56545 Sep 22 23:18:02.625 INFO [lossy] skipping 1377
56546 Sep 22 23:18:02.625 WARN 1378 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
56547 Sep 22 23:18:02.625 WARN 1379 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
56548 Sep 22 23:18:02.655 DEBG Write :1363 deps:[JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56549 Sep 22 23:18:02.686 DEBG Write :1364 deps:[JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56550 Sep 22 23:18:02.688 INFO [lossy] skipping 1371
56551 Sep 22 23:18:02.688 INFO [lossy] skipping 1373
56552 Sep 22 23:18:02.688 INFO [lossy] skipping 1377
56553 Sep 22 23:18:02.688 INFO [lossy] skipping 1373
56554 Sep 22 23:18:02.688 WARN 1373 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56555 Sep 22 23:18:02.696 DEBG Flush :1365 extent_limit None deps:[JobId(1364), JobId(1363), JobId(1362)] res:true f:155 g:1
56556 Sep 22 23:18:02.697 WARN returning error on write!
56557 Sep 22 23:18:02.697 DEBG Write :1366 deps:[JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:false
56558 Sep 22 23:18:02.698 INFO [lossy] skipping 1367
56559 Sep 22 23:18:02.698 INFO [lossy] skipping 1368
56560 Sep 22 23:18:02.698 WARN 1369 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56561 Sep 22 23:18:02.698 WARN 1370 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56562 Sep 22 23:18:02.698 INFO [lossy] skipping 1371
56563 Sep 22 23:18:02.698 WARN 1372 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56564 Sep 22 23:18:02.698 INFO [lossy] skipping 1373
56565 Sep 22 23:18:02.698 WARN 1375 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56566 Sep 22 23:18:02.698 WARN 1376 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56567 Sep 22 23:18:02.698 WARN 1378 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56568 Sep 22 23:18:02.698 WARN 1379 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56569 Sep 22 23:18:02.727 DEBG Write :1366 deps:[JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56570 Sep 22 23:18:02.758 DEBG Write :1367 deps:[JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56571 Sep 22 23:18:02.768 DEBG Flush :1368 extent_limit None deps:[JobId(1367), JobId(1366), JobId(1365)] res:true f:156 g:1
56572 Sep 22 23:18:02.768 WARN 1371 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56573 Sep 22 23:18:02.768 WARN 1373 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56574 Sep 22 23:18:02.769 WARN returning error on write!
56575 Sep 22 23:18:02.769 DEBG Write :1369 deps:[JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:false
56576 Sep 22 23:18:02.770 INFO [lossy] skipping 1370
56577 Sep 22 23:18:02.770 WARN 1372 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56578 Sep 22 23:18:02.770 INFO [lossy] skipping 1375
56579 Sep 22 23:18:02.770 WARN 1376 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56580 Sep 22 23:18:02.770 WARN 1378 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56581 Sep 22 23:18:02.770 WARN 1379 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56582 Sep 22 23:18:02.770 INFO [lossy] skipping 1369
56583 Sep 22 23:18:02.800 DEBG Write :1370 deps:[JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56584 Sep 22 23:18:02.802 WARN 1375 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56585 Sep 22 23:18:02.802 INFO [lossy] skipping 1369
56586 Sep 22 23:18:02.802 INFO [lossy] skipping 1369
56587 Sep 22 23:18:02.802 INFO [lossy] skipping 1369
56588 Sep 22 23:18:02.802 INFO [lossy] skipping 1369
56589 Sep 22 23:18:02.831 DEBG Write :1369 deps:[JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56590 Sep 22 23:18:02.841 DEBG Flush :1371 extent_limit None deps:[JobId(1370), JobId(1369), JobId(1368)] res:true f:157 g:1
56591 Sep 22 23:18:02.871 DEBG Write :1372 deps:[JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56592 Sep 22 23:18:02.902 DEBG Write :1373 deps:[JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56593 Sep 22 23:18:02.912 DEBG Flush :1374 extent_limit None deps:[JobId(1373), JobId(1372), JobId(1371)] res:true f:158 g:1
56594 Sep 22 23:18:02.912 INFO [lossy] skipping 1375
56595 Sep 22 23:18:02.912 INFO [lossy] skipping 1376
56596 Sep 22 23:18:02.912 WARN 1377 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56597 Sep 22 23:18:02.912 WARN 1378 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56598 Sep 22 23:18:02.912 WARN 1379 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56599 Sep 22 23:18:02.912 INFO [lossy] skipping 1380
56600 Sep 22 23:18:02.913 INFO [lossy] skipping 1375
56601 Sep 22 23:18:02.942 DEBG Write :1376 deps:[JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56602 Sep 22 23:18:02.944 INFO [lossy] skipping 1375
56603 Sep 22 23:18:02.944 INFO [lossy] skipping 1375
56604 Sep 22 23:18:02.974 DEBG Write :1375 deps:[JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56605 Sep 22 23:18:02.975 INFO [lossy] sleeping 1 second
56606 Sep 22 23:18:03.921 ERRO [0] job id 1341 saw error GenericError("test error")
56607 Sep 22 23:18:03.921 ERRO [0] job id 1364 saw error GenericError("test error")
56608 Sep 22 23:18:03.921 ERRO [0] job id 1366 saw error GenericError("test error")
56609 Sep 22 23:18:03.921 ERRO [0] job id 1369 saw error GenericError("test error")
56610 Sep 22 23:18:03.977 INFO [lossy] skipping 1377
56611 Sep 22 23:18:03.977 INFO [lossy] skipping 1378
56612 Sep 22 23:18:03.977 INFO [lossy] skipping 1380
56613 Sep 22 23:18:03.977 INFO [lossy] skipping 1381
56614 Sep 22 23:18:03.977 INFO [lossy] skipping 1384
56615 Sep 22 23:18:03.977 INFO [lossy] skipping 1391
56616 Sep 22 23:18:03.977 INFO [lossy] skipping 1399
56617 Sep 22 23:18:03.977 INFO [lossy] skipping 1402
56618 Sep 22 23:18:03.986 DEBG Flush :1377 extent_limit None deps:[JobId(1376), JobId(1375), JobId(1374)] res:true f:159 g:1
56619 Sep 22 23:18:03.986 INFO [lossy] skipping 1378
56620 Sep 22 23:18:03.986 WARN 1380 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56621 Sep 22 23:18:03.986 INFO [lossy] skipping 1381
56622 Sep 22 23:18:03.986 INFO [lossy] skipping 1391
56623 Sep 22 23:18:04.016 DEBG Write :1378 deps:[JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56624 Sep 22 23:18:04.046 DEBG Write :1379 deps:[JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56625 Sep 22 23:18:04.047 WARN returning error on flush!
56626 Sep 22 23:18:04.047 DEBG Flush :1380 extent_limit None deps:[JobId(1379), JobId(1378), JobId(1377)] res:false f:160 g:1
56627 Sep 22 23:18:04.047 INFO [lossy] skipping 1382
56628 Sep 22 23:18:04.047 WARN 1385 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56629 Sep 22 23:18:04.047 WARN 1387 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56630 Sep 22 23:18:04.048 WARN 1389 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56631 Sep 22 23:18:04.048 WARN 1390 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56632 Sep 22 23:18:04.048 WARN 1392 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
56633 Sep 22 23:18:04.048 WARN 1393 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
56634 Sep 22 23:18:04.048 INFO [lossy] skipping 1394
56635 Sep 22 23:18:04.048 INFO [lossy] skipping 1395
56636 Sep 22 23:18:04.048 WARN 1396 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
56637 Sep 22 23:18:04.048 INFO [lossy] skipping 1397
56638 Sep 22 23:18:04.048 WARN 1398 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
56639 Sep 22 23:18:04.048 INFO [lossy] skipping 1400
56640 Sep 22 23:18:04.048 WARN 1401 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 8 deps, role: work
56641 Sep 22 23:18:04.048 WARN returning error on flush!
56642 Sep 22 23:18:04.048 DEBG Flush :1380 extent_limit None deps:[JobId(1379), JobId(1378), JobId(1377)] res:false f:160 g:1
56643 Sep 22 23:18:04.048 WARN 1382 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56644 Sep 22 23:18:04.048 WARN 1395 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
56645 Sep 22 23:18:04.048 INFO [lossy] skipping 1397
56646 Sep 22 23:18:04.048 INFO [lossy] skipping 1380
56647 Sep 22 23:18:04.048 INFO [lossy] skipping 1380
56648 Sep 22 23:18:04.056 DEBG Flush :1380 extent_limit None deps:[JobId(1379), JobId(1378), JobId(1377)] res:true f:160 g:1
56649 Sep 22 23:18:04.085 DEBG Write :1381 deps:[JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56650 Sep 22 23:18:04.086 INFO [lossy] skipping 1382
56651 Sep 22 23:18:04.086 WARN 1383 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56652 Sep 22 23:18:04.086 INFO [lossy] skipping 1384
56653 Sep 22 23:18:04.086 WARN 1385 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56654 Sep 22 23:18:04.086 WARN 1387 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56655 Sep 22 23:18:04.086 INFO [lossy] skipping 1388
56656 Sep 22 23:18:04.086 WARN 1389 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56657 Sep 22 23:18:04.086 WARN 1390 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56658 Sep 22 23:18:04.086 WARN 1392 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56659 Sep 22 23:18:04.086 WARN 1393 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56660 Sep 22 23:18:04.086 WARN 1395 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
56661 Sep 22 23:18:04.086 WARN 1396 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
56662 Sep 22 23:18:04.086 INFO [lossy] skipping 1397
56663 Sep 22 23:18:04.086 WARN 1398 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
56664 Sep 22 23:18:04.086 INFO [lossy] skipping 1399
56665 Sep 22 23:18:04.087 INFO [lossy] skipping 1400
56666 Sep 22 23:18:04.087 WARN 1401 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 7 deps, role: work
56667 Sep 22 23:18:04.087 INFO [lossy] skipping 1402
56668 Sep 22 23:18:04.087 INFO [lossy] skipping 1403
56669 Sep 22 23:18:04.087 WARN returning error on write!
56670 Sep 22 23:18:04.087 DEBG Write :1382 deps:[JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:false
56671 Sep 22 23:18:04.088 INFO [lossy] skipping 1384
56672 Sep 22 23:18:04.088 WARN 1399 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 6 deps, role: work
56673 Sep 22 23:18:04.088 INFO [lossy] skipping 1382
56674 Sep 22 23:18:04.088 WARN 1384 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56675 Sep 22 23:18:04.088 WARN returning error on write!
56676 Sep 22 23:18:04.088 DEBG Write :1382 deps:[JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:false
56677 Sep 22 23:18:04.089 INFO [lossy] skipping 1382
56678 Sep 22 23:18:04.118 DEBG Write :1382 deps:[JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56679 Sep 22 23:18:04.119 WARN returning error on flush!
56680 Sep 22 23:18:04.119 DEBG Flush :1383 extent_limit None deps:[JobId(1382), JobId(1381), JobId(1380)] res:false f:161 g:1
56681 Sep 22 23:18:04.119 INFO [lossy] skipping 1400
56682 Sep 22 23:18:04.119 INFO [lossy] skipping 1383
56683 Sep 22 23:18:04.127 DEBG Flush :1383 extent_limit None deps:[JobId(1382), JobId(1381), JobId(1380)] res:true f:161 g:1
56684 Sep 22 23:18:04.157 DEBG Write :1384 deps:[JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56685 Sep 22 23:18:04.188 DEBG Write :1385 deps:[JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56686 Sep 22 23:18:04.198 DEBG Flush :1386 extent_limit None deps:[JobId(1385), JobId(1384), JobId(1383)] res:true f:162 g:1
56687 Sep 22 23:18:04.198 INFO [lossy] skipping 1387
56688 Sep 22 23:18:04.198 WARN 1388 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56689 Sep 22 23:18:04.198 INFO [lossy] skipping 1389
56690 Sep 22 23:18:04.198 INFO [lossy] skipping 1390
56691 Sep 22 23:18:04.198 WARN 1392 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56692 Sep 22 23:18:04.198 WARN 1393 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56693 Sep 22 23:18:04.198 WARN 1395 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56694 Sep 22 23:18:04.198 WARN 1396 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56695 Sep 22 23:18:04.198 INFO [lossy] skipping 1398
56696 Sep 22 23:18:04.198 WARN 1399 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56697 Sep 22 23:18:04.198 WARN 1401 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 5 deps, role: work
56698 Sep 22 23:18:04.199 WARN returning error on write!
56699 Sep 22 23:18:04.199 DEBG Write :1387 deps:[JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:false
56700 Sep 22 23:18:04.199 INFO [lossy] skipping 1389
56701 Sep 22 23:18:04.200 WARN 1390 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56702 Sep 22 23:18:04.200 WARN 1398 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56703 Sep 22 23:18:04.229 DEBG Write :1387 deps:[JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56704 Sep 22 23:18:04.230 WARN 1389 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56705 Sep 22 23:18:04.230 INFO [lossy] skipping 1388
56706 Sep 22 23:18:04.230 INFO [lossy] skipping 1389
56707 Sep 22 23:18:04.230 INFO [lossy] skipping 1391
56708 Sep 22 23:18:04.230 INFO [lossy] skipping 1392
56709 Sep 22 23:18:04.230 INFO [lossy] skipping 1393
56710 Sep 22 23:18:04.230 INFO [lossy] skipping 1395
56711 Sep 22 23:18:04.230 INFO [lossy] skipping 1388
56712 Sep 22 23:18:04.234 DEBG Flush :1388 extent_limit None deps:[JobId(1387), JobId(1386)] res:true f:163 g:1
56713 Sep 22 23:18:04.264 DEBG Write :1389 deps:[JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56714 Sep 22 23:18:04.265 INFO [lossy] skipping 1390
56715 Sep 22 23:18:04.265 WARN 1391 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56716 Sep 22 23:18:04.265 WARN 1392 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56717 Sep 22 23:18:04.265 WARN 1393 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56718 Sep 22 23:18:04.265 WARN 1395 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56719 Sep 22 23:18:04.265 INFO [lossy] skipping 1396
56720 Sep 22 23:18:04.265 WARN 1398 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56721 Sep 22 23:18:04.265 WARN 1399 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 3 deps, role: work
56722 Sep 22 23:18:04.265 INFO [lossy] skipping 1400
56723 Sep 22 23:18:04.265 WARN 1401 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 4 deps, role: work
56724 Sep 22 23:18:04.265 INFO [lossy] skipping 1402
56725 Sep 22 23:18:04.265 INFO [lossy] skipping 1390
56726 Sep 22 23:18:04.265 WARN 1396 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56727 Sep 22 23:18:04.265 INFO [lossy] skipping 1402
56728 Sep 22 23:18:04.265 INFO [lossy] skipping 1390
56729 Sep 22 23:18:04.295 DEBG Write :1390 deps:[JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56730 Sep 22 23:18:04.296 INFO [lossy] sleeping 1 second
56731 Sep 22 23:18:04.296 ERRO [0] job id 1380 saw error GenericError("test error")
56732 Sep 22 23:18:04.296 ERRO [0] job id 1380 saw error GenericError("test error")
56733 Sep 22 23:18:04.296 ERRO [0] job id 1382 saw error GenericError("test error")
56734 Sep 22 23:18:04.296 ERRO [0] job id 1382 saw error GenericError("test error")
56735 Sep 22 23:18:04.296 ERRO [0] job id 1383 saw error GenericError("test error")
56736 Sep 22 23:18:04.296 ERRO [0] job id 1387 saw error GenericError("test error")
56737 Sep 22 23:18:05.296 WARN returning error on flush!
56738 Sep 22 23:18:05.296 DEBG Flush :1391 extent_limit None deps:[JobId(1390), JobId(1389), JobId(1388)] res:false f:164 g:1
56739 Sep 22 23:18:05.296 INFO [lossy] skipping 1392
56740 Sep 22 23:18:05.296 INFO [lossy] skipping 1394
56741 Sep 22 23:18:05.297 INFO [lossy] skipping 1402
56742 Sep 22 23:18:05.297 INFO [lossy] skipping 1403
56743 Sep 22 23:18:05.305 DEBG Flush :1391 extent_limit None deps:[JobId(1390), JobId(1389), JobId(1388)] res:true f:164 g:1
56744 Sep 22 23:18:05.335 DEBG Write :1392 deps:[JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56745 Sep 22 23:18:05.336 WARN 1394 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56746 Sep 22 23:18:05.336 INFO [lossy] skipping 1402
56747 Sep 22 23:18:05.336 INFO [lossy] skipping 1402
56748 Sep 22 23:18:05.336 INFO [lossy] sleeping 1 second
56749 Sep 22 23:18:05.336 ERRO [0] job id 1391 saw error GenericError("test error")
56750 Sep 22 23:18:06.338 INFO [lossy] skipping 1393
56751 Sep 22 23:18:06.338 INFO [lossy] skipping 1394
56752 Sep 22 23:18:06.338 WARN 1395 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56753 Sep 22 23:18:06.338 WARN 1396 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56754 Sep 22 23:18:06.338 WARN 1398 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56755 Sep 22 23:18:06.338 WARN 1399 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56756 Sep 22 23:18:06.338 INFO [lossy] skipping 1401
56757 Sep 22 23:18:06.338 INFO [lossy] skipping 1403
56758 Sep 22 23:18:06.369 DEBG Write :1393 deps:[JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56759 Sep 22 23:18:06.379 DEBG Flush :1394 extent_limit None deps:[JobId(1393), JobId(1392), JobId(1391)] res:true f:165 g:1
56760 Sep 22 23:18:06.379 WARN 1401 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 2 deps, role: work
56761 Sep 22 23:18:06.379 INFO [lossy] skipping 1403
56762 Sep 22 23:18:06.408 DEBG Write :1395 deps:[JobId(1394), JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56763 Sep 22 23:18:06.409 INFO [lossy] skipping 1396
56764 Sep 22 23:18:06.409 INFO [lossy] skipping 1397
56765 Sep 22 23:18:06.409 WARN 1398 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56766 Sep 22 23:18:06.409 WARN 1399 job Write for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
56767 Sep 22 23:18:06.439 DEBG Write :1396 deps:[JobId(1394), JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56768 Sep 22 23:18:06.448 DEBG Flush :1397 extent_limit None deps:[JobId(1396), JobId(1395), JobId(1394)] res:true f:166 g:1
56769 Sep 22 23:18:06.478 DEBG Write :1398 deps:[JobId(1397), JobId(1394), JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328)] res:true
56770 Sep 22 23:18:06.509 DEBG Write :1399 deps:[JobId(1397), JobId(1394), JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1328), JobId(1327)] res:true
56771 Sep 22 23:18:06.518 DEBG Flush :1400 extent_limit None deps:[JobId(1399), JobId(1398), JobId(1397)] res:true f:167 g:1
56772 Sep 22 23:18:06.547 DEBG Write :1401 deps:[JobId(1400), JobId(1397), JobId(1394), JobId(1391), JobId(1388), JobId(1386), JobId(1383), JobId(1380), JobId(1377), JobId(1374), JobId(1371), JobId(1368), JobId(1365), JobId(1362), JobId(1359), JobId(1356), JobId(1353), JobId(1350), JobId(1347), JobId(1344), JobId(1341), JobId(1338), JobId(1335), JobId(1332), JobId(1330), JobId(1329), JobId(1328)] res:true
56773 Sep 22 23:18:06.555 DEBG Read :1402 deps:[JobId(1400)] res:true
56774 Sep 22 23:18:06.578 WARN returning error on flush!
56775 Sep 22 23:18:06.578 DEBG Flush :1403 extent_limit None deps:[JobId(1402), JobId(1401), JobId(1400)] res:false f:168 g:1
56776 Sep 22 23:18:06.578 INFO [lossy] skipping 1403
56777 Sep 22 23:18:06.578 INFO [lossy] skipping 1403
56778 Sep 22 23:18:06.578 INFO [lossy] skipping 1403
56779 Sep 22 23:18:06.582 DEBG Flush :1403 extent_limit None deps:[JobId(1402), JobId(1401), JobId(1400)] res:true f:168 g:1
56780 Sep 22 23:18:06.582 INFO [lossy] sleeping 1 second
56781 Sep 22 23:18:06.958 DEBG [0] Read AckReady 1402, : downstairs
56782 Sep 22 23:18:06.959 ERRO [0] job id 1403 saw error GenericError("test error")
56783 Sep 22 23:18:06.959 DEBG up_ds_listen was notified
56784 Sep 22 23:18:06.959 DEBG up_ds_listen process 1402
56785 Sep 22 23:18:06.959 DEBG [A] ack job 1402:403, : downstairs
56786 Sep 22 23:18:07.012 DEBG up_ds_listen checked 1 jobs, back to waiting
56787 Sep 22 23:18:07.013 DEBG IO Read 1404 has deps [JobId(1403)]
56788 Sep 22 23:18:07.515 DEBG IO Flush 1405 has deps [JobId(1404), JobId(1403)]
56789 Sep 22 23:18:07.582 INFO [lossy] skipping 1404
56790 Sep 22 23:18:07.589 DEBG Read :1404 deps:[JobId(1403)] res:true
56791 Sep 22 23:18:07.612 INFO [lossy] sleeping 1 second
56792 Sep 22 23:18:07.989 DEBG [0] Read AckReady 1404, : downstairs
56793 Sep 22 23:18:07.990 DEBG up_ds_listen was notified
56794 Sep 22 23:18:07.990 DEBG up_ds_listen process 1404
56795 Sep 22 23:18:07.990 DEBG [A] ack job 1404:405, : downstairs
56796 Sep 22 23:18:08.044 DEBG up_ds_listen checked 1 jobs, back to waiting
56797 Sep 22 23:18:08.045 DEBG IO Read 1406 has deps [JobId(1405)]
56798 Sep 22 23:18:08.545 DEBG IO Flush 1407 has deps [JobId(1406), JobId(1405)]
56799 Sep 22 23:18:08.612 DEBG Flush :1405 extent_limit None deps:[JobId(1404), JobId(1403)] res:true f:169 g:1
56800 Sep 22 23:18:08.612 INFO [lossy] skipping 1406
56801 Sep 22 23:18:08.612 INFO [lossy] skipping 1406
56802 Sep 22 23:18:08.619 DEBG Read :1406 deps:[JobId(1405)] res:true
56803 Sep 22 23:18:08.642 INFO [lossy] skipping 1407
56804 Sep 22 23:18:08.642 DEBG Flush :1407 extent_limit None deps:[JobId(1406), JobId(1405)] res:true f:170 g:1
56805 Sep 22 23:18:08.642 INFO [lossy] sleeping 1 second
56806 Sep 22 23:18:09.019 DEBG [0] Read AckReady 1406, : downstairs
56807 Sep 22 23:18:09.020 DEBG up_ds_listen was notified
56808 Sep 22 23:18:09.020 DEBG up_ds_listen process 1406
56809 Sep 22 23:18:09.020 DEBG [A] ack job 1406:407, : downstairs
56810 Sep 22 23:18:09.073 DEBG up_ds_listen checked 1 jobs, back to waiting
56811 Sep 22 23:18:09.074 DEBG IO Read 1408 has deps [JobId(1407)]
56812 Sep 22 23:18:09.575 DEBG IO Flush 1409 has deps [JobId(1408), JobId(1407)]
56813 Sep 22 23:18:09.643 INFO [lossy] skipping 1408
56814 Sep 22 23:18:09.643 INFO [lossy] skipping 1409
56815 Sep 22 23:18:09.650 DEBG Read :1408 deps:[JobId(1407)] res:true
56816 Sep 22 23:18:09.672 DEBG Flush :1409 extent_limit None deps:[JobId(1408), JobId(1407)] res:true f:171 g:1
56817 Sep 22 23:18:09.672 INFO [lossy] sleeping 1 second
56818 Sep 22 23:18:10.049 DEBG [0] Read AckReady 1408, : downstairs
56819 Sep 22 23:18:10.050 DEBG up_ds_listen was notified
56820 Sep 22 23:18:10.050 DEBG up_ds_listen process 1408
56821 Sep 22 23:18:10.050 DEBG [A] ack job 1408:409, : downstairs
56822 Sep 22 23:18:10.102 DEBG up_ds_listen checked 1 jobs, back to waiting
56823 Sep 22 23:18:10.104 DEBG IO Read 1410 has deps [JobId(1409)]
56824 Sep 22 23:18:10.604 DEBG IO Flush 1411 has deps [JobId(1410), JobId(1409)]
56825 Sep 22 23:18:10.674 WARN returning error on read!
56826 Sep 22 23:18:10.674 DEBG Read :1410 deps:[JobId(1409)] res:false
56827 Sep 22 23:18:10.674 WARN returning error on read!
56828 Sep 22 23:18:10.674 DEBG Read :1410 deps:[JobId(1409)] res:false
56829 Sep 22 23:18:10.674 INFO [lossy] skipping 1410
56830 Sep 22 23:18:10.674 INFO [lossy] skipping 1410
56831 Sep 22 23:18:10.674 INFO [lossy] skipping 1410
56832 Sep 22 23:18:10.674 INFO [lossy] skipping 1410
56833 Sep 22 23:18:10.681 DEBG Read :1410 deps:[JobId(1409)] res:true
56834 Sep 22 23:18:10.702 ERRO [0] job id 1410 saw error GenericError("test error")
56835 Sep 22 23:18:10.702 ERRO [0] job id 1410 saw error GenericError("test error")
56836 Sep 22 23:18:10.704 INFO [lossy] skipping 1411
56837 Sep 22 23:18:10.704 WARN returning error on flush!
56838 Sep 22 23:18:10.704 DEBG Flush :1411 extent_limit None deps:[JobId(1410), JobId(1409)] res:false f:172 g:1
56839 Sep 22 23:18:10.704 DEBG Flush :1411 extent_limit None deps:[JobId(1410), JobId(1409)] res:true f:172 g:1
56840 Sep 22 23:18:10.704 INFO [lossy] sleeping 1 second
56841 Sep 22 23:18:11.083 DEBG [0] Read AckReady 1410, : downstairs
56842 Sep 22 23:18:11.084 ERRO [0] job id 1411 saw error GenericError("test error")
56843 Sep 22 23:18:11.084 DEBG up_ds_listen was notified
56844 Sep 22 23:18:11.084 DEBG up_ds_listen process 1410
56845 Sep 22 23:18:11.084 DEBG [A] ack job 1410:411, : downstairs
56846 Sep 22 23:18:11.136 DEBG up_ds_listen checked 1 jobs, back to waiting
56847 Sep 22 23:18:11.138 DEBG IO Read 1412 has deps [JobId(1411)]
56848 Sep 22 23:18:11.638 DEBG IO Flush 1413 has deps [JobId(1412), JobId(1411)]
56849 Sep 22 23:18:11.712 DEBG Read :1412 deps:[JobId(1411)] res:true
56850 Sep 22 23:18:11.735 WARN returning error on flush!
56851 Sep 22 23:18:11.735 DEBG Flush :1413 extent_limit None deps:[JobId(1412), JobId(1411)] res:false f:173 g:1
56852 Sep 22 23:18:11.735 DEBG Flush :1413 extent_limit None deps:[JobId(1412), JobId(1411)] res:true f:173 g:1
56853 Sep 22 23:18:11.735 INFO [lossy] sleeping 1 second
56854 Sep 22 23:18:12.111 DEBG [0] Read AckReady 1412, : downstairs
56855 Sep 22 23:18:12.112 ERRO [0] job id 1413 saw error GenericError("test error")
56856 Sep 22 23:18:12.112 DEBG up_ds_listen was notified
56857 Sep 22 23:18:12.112 DEBG up_ds_listen process 1412
56858 Sep 22 23:18:12.112 DEBG [A] ack job 1412:413, : downstairs
56859 Sep 22 23:18:12.164 DEBG up_ds_listen checked 1 jobs, back to waiting
56860 Sep 22 23:18:12.166 DEBG IO Read 1414 has deps [JobId(1413)]
56861 Sep 22 23:18:12.666 DEBG IO Flush 1415 has deps [JobId(1414), JobId(1413)]
56862 Sep 22 23:18:12.743 DEBG Read :1414 deps:[JobId(1413)] res:true
56863 Sep 22 23:18:12.766 DEBG Flush :1415 extent_limit None deps:[JobId(1414), JobId(1413)] res:true f:174 g:1
56864 Sep 22 23:18:12.766 INFO [lossy] sleeping 1 second
56865 Sep 22 23:18:13.143 DEBG [0] Read AckReady 1414, : downstairs
56866 Sep 22 23:18:13.144 DEBG up_ds_listen was notified
56867 Sep 22 23:18:13.144 DEBG up_ds_listen process 1414
56868 Sep 22 23:18:13.144 DEBG [A] ack job 1414:415, : downstairs
56869 Sep 22 23:18:13.197 DEBG up_ds_listen checked 1 jobs, back to waiting
56870 Sep 22 23:18:13.198 DEBG IO Read 1416 has deps [JobId(1415)]
56871 Sep 22 23:18:13.700 DEBG IO Flush 1417 has deps [JobId(1416), JobId(1415)]
56872 Sep 22 23:18:13.773 DEBG Read :1416 deps:[JobId(1415)] res:true
56873 Sep 22 23:18:13.796 DEBG Flush :1417 extent_limit None deps:[JobId(1416), JobId(1415)] res:true f:175 g:1
56874 Sep 22 23:18:13.796 INFO [lossy] sleeping 1 second
56875 Sep 22 23:18:14.172 DEBG [0] Read AckReady 1416, : downstairs
56876 Sep 22 23:18:14.173 DEBG up_ds_listen was notified
56877 Sep 22 23:18:14.173 DEBG up_ds_listen process 1416
56878 Sep 22 23:18:14.173 DEBG [A] ack job 1416:417, : downstairs
56879 Sep 22 23:18:14.226 DEBG up_ds_listen checked 1 jobs, back to waiting
56880 Sep 22 23:18:14.227 DEBG IO Read 1418 has deps [JobId(1417)]
56881 Sep 22 23:18:14.728 DEBG IO Flush 1419 has deps [JobId(1418), JobId(1417)]
56882 Sep 22 23:18:14.798 WARN returning error on read!
56883 Sep 22 23:18:14.798 DEBG Read :1418 deps:[JobId(1417)] res:false
56884 Sep 22 23:18:14.798 WARN returning error on read!
56885 Sep 22 23:18:14.798 DEBG Read :1418 deps:[JobId(1417)] res:false
56886 Sep 22 23:18:14.804 DEBG Read :1418 deps:[JobId(1417)] res:true
56887 Sep 22 23:18:14.826 ERRO [0] job id 1418 saw error GenericError("test error")
56888 Sep 22 23:18:14.826 ERRO [0] job id 1418 saw error GenericError("test error")
56889 Sep 22 23:18:14.828 DEBG Flush :1419 extent_limit None deps:[JobId(1418), JobId(1417)] res:true f:176 g:1
56890 Sep 22 23:18:14.828 INFO [lossy] sleeping 1 second
56891 Sep 22 23:18:15.207 DEBG [0] Read AckReady 1418, : downstairs
56892 Sep 22 23:18:15.208 DEBG up_ds_listen was notified
56893 Sep 22 23:18:15.208 DEBG up_ds_listen process 1418
56894 Sep 22 23:18:15.208 DEBG [A] ack job 1418:419, : downstairs
56895 Sep 22 23:18:15.260 DEBG up_ds_listen checked 1 jobs, back to waiting
56896 Sep 22 23:18:15.262 DEBG IO Read 1420 has deps [JobId(1419)]
56897 Sep 22 23:18:15.762 DEBG IO Flush 1421 has deps [JobId(1420), JobId(1419)]
56898 Sep 22 23:18:15.829 INFO [lossy] skipping 1420
56899 Sep 22 23:18:15.835 DEBG Read :1420 deps:[JobId(1419)] res:true
56900 Sep 22 23:18:15.858 INFO [lossy] sleeping 1 second
56901 Sep 22 23:18:16.234 DEBG [0] Read AckReady 1420, : downstairs
56902 Sep 22 23:18:16.235 DEBG up_ds_listen was notified
56903 Sep 22 23:18:16.235 DEBG up_ds_listen process 1420
56904 Sep 22 23:18:16.235 DEBG [A] ack job 1420:421, : downstairs
56905 Sep 22 23:18:16.287 DEBG up_ds_listen checked 1 jobs, back to waiting
56906 Sep 22 23:18:16.289 DEBG IO Read 1422 has deps [JobId(1421)]
56907 Sep 22 23:18:16.789 DEBG IO Flush 1423 has deps [JobId(1422), JobId(1421)]
56908 Sep 22 23:18:16.860 INFO [lossy] skipping 1421
56909 Sep 22 23:18:16.860 DEBG Flush :1421 extent_limit None deps:[JobId(1420), JobId(1419)] res:true f:177 g:1
56910 Sep 22 23:18:16.866 DEBG Read :1422 deps:[JobId(1421)] res:true
56911 Sep 22 23:18:16.889 DEBG Flush :1423 extent_limit None deps:[JobId(1422), JobId(1421)] res:true f:178 g:1
56912 Sep 22 23:18:16.889 INFO [lossy] sleeping 1 second
56913 Sep 22 23:18:17.265 DEBG [0] Read AckReady 1422, : downstairs
56914 Sep 22 23:18:17.266 DEBG up_ds_listen was notified
56915 Sep 22 23:18:17.266 DEBG up_ds_listen process 1422
56916 Sep 22 23:18:17.266 DEBG [A] ack job 1422:423, : downstairs
56917 Sep 22 23:18:17.319 DEBG up_ds_listen checked 1 jobs, back to waiting
56918 Sep 22 23:18:17.320 DEBG IO Read 1424 has deps [JobId(1423)]
56919 Sep 22 23:18:17.821 DEBG IO Flush 1425 has deps [JobId(1424), JobId(1423)]
56920 Sep 22 23:18:17.896 DEBG Read :1424 deps:[JobId(1423)] res:true
56921 Sep 22 23:18:17.919 WARN returning error on flush!
56922 Sep 22 23:18:17.919 DEBG Flush :1425 extent_limit None deps:[JobId(1424), JobId(1423)] res:false f:179 g:1
56923 Sep 22 23:18:17.919 WARN returning error on flush!
56924 Sep 22 23:18:17.919 DEBG Flush :1425 extent_limit None deps:[JobId(1424), JobId(1423)] res:false f:179 g:1
56925 Sep 22 23:18:17.919 DEBG Flush :1425 extent_limit None deps:[JobId(1424), JobId(1423)] res:true f:179 g:1
56926 Sep 22 23:18:17.919 INFO [lossy] sleeping 1 second
56927 Sep 22 23:18:18.298 DEBG [0] Read AckReady 1424, : downstairs
56928 Sep 22 23:18:18.299 ERRO [0] job id 1425 saw error GenericError("test error")
56929 Sep 22 23:18:18.299 ERRO [0] job id 1425 saw error GenericError("test error")
56930 Sep 22 23:18:18.299 DEBG up_ds_listen was notified
56931 Sep 22 23:18:18.299 DEBG up_ds_listen process 1424
56932 Sep 22 23:18:18.300 DEBG [A] ack job 1424:425, : downstairs
56933 Sep 22 23:18:18.353 DEBG up_ds_listen checked 1 jobs, back to waiting
56934 Sep 22 23:18:18.354 DEBG IO Read 1426 has deps [JobId(1425)]
56935 Sep 22 23:18:18.855 DEBG IO Flush 1427 has deps [JobId(1426), JobId(1425)]
56936 Sep 22 23:18:18.926 DEBG Read :1426 deps:[JobId(1425)] res:true
56937 Sep 22 23:18:18.949 DEBG Flush :1427 extent_limit None deps:[JobId(1426), JobId(1425)] res:true f:180 g:1
56938 Sep 22 23:18:18.949 INFO [lossy] sleeping 1 second
56939 Sep 22 23:18:19.325 DEBG [0] Read AckReady 1426, : downstairs
56940 Sep 22 23:18:19.326 DEBG up_ds_listen was notified
56941 Sep 22 23:18:19.326 DEBG up_ds_listen process 1426
56942 Sep 22 23:18:19.326 DEBG [A] ack job 1426:427, : downstairs
56943 Sep 22 23:18:19.379 DEBG up_ds_listen checked 1 jobs, back to waiting
56944 Sep 22 23:18:19.380 DEBG IO Read 1428 has deps [JobId(1427)]
56945 Sep 22 23:18:19.881 DEBG IO Flush 1429 has deps [JobId(1428), JobId(1427)]
56946 Sep 22 23:18:19.950 INFO [lossy] skipping 1428
56947 Sep 22 23:18:19.950 INFO [lossy] skipping 1429
56948 Sep 22 23:18:19.950 INFO [lossy] skipping 1428
56949 Sep 22 23:18:19.950 INFO [lossy] skipping 1428
56950 Sep 22 23:18:19.957 DEBG Read :1428 deps:[JobId(1427)] res:true
56951 Sep 22 23:18:19.979 DEBG Flush :1429 extent_limit None deps:[JobId(1428), JobId(1427)] res:true f:181 g:1
56952 Sep 22 23:18:19.979 INFO [lossy] sleeping 1 second
56953 Sep 22 23:18:20.356 DEBG [0] Read AckReady 1428, : downstairs
56954 Sep 22 23:18:20.357 DEBG up_ds_listen was notified
56955 Sep 22 23:18:20.357 DEBG up_ds_listen process 1428
56956 Sep 22 23:18:20.357 DEBG [A] ack job 1428:429, : downstairs
56957 Sep 22 23:18:20.409 DEBG up_ds_listen checked 1 jobs, back to waiting
56958 Sep 22 23:18:20.410 DEBG IO Read 1430 has deps [JobId(1429)]
56959 Sep 22 23:18:20.911 DEBG IO Flush 1431 has deps [JobId(1430), JobId(1429)]
56960 Sep 22 23:18:20.981 INFO [lossy] skipping 1430
56961 Sep 22 23:18:20.981 INFO [lossy] skipping 1430
56962 Sep 22 23:18:20.981 INFO [lossy] skipping 1430
56963 Sep 22 23:18:20.981 INFO [lossy] skipping 1430
56964 Sep 22 23:18:20.988 DEBG Read :1430 deps:[JobId(1429)] res:true
56965 Sep 22 23:18:21.011 INFO [lossy] sleeping 1 second
56966 Sep 22 23:18:21.387 DEBG [0] Read AckReady 1430, : downstairs
56967 Sep 22 23:18:21.388 DEBG up_ds_listen was notified
56968 Sep 22 23:18:21.388 DEBG up_ds_listen process 1430
56969 Sep 22 23:18:21.388 DEBG [A] ack job 1430:431, : downstairs
56970 Sep 22 23:18:21.441 DEBG up_ds_listen checked 1 jobs, back to waiting
56971 Sep 22 23:18:21.442 DEBG IO Read 1432 has deps [JobId(1431)]
56972 Sep 22 23:18:21.943 DEBG IO Flush 1433 has deps [JobId(1432), JobId(1431)]
56973 Sep 22 23:18:22.012 DEBG Flush :1431 extent_limit None deps:[JobId(1430), JobId(1429)] res:true f:182 g:1
56974 Sep 22 23:18:22.018 DEBG Read :1432 deps:[JobId(1431)] res:true
56975 Sep 22 23:18:22.041 INFO [lossy] skipping 1433
56976 Sep 22 23:18:22.041 INFO [lossy] skipping 1433
56977 Sep 22 23:18:22.041 INFO [lossy] skipping 1433
56978 Sep 22 23:18:22.041 DEBG Flush :1433 extent_limit None deps:[JobId(1432), JobId(1431)] res:true f:183 g:1
56979 Sep 22 23:18:22.041 INFO [lossy] sleeping 1 second
56980 Sep 22 23:18:22.416 DEBG [0] Read AckReady 1432, : downstairs
56981 Sep 22 23:18:22.417 DEBG up_ds_listen was notified
56982 Sep 22 23:18:22.417 DEBG up_ds_listen process 1432
56983 Sep 22 23:18:22.417 DEBG [A] ack job 1432:433, : downstairs
56984 Sep 22 23:18:22.469 DEBG up_ds_listen checked 1 jobs, back to waiting
56985 Sep 22 23:18:22.471 DEBG IO Read 1434 has deps [JobId(1433)]
56986 Sep 22 23:18:22.972 DEBG IO Flush 1435 has deps [JobId(1434), JobId(1433)]
56987 Sep 22 23:18:23.048 DEBG Read :1434 deps:[JobId(1433)] res:true
56988 Sep 22 23:18:23.070 DEBG Flush :1435 extent_limit None deps:[JobId(1434), JobId(1433)] res:true f:184 g:1
56989 Sep 22 23:18:23.071 INFO [lossy] sleeping 1 second
56990 Sep 22 23:18:23.449 DEBG [0] Read AckReady 1434, : downstairs
56991 Sep 22 23:18:23.450 DEBG up_ds_listen was notified
56992 Sep 22 23:18:23.450 DEBG up_ds_listen process 1434
56993 Sep 22 23:18:23.450 DEBG [A] ack job 1434:435, : downstairs
56994 Sep 22 23:18:23.503 DEBG up_ds_listen checked 1 jobs, back to waiting
56995 Sep 22 23:18:23.504 DEBG IO Read 1436 has deps [JobId(1435)]
56996 Sep 22 23:18:24.006 DEBG IO Flush 1437 has deps [JobId(1436), JobId(1435)]
56997 Sep 22 23:18:24.078 DEBG Read :1436 deps:[JobId(1435)] res:true
56998 Sep 22 23:18:24.101 DEBG Flush :1437 extent_limit None deps:[JobId(1436), JobId(1435)] res:true f:185 g:1
56999 Sep 22 23:18:24.102 INFO [lossy] sleeping 1 second
57000 Sep 22 23:18:24.480 DEBG [0] Read AckReady 1436, : downstairs
57001 Sep 22 23:18:24.481 DEBG up_ds_listen was notified
57002 Sep 22 23:18:24.481 DEBG up_ds_listen process 1436
57003 Sep 22 23:18:24.481 DEBG [A] ack job 1436:437, : downstairs
57004 Sep 22 23:18:24.534 DEBG up_ds_listen checked 1 jobs, back to waiting
57005 Sep 22 23:18:24.535 DEBG IO Read 1438 has deps [JobId(1437)]
57006 Sep 22 23:18:25.036 DEBG IO Flush 1439 has deps [JobId(1438), JobId(1437)]
57007 Sep 22 23:18:25.109 DEBG Read :1438 deps:[JobId(1437)] res:true
57008 Sep 22 23:18:25.132 DEBG Flush :1439 extent_limit None deps:[JobId(1438), JobId(1437)] res:true f:186 g:1
57009 Sep 22 23:18:25.509 DEBG [0] Read AckReady 1438, : downstairs
57010 Sep 22 23:18:25.510 DEBG up_ds_listen was notified
57011 Sep 22 23:18:25.510 DEBG up_ds_listen process 1438
57012 Sep 22 23:18:25.510 DEBG [A] ack job 1438:439, : downstairs
57013 Sep 22 23:18:25.563 DEBG up_ds_listen checked 1 jobs, back to waiting
57014 Sep 22 23:18:25.565 DEBG IO Read 1440 has deps [JobId(1439)]
57015 Sep 22 23:18:25.569 WARN returning error on read!
57016 Sep 22 23:18:25.569 DEBG Read :1440 deps:[JobId(1439)] res:false
57017 Sep 22 23:18:25.569 INFO [lossy] skipping 1440
57018 Sep 22 23:18:25.575 DEBG Read :1440 deps:[JobId(1439)] res:true
57019 Sep 22 23:18:25.597 ERRO [0] job id 1440 saw error GenericError("test error")
57020 Sep 22 23:18:25.978 DEBG [0] Read AckReady 1440, : downstairs
57021 Sep 22 23:18:25.979 DEBG up_ds_listen was notified
57022 Sep 22 23:18:25.979 DEBG up_ds_listen process 1440
57023 Sep 22 23:18:25.979 DEBG [A] ack job 1440:441, : downstairs
57024 Sep 22 23:18:26.032 DEBG up_ds_listen checked 1 jobs, back to waiting
57025 Sep 22 23:18:26.034 DEBG IO Read 1441 has deps [JobId(1439)]
57026 Sep 22 23:18:26.044 DEBG Read :1441 deps:[JobId(1439)] res:true
57027 Sep 22 23:18:26.066 DEBG IO Flush 1442 has deps [JobId(1441), JobId(1440), JobId(1439)]
57028 Sep 22 23:18:26.068 INFO [lossy] sleeping 1 second
57029 Sep 22 23:18:26.445 DEBG [0] Read AckReady 1441, : downstairs
57030 Sep 22 23:18:26.446 DEBG up_ds_listen was notified
57031 Sep 22 23:18:26.446 DEBG up_ds_listen process 1441
57032 Sep 22 23:18:26.447 DEBG [A] ack job 1441:442, : downstairs
57033 Sep 22 23:18:26.499 DEBG up_ds_listen checked 1 jobs, back to waiting
57034 Sep 22 23:18:26.500 DEBG IO Read 1443 has deps [JobId(1442)]
57035 Sep 22 23:18:26.567 DEBG IO Flush 1444 has deps [JobId(1443), JobId(1442)]
57036 Sep 22 23:18:27.069 DEBG Flush :1442 extent_limit None deps:[JobId(1441), JobId(1440), JobId(1439)] res:true f:187 g:1
57037 Sep 22 23:18:27.075 DEBG Read :1443 deps:[JobId(1442)] res:true
57038 Sep 22 23:18:27.098 INFO [lossy] skipping 1444
57039 Sep 22 23:18:27.098 DEBG Flush :1444 extent_limit None deps:[JobId(1443), JobId(1442)] res:true f:188 g:1
57040 Sep 22 23:18:27.476 DEBG [0] Read AckReady 1443, : downstairs
57041 Sep 22 23:18:27.477 DEBG up_ds_listen was notified
57042 Sep 22 23:18:27.477 DEBG up_ds_listen process 1443
57043 Sep 22 23:18:27.477 DEBG [A] ack job 1443:444, : downstairs
57044 Sep 22 23:18:27.530 DEBG up_ds_listen checked 1 jobs, back to waiting
57045 Sep 22 23:18:27.531 DEBG IO Read 1445 has deps [JobId(1444)]
57046 Sep 22 23:18:27.542 DEBG Read :1445 deps:[JobId(1444)] res:true
57047 Sep 22 23:18:27.945 DEBG [0] Read AckReady 1445, : downstairs
57048 Sep 22 23:18:27.946 DEBG up_ds_listen was notified
57049 Sep 22 23:18:27.946 DEBG up_ds_listen process 1445
57050 Sep 22 23:18:27.946 DEBG [A] ack job 1445:446, : downstairs
57051 Sep 22 23:18:27.998 DEBG up_ds_listen checked 1 jobs, back to waiting
57052 Sep 22 23:18:28.000 DEBG IO Flush 1446 has deps [JobId(1445), JobId(1444)]
57053 Sep 22 23:18:28.000 DEBG IO Read 1447 has deps [JobId(1446)]
57054 Sep 22 23:18:28.002 WARN returning error on flush!
57055 Sep 22 23:18:28.002 DEBG Flush :1446 extent_limit None deps:[JobId(1445), JobId(1444)] res:false f:189 g:1
57056 Sep 22 23:18:28.002 INFO [lossy] skipping 1446
57057 Sep 22 23:18:28.002 DEBG Flush :1446 extent_limit None deps:[JobId(1445), JobId(1444)] res:true f:189 g:1
57058 Sep 22 23:18:28.004 ERRO [0] job id 1446 saw error GenericError("test error")
57059 Sep 22 23:18:28.005 INFO [lossy] skipping 1447
57060 Sep 22 23:18:28.005 INFO [lossy] skipping 1447
57061 Sep 22 23:18:28.011 DEBG Read :1447 deps:[JobId(1446)] res:true
57062 Sep 22 23:18:28.411 DEBG [0] Read AckReady 1447, : downstairs
57063 Sep 22 23:18:28.411 DEBG up_ds_listen was notified
57064 Sep 22 23:18:28.411 DEBG up_ds_listen process 1447
57065 Sep 22 23:18:28.412 DEBG [A] ack job 1447:448, : downstairs
57066 Sep 22 23:18:28.464 DEBG up_ds_listen checked 1 jobs, back to waiting
57067 Sep 22 23:18:28.466 DEBG IO Read 1448 has deps [JobId(1446)]
57068 Sep 22 23:18:28.476 DEBG Read :1448 deps:[JobId(1446)] res:true
57069 Sep 22 23:18:28.876 DEBG [0] Read AckReady 1448, : downstairs
57070 Sep 22 23:18:28.877 DEBG up_ds_listen was notified
57071 Sep 22 23:18:28.877 DEBG up_ds_listen process 1448
57072 Sep 22 23:18:28.877 DEBG [A] ack job 1448:449, : downstairs
57073 Sep 22 23:18:28.929 DEBG up_ds_listen checked 1 jobs, back to waiting
57074 Sep 22 23:18:28.930 DEBG IO Flush 1449 has deps [JobId(1448), JobId(1447), JobId(1446)]
57075 Sep 22 23:18:28.931 DEBG IO Read 1450 has deps [JobId(1449)]
57076 Sep 22 23:18:28.933 DEBG Flush :1449 extent_limit None deps:[JobId(1448), JobId(1447), JobId(1446)] res:true f:190 g:1
57077 Sep 22 23:18:28.942 DEBG Read :1450 deps:[JobId(1449)] res:true
57078 Sep 22 23:18:29.341 DEBG [0] Read AckReady 1450, : downstairs
57079 Sep 22 23:18:29.342 DEBG up_ds_listen was notified
57080 Sep 22 23:18:29.342 DEBG up_ds_listen process 1450
57081 Sep 22 23:18:29.343 DEBG [A] ack job 1450:451, : downstairs
57082 Sep 22 23:18:29.395 DEBG up_ds_listen checked 1 jobs, back to waiting
57083 Sep 22 23:18:29.396 DEBG IO Read 1451 has deps [JobId(1449)]
57084 Sep 22 23:18:29.406 DEBG Read :1451 deps:[JobId(1449)] res:true
57085 Sep 22 23:18:29.804 DEBG [0] Read AckReady 1451, : downstairs
57086 Sep 22 23:18:29.805 DEBG up_ds_listen was notified
57087 Sep 22 23:18:29.805 DEBG up_ds_listen process 1451
57088 Sep 22 23:18:29.805 DEBG [A] ack job 1451:452, : downstairs
57089 Sep 22 23:18:29.857 DEBG up_ds_listen checked 1 jobs, back to waiting
57090 Sep 22 23:18:29.858 DEBG IO Flush 1452 has deps [JobId(1451), JobId(1450), JobId(1449)]
57091 Sep 22 23:18:29.858 INFO [lossy] sleeping 1 second
57092 Sep 22 23:18:29.858 DEBG IO Read 1453 has deps [JobId(1452)]
57093 Sep 22 23:18:30.360 DEBG IO Flush 1454 has deps [JobId(1453), JobId(1452)]
57094 Sep 22 23:18:30.859 WARN returning error on flush!
57095 Sep 22 23:18:30.859 DEBG Flush :1452 extent_limit None deps:[JobId(1451), JobId(1450), JobId(1449)] res:false f:191 g:1
57096 Sep 22 23:18:30.859 DEBG Flush :1452 extent_limit None deps:[JobId(1451), JobId(1450), JobId(1449)] res:true f:191 g:1
57097 Sep 22 23:18:30.860 WARN returning error on read!
57098 Sep 22 23:18:30.860 DEBG Read :1453 deps:[JobId(1452)] res:false
57099 Sep 22 23:18:30.860 INFO [lossy] skipping 1454
57100 Sep 22 23:18:30.860 INFO [lossy] skipping 1453
57101 Sep 22 23:18:30.860 WARN 1454 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
57102 Sep 22 23:18:30.860 WARN returning error on read!
57103 Sep 22 23:18:30.860 DEBG Read :1453 deps:[JobId(1452)] res:false
57104 Sep 22 23:18:30.866 DEBG Read :1453 deps:[JobId(1452)] res:true
57105 Sep 22 23:18:30.887 ERRO [0] job id 1452 saw error GenericError("test error")
57106 Sep 22 23:18:30.888 ERRO [0] job id 1453 saw error GenericError("test error")
57107 Sep 22 23:18:30.888 ERRO [0] job id 1453 saw error GenericError("test error")
57108 Sep 22 23:18:30.889 INFO [lossy] sleeping 1 second
57109 Sep 22 23:18:31.266 DEBG [0] Read AckReady 1453, : downstairs
57110 Sep 22 23:18:31.267 DEBG up_ds_listen was notified
57111 Sep 22 23:18:31.267 DEBG up_ds_listen process 1453
57112 Sep 22 23:18:31.267 DEBG [A] ack job 1453:454, : downstairs
57113 Sep 22 23:18:31.319 DEBG up_ds_listen checked 1 jobs, back to waiting
57114 Sep 22 23:18:31.320 DEBG IO Read 1455 has deps [JobId(1454)]
57115 Sep 22 23:18:31.389 DEBG IO Flush 1456 has deps [JobId(1455), JobId(1454)]
57116 Sep 22 23:18:31.890 INFO [lossy] skipping 1454
57117 Sep 22 23:18:31.890 DEBG Flush :1454 extent_limit None deps:[JobId(1453), JobId(1452)] res:true f:192 g:1
57118 Sep 22 23:18:31.890 INFO [lossy] sleeping 1 second
57119 Sep 22 23:18:32.891 INFO [lossy] skipping 1455
57120 Sep 22 23:18:32.891 WARN 1456 job Flush for connection UpstairsConnection { upstairs_id: 4cf66ffa-a12c-4555-b256-f1d6116cf5e6, session_id: 61abb1c6-aab0-4918-867b-523054aca8f6, gen: 1 } waiting on 1 deps, role: work
57121 Sep 22 23:18:32.891 WARN returning error on read!
57122 Sep 22 23:18:32.891 DEBG Read :1455 deps:[JobId(1454)] res:false
57123 Sep 22 23:18:32.898 DEBG Read :1455 deps:[JobId(1454)] res:true
57124 Sep 22 23:18:32.919 ERRO [0] job id 1455 saw error GenericError("test error")
57125 Sep 22 23:18:32.920 INFO [lossy] sleeping 1 second
57126 Sep 22 23:18:33.296 DEBG [0] Read AckReady 1455, : downstairs
57127 Sep 22 23:18:33.297 DEBG up_ds_listen was notified
57128 Sep 22 23:18:33.297 DEBG up_ds_listen process 1455
57129 Sep 22 23:18:33.297 DEBG [A] ack job 1455:456, : downstairs
57130 Sep 22 23:18:33.349 DEBG up_ds_listen checked 1 jobs, back to waiting
57131 Sep 22 23:18:33.351 DEBG IO Read 1457 has deps [JobId(1456)]
57132 Sep 22 23:18:33.420 DEBG IO Flush 1458 has deps [JobId(1457), JobId(1456)]
57133 Sep 22 23:18:33.921 DEBG Flush :1456 extent_limit None deps:[JobId(1455), JobId(1454)] res:true f:193 g:1
57134 Sep 22 23:18:33.928 DEBG Read :1457 deps:[JobId(1456)] res:true
57135 Sep 22 23:18:33.950 WARN returning error on flush!
57136 Sep 22 23:18:33.950 DEBG Flush :1458 extent_limit None deps:[JobId(1457), JobId(1456)] res:false f:194 g:1
57137 Sep 22 23:18:33.951 DEBG Flush :1458 extent_limit None deps:[JobId(1457), JobId(1456)] res:true f:194 g:1
57138 Sep 22 23:18:33.951 INFO [lossy] sleeping 1 second
57139 Sep 22 23:18:34.326 DEBG [0] Read AckReady 1457, : downstairs
57140 Sep 22 23:18:34.327 ERRO [0] job id 1458 saw error GenericError("test error")
57141 Sep 22 23:18:34.327 DEBG up_ds_listen was notified
57142 Sep 22 23:18:34.327 DEBG up_ds_listen process 1457
57143 Sep 22 23:18:34.327 DEBG [A] ack job 1457:458, : downstairs
57144 Sep 22 23:18:34.380 DEBG up_ds_listen checked 1 jobs, back to waiting
57145 Sep 22 23:18:34.381 DEBG IO Read 1459 has deps [JobId(1458)]
57146 Sep 22 23:18:34.450 DEBG IO Flush 1460 has deps [JobId(1459), JobId(1458)]
57147 Sep 22 23:18:34.957 DEBG Read :1459 deps:[JobId(1458)] res:true
57148 Sep 22 23:18:34.980 DEBG Flush :1460 extent_limit None deps:[JobId(1459), JobId(1458)] res:true f:195 g:1
57149 Sep 22 23:18:34.980 INFO [lossy] sleeping 1 second
57150 Sep 22 23:18:35.356 DEBG [0] Read AckReady 1459, : downstairs
57151 Sep 22 23:18:35.357 DEBG up_ds_listen was notified
57152 Sep 22 23:18:35.357 DEBG up_ds_listen process 1459
57153 Sep 22 23:18:35.357 DEBG [A] ack job 1459:460, : downstairs
57154 Sep 22 23:18:35.409 DEBG up_ds_listen checked 1 jobs, back to waiting
57155 Sep 22 23:18:35.410 DEBG IO Read 1461 has deps [JobId(1460)]
57156 Sep 22 23:18:35.481 DEBG IO Flush 1462 has deps [JobId(1461), JobId(1460)]
57157 Sep 22 23:18:35.987 DEBG Read :1461 deps:[JobId(1460)] res:true
57158 Sep 22 23:18:36.010 DEBG Flush :1462 extent_limit None deps:[JobId(1461), JobId(1460)] res:true f:196 g:1
57159 Sep 22 23:18:36.010 INFO [lossy] sleeping 1 second
57160 Sep 22 23:18:36.386 DEBG [0] Read AckReady 1461, : downstairs
57161 Sep 22 23:18:36.386 DEBG up_ds_listen was notified
57162 Sep 22 23:18:36.386 DEBG up_ds_listen process 1461
57163 Sep 22 23:18:36.386 DEBG [A] ack job 1461:462, : downstairs
57164 Sep 22 23:18:36.439 DEBG up_ds_listen checked 1 jobs, back to waiting
57165 Sep 22 23:18:36.440 DEBG IO Read 1463 has deps [JobId(1462)]
57166 Sep 22 23:18:36.509 DEBG IO Flush 1464 has deps [JobId(1463), JobId(1462)]
57167 Sep 22 23:18:37.011 INFO [lossy] skipping 1463
57168 Sep 22 23:18:37.011 INFO [lossy] skipping 1464
57169 Sep 22 23:18:37.011 INFO [lossy] skipping 1463
57170 Sep 22 23:18:37.011 INFO [lossy] skipping 1464
57171 Sep 22 23:18:37.011 WARN returning error on read!
57172 Sep 22 23:18:37.011 DEBG Read :1463 deps:[JobId(1462)] res:false
57173 Sep 22 23:18:37.011 INFO [lossy] skipping 1463
57174 Sep 22 23:18:37.018 DEBG Read :1463 deps:[JobId(1462)] res:true
57175 Sep 22 23:18:37.039 ERRO [0] job id 1463 saw error GenericError("test error")
57176 Sep 22 23:18:37.040 INFO [lossy] sleeping 1 second
57177 Sep 22 23:18:37.416 DEBG [0] Read AckReady 1463, : downstairs
57178 Sep 22 23:18:37.417 DEBG up_ds_listen was notified
57179 Sep 22 23:18:37.417 DEBG up_ds_listen process 1463
57180 Sep 22 23:18:37.417 DEBG [A] ack job 1463:464, : downstairs
57181 Sep 22 23:18:37.469 DEBG up_ds_listen checked 1 jobs, back to waiting
57182 Sep 22 23:18:37.470 DEBG IO Read 1465 has deps [JobId(1464)]
57183 Sep 22 23:18:37.511 DEBG IO Flush 1466 has deps [JobId(1465), JobId(1464)]
57184 Sep 22 23:18:38.041 INFO [lossy] skipping 1464
57185 Sep 22 23:18:38.041 INFO [lossy] skipping 1465
57186 Sep 22 23:18:38.041 DEBG Flush :1464 extent_limit None deps:[JobId(1463), JobId(1462)] res:true f:197 g:1
57187 Sep 22 23:18:38.048 DEBG Read :1465 deps:[JobId(1464)] res:true
57188 Sep 22 23:18:38.071 INFO [lossy] sleeping 1 second
57189 Sep 22 23:18:38.446 DEBG [0] Read AckReady 1465, : downstairs
57190 Sep 22 23:18:38.447 DEBG up_ds_listen was notified
57191 Sep 22 23:18:38.447 DEBG up_ds_listen process 1465
57192 Sep 22 23:18:38.447 DEBG [A] ack job 1465:466, : downstairs
57193 Sep 22 23:18:38.499 DEBG up_ds_listen checked 1 jobs, back to waiting
57194 Sep 22 23:18:38.500 DEBG IO Read 1467 has deps [JobId(1466)]
57195 Sep 22 23:18:38.514 DEBG IO Flush 1468 has deps [JobId(1467), JobId(1466)]
57196 Sep 22 23:18:39.072 DEBG Flush :1466 extent_limit None deps:[JobId(1465), JobId(1464)] res:true f:198 g:1
57197 Sep 22 23:18:39.079 DEBG Read :1467 deps:[JobId(1466)] res:true
57198 Sep 22 23:18:39.102 DEBG Flush :1468 extent_limit None deps:[JobId(1467), JobId(1466)] res:true f:199 g:1
57199 Sep 22 23:18:39.477 DEBG [0] Read AckReady 1467, : downstairs
57200 Sep 22 23:18:39.478 DEBG up_ds_listen was notified
57201 Sep 22 23:18:39.478 DEBG up_ds_listen process 1467
57202 Sep 22 23:18:39.478 DEBG [A] ack job 1467:468, : downstairs
57203 Sep 22 23:18:39.530 DEBG up_ds_listen checked 1 jobs, back to waiting
57204 Sep 22 23:18:39.531 DEBG IO Read 1469 has deps [JobId(1468)]
57205 Sep 22 23:18:39.541 DEBG Read :1469 deps:[JobId(1468)] res:true
57206 Sep 22 23:18:39.938 DEBG [0] Read AckReady 1469, : downstairs
57207 Sep 22 23:18:39.939 DEBG up_ds_listen was notified
57208 Sep 22 23:18:39.939 DEBG up_ds_listen process 1469
57209 Sep 22 23:18:39.939 DEBG [A] ack job 1469:470, : downstairs
57210 Sep 22 23:18:39.991 DEBG up_ds_listen checked 1 jobs, back to waiting
57211 Sep 22 23:18:39.992 DEBG IO Read 1470 has deps [JobId(1468)]
57212 Sep 22 23:18:40.002 DEBG Read :1470 deps:[JobId(1468)] res:true
57213 Sep 22 23:18:40.400 DEBG [0] Read AckReady 1470, : downstairs
57214 Sep 22 23:18:40.400 DEBG up_ds_listen was notified
57215 Sep 22 23:18:40.400 DEBG up_ds_listen process 1470
57216 Sep 22 23:18:40.401 DEBG [A] ack job 1470:471, : downstairs
57217 Sep 22 23:18:40.453 DEBG up_ds_listen checked 1 jobs, back to waiting
57218 Sep 22 23:18:40.454 DEBG IO Flush 1471 has deps [JobId(1470), JobId(1469), JobId(1468)]
57219 Sep 22 23:18:40.454 DEBG IO Read 1472 has deps [JobId(1471)]
57220 Sep 22 23:18:40.456 INFO [lossy] sleeping 1 second
57221 Sep 22 23:18:40.955 DEBG IO Flush 1473 has deps [JobId(1472), JobId(1471)]
57222 Sep 22 23:18:41.456 DEBG Flush :1471 extent_limit None deps:[JobId(1470), JobId(1469), JobId(1468)] res:true f:200 g:1
57223 Sep 22 23:18:41.463 DEBG Read :1472 deps:[JobId(1471)] res:true
57224 Sep 22 23:18:41.486 WARN returning error on flush!
57225 Sep 22 23:18:41.486 DEBG Flush :1473 extent_limit None deps:[JobId(1472), JobId(1471)] res:false f:201 g:1
57226 Sep 22 23:18:41.486 DEBG Flush :1473 extent_limit None deps:[JobId(1472), JobId(1471)] res:true f:201 g:1
57227 Sep 22 23:18:41.862 DEBG [0] Read AckReady 1472, : downstairs
57228 Sep 22 23:18:41.862 ERRO [0] job id 1473 saw error GenericError("test error")
57229 Sep 22 23:18:41.862 DEBG up_ds_listen was notified
57230 Sep 22 23:18:41.862 DEBG up_ds_listen process 1472
57231 Sep 22 23:18:41.862 DEBG [A] ack job 1472:473, : downstairs
57232 Sep 22 23:18:41.915 DEBG up_ds_listen checked 1 jobs, back to waiting
57233 Sep 22 23:18:41.916 DEBG IO Read 1474 has deps [JobId(1473)]
57234 Sep 22 23:18:41.925 DEBG Read :1474 deps:[JobId(1473)] res:true
57235 Sep 22 23:18:42.324 DEBG [0] Read AckReady 1474, : downstairs
57236 Sep 22 23:18:42.324 DEBG up_ds_listen was notified
57237 Sep 22 23:18:42.324 DEBG up_ds_listen process 1474
57238 Sep 22 23:18:42.324 DEBG [A] ack job 1474:475, : downstairs
57239 Sep 22 23:18:42.377 DEBG up_ds_listen checked 1 jobs, back to waiting
57240 Sep 22 23:18:42.378 DEBG IO Flush 1475 has deps [JobId(1474), JobId(1473)]
57241 Sep 22 23:18:42.378 DEBG IO Read 1476 has deps [JobId(1475)]
57242 Sep 22 23:18:42.380 DEBG Flush :1475 extent_limit None deps:[JobId(1474), JobId(1473)] res:true f:202 g:1
57243 Sep 22 23:18:42.383 WARN returning error on read!
57244 Sep 22 23:18:42.383 DEBG Read :1476 deps:[JobId(1475)] res:false
57245 Sep 22 23:18:42.383 INFO [lossy] skipping 1476
57246 Sep 22 23:18:42.383 INFO [lossy] skipping 1476
57247 Sep 22 23:18:42.383 WARN returning error on read!
57248 Sep 22 23:18:42.383 DEBG Read :1476 deps:[JobId(1475)] res:false
57249 Sep 22 23:18:42.383 WARN returning error on read!
57250 Sep 22 23:18:42.383 DEBG Read :1476 deps:[JobId(1475)] res:false
57251 Sep 22 23:18:42.383 INFO [lossy] skipping 1476
57252 Sep 22 23:18:42.383 INFO [lossy] skipping 1476
57253 Sep 22 23:18:42.389 DEBG Read :1476 deps:[JobId(1475)] res:true
57254 Sep 22 23:18:42.410 ERRO [0] job id 1476 saw error GenericError("test error")
57255 Sep 22 23:18:42.410 ERRO [0] job id 1476 saw error GenericError("test error")
57256 Sep 22 23:18:42.410 ERRO [0] job id 1476 saw error GenericError("test error")
57257 Sep 22 23:18:42.787 DEBG [0] Read AckReady 1476, : downstairs
57258 Sep 22 23:18:42.787 DEBG up_ds_listen was notified
57259 Sep 22 23:18:42.787 DEBG up_ds_listen process 1476
57260 Sep 22 23:18:42.787 DEBG [A] ack job 1476:477, : downstairs
57261 Sep 22 23:18:42.839 DEBG up_ds_listen checked 1 jobs, back to waiting
57262 Sep 22 23:18:42.840 DEBG IO Read 1477 has deps [JobId(1475)]
57263 Sep 22 23:18:42.845 INFO [lossy] skipping 1477
57264 Sep 22 23:18:42.850 DEBG Read :1477 deps:[JobId(1475)] res:true
57265 Sep 22 23:18:43.248 DEBG [0] Read AckReady 1477, : downstairs
57266 Sep 22 23:18:43.249 DEBG up_ds_listen was notified
57267 Sep 22 23:18:43.249 DEBG up_ds_listen process 1477
57268 Sep 22 23:18:43.249 DEBG [A] ack job 1477:478, : downstairs
57269 Sep 22 23:18:43.301 DEBG up_ds_listen checked 1 jobs, back to waiting
57270 Sep 22 23:18:43.302 DEBG IO Flush 1478 has deps [JobId(1477), JobId(1476), JobId(1475)]
57271 Sep 22 23:18:43.303 DEBG IO Read 1479 has deps [JobId(1478)]
57272 Sep 22 23:18:43.305 INFO [lossy] sleeping 1 second
57273 Sep 22 23:18:43.804 DEBG IO Flush 1480 has deps [JobId(1479), JobId(1478)]
57274 Sep 22 23:18:44.306 INFO [lossy] skipping 1478
57275 Sep 22 23:18:44.306 DEBG Flush :1478 extent_limit None deps:[JobId(1477), JobId(1476), JobId(1475)] res:true f:203 g:1
57276 Sep 22 23:18:44.306 INFO [lossy] sleeping 1 second
57277 Sep 22 23:18:45.313 DEBG Read :1479 deps:[JobId(1478)] res:true
57278 Sep 22 23:18:45.336 WARN returning error on flush!
57279 Sep 22 23:18:45.336 DEBG Flush :1480 extent_limit None deps:[JobId(1479), JobId(1478)] res:false f:204 g:1
57280 Sep 22 23:18:45.336 INFO [lossy] skipping 1480
57281 Sep 22 23:18:45.336 WARN returning error on flush!
57282 Sep 22 23:18:45.336 DEBG Flush :1480 extent_limit None deps:[JobId(1479), JobId(1478)] res:false f:204 g:1
57283 Sep 22 23:18:45.336 WARN returning error on flush!
57284 Sep 22 23:18:45.336 DEBG Flush :1480 extent_limit None deps:[JobId(1479), JobId(1478)] res:false f:204 g:1
57285 Sep 22 23:18:45.337 DEBG Flush :1480 extent_limit None deps:[JobId(1479), JobId(1478)] res:true f:204 g:1
57286 Sep 22 23:18:45.712 DEBG [0] Read AckReady 1479, : downstairs
57287 Sep 22 23:18:45.713 ERRO [0] job id 1480 saw error GenericError("test error")
57288 Sep 22 23:18:45.713 ERRO [0] job id 1480 saw error GenericError("test error")
57289 Sep 22 23:18:45.713 ERRO [0] job id 1480 saw error GenericError("test error")
57290 Sep 22 23:18:45.713 DEBG up_ds_listen was notified
57291 Sep 22 23:18:45.713 DEBG up_ds_listen process 1479
57292 Sep 22 23:18:45.714 DEBG [A] ack job 1479:480, : downstairs
57293 Sep 22 23:18:45.766 DEBG up_ds_listen checked 1 jobs, back to waiting
57294 Sep 22 23:18:45.767 DEBG IO Read 1481 has deps [JobId(1480)]
57295 Sep 22 23:18:45.771 INFO [lossy] skipping 1481
57296 Sep 22 23:18:45.777 DEBG Read :1481 deps:[JobId(1480)] res:true
57297 Sep 22 23:18:46.176 DEBG [0] Read AckReady 1481, : downstairs
57298 Sep 22 23:18:46.177 DEBG up_ds_listen was notified
57299 Sep 22 23:18:46.177 DEBG up_ds_listen process 1481
57300 Sep 22 23:18:46.177 DEBG [A] ack job 1481:482, : downstairs
57301 Sep 22 23:18:46.230 DEBG up_ds_listen checked 1 jobs, back to waiting
57302 Sep 22 23:18:46.231 DEBG IO Flush 1482 has deps [JobId(1481), JobId(1480)]
57303 Sep 22 23:18:46.231 DEBG IO Read 1483 has deps [JobId(1482)]
57304 Sep 22 23:18:46.233 WARN returning error on flush!
57305 Sep 22 23:18:46.233 DEBG Flush :1482 extent_limit None deps:[JobId(1481), JobId(1480)] res:false f:205 g:1
57306 Sep 22 23:18:46.233 DEBG Flush :1482 extent_limit None deps:[JobId(1481), JobId(1480)] res:true f:205 g:1
57307 Sep 22 23:18:46.236 ERRO [0] job id 1482 saw error GenericError("test error")
57308 Sep 22 23:18:46.241 DEBG Read :1483 deps:[JobId(1482)] res:true
57309 Sep 22 23:18:46.640 DEBG [0] Read AckReady 1483, : downstairs
57310 Sep 22 23:18:46.641 DEBG up_ds_listen was notified
57311 Sep 22 23:18:46.641 DEBG up_ds_listen process 1483
57312 Sep 22 23:18:46.641 DEBG [A] ack job 1483:484, : downstairs
57313 Sep 22 23:18:46.693 DEBG up_ds_listen checked 1 jobs, back to waiting
57314 Sep 22 23:18:46.695 DEBG IO Read 1484 has deps [JobId(1482)]
57315 Sep 22 23:18:46.705 DEBG Read :1484 deps:[JobId(1482)] res:true
57316 Sep 22 23:18:47.103 DEBG [0] Read AckReady 1484, : downstairs
57317 Sep 22 23:18:47.104 DEBG up_ds_listen was notified
57318 Sep 22 23:18:47.104 DEBG up_ds_listen process 1484
57319 Sep 22 23:18:47.104 DEBG [A] ack job 1484:485, : downstairs
57320 Sep 22 23:18:47.156 DEBG up_ds_listen checked 1 jobs, back to waiting
57321 Sep 22 23:18:47.157 DEBG IO Flush 1485 has deps [JobId(1484), JobId(1483), JobId(1482)]
57322 Sep 22 23:18:47.158 DEBG IO Read 1486 has deps [JobId(1485)]
57323 Sep 22 23:18:47.160 INFO [lossy] skipping 1485
57324 Sep 22 23:18:47.160 DEBG Flush :1485 extent_limit None deps:[JobId(1484), JobId(1483), JobId(1482)] res:true f:206 g:1
57325 Sep 22 23:18:47.162 INFO [lossy] sleeping 1 second
57326 Sep 22 23:18:47.658 DEBG IO Flush 1487 has deps [JobId(1486), JobId(1485)]
57327 Sep 22 23:18:48.170 DEBG Read :1486 deps:[JobId(1485)] res:true
57328 Sep 22 23:18:48.192 DEBG Flush :1487 extent_limit None deps:[JobId(1486), JobId(1485)] res:true f:207 g:1
57329 Sep 22 23:18:48.568 DEBG [0] Read AckReady 1486, : downstairs
57330 Sep 22 23:18:48.569 DEBG up_ds_listen was notified
57331 Sep 22 23:18:48.569 DEBG up_ds_listen process 1486
57332 Sep 22 23:18:48.569 DEBG [A] ack job 1486:487, : downstairs
57333 Sep 22 23:18:48.622 DEBG up_ds_listen checked 1 jobs, back to waiting
57334 Sep 22 23:18:48.623 DEBG IO Read 1488 has deps [JobId(1487)]
57335 Sep 22 23:18:48.627 INFO [lossy] skipping 1488
57336 Sep 22 23:18:48.627 WARN returning error on read!
57337 Sep 22 23:18:48.627 DEBG Read :1488 deps:[JobId(1487)] res:false
57338 Sep 22 23:18:48.633 DEBG Read :1488 deps:[JobId(1487)] res:true
57339 Sep 22 23:18:48.655 ERRO [0] job id 1488 saw error GenericError("test error")
57340 Sep 22 23:18:49.032 DEBG [0] Read AckReady 1488, : downstairs
57341 Sep 22 23:18:49.033 DEBG up_ds_listen was notified
57342 Sep 22 23:18:49.033 DEBG up_ds_listen process 1488
57343 Sep 22 23:18:49.033 DEBG [A] ack job 1488:489, : downstairs
57344 Sep 22 23:18:49.085 DEBG up_ds_listen checked 1 jobs, back to waiting
57345 test test::integration_test_problematic_downstairs ... ok
57346 
57347 test result: ok. 57 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 261.76s
57348 
57349 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_nbd_server-8c1612631a1669fd --nocapture`
57350 
57351 running 0 tests
57352 
57353 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
57354 
57355 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_package-e8ff0170d25e0da5 --nocapture`
57356 
57357 running 0 tests
57358 
57359 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
57360 
57361 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_pantry-8e1bf492bfe90e8c --nocapture`
57362 
57363 running 0 tests
57364 
57365 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
57366 
57367 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_pantry-b51bc30f7a0cbfa5 --nocapture`
57368 
57369 running 1 test
57370 test tests::test_crucible_pantry_openapi ... ok
57371 
57372 test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.02s
57373 
57374 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_pantry_client-8a27f01eb086219e --nocapture`
57375 
57376 running 0 tests
57377 
57378 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
57379 
57380 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_protocol-c776b78ce4b42bf6 --nocapture`
57381 
57382 running 9 tests
57383 test tests::latest_message_version ... ok
57384 test tests::correctly_detect_truncated_message ... ok
57385 test tests::rt_evp ... ok
57386 test tests::rt_ev_0 ... ok
57387 test tests::rt_ev_7 ... ok
57388 test tests::rt_here_i_am ... ok
57389 test tests::rt_imok ... ok
57390 test tests::rt_ruok ... ok
57391 test tests::rt_yes_its_me ... ok
57392 
57393 test result: ok. 9 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
57394 
57395 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_protocol-d81e6562be2ffe77 --nocapture`
57396 
57397 running 0 tests
57398 
57399 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
57400 
57401 Running `/work/oxidecomputer/crucible/target/debug/deps/crucible_smf-6114df38a9482a0c --nocapture`
57402 
57403 running 10 tests
57404 SCF_LIMIT_MAX_NAME_LENGTH = 119
57405 SCF_LIMIT_MAX_VALUE_LENGTH = 4095
57406 SCF_LIMIT_MAX_PG_TYPE_LENGTH = 119
57407 SCF_LIMIT_MAX_FMRI_LENGTH = 628
57408 test scf_sys::tests::limits ... ok
57409 name = Ok("localhost")
57410 test scf_sys::tests::handle ... ok
57411 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57412 milestone/multi-user
57413 test tests::scope_local ... ok
57414 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57415 test tests::scope_iter ... ok
57416 test tests::basic ... ok
57417 test tests::scope_not_set ... ok
57418 default
57419 test tests::iter ... ok
57420 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57421 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57422 milestone/multi-user
57423 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57424 milestone/multi-user
57425 milestone/name-services
57426 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57427 milestone/name-services
57428 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57429 default
57430 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57431 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57432 milestone/single-user
57433 default
57434 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57435 initial
57436 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57437 network/datalink-management
57438 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57439 last-import
57440 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57441 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57442 system/install-discovery
57443 milestone/single-user
57444 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57445 previous
57446 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57447 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57448 system/device/local
57449 running
57450 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57451 default
57452 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57453 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57454 network/physical
57455 start
57456 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57457 network/initial
57458 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57459 network/datalink-management
57460 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57461 network/ip-interface-management
57462 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57463 milestone/name-services
57464 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57465 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57466 network/loopback
57467 default
57468 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57469 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57470 default
57471 network/iptun
57472 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57473 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57474 network/netcfg
57475 initial
57476 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57477 system/install-discovery
57478 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57479 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57480 last-import
57481 network/rpc/bind
57482 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57483 previous
57484 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57485 system/boot-archive
57486 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57487 running
57488 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57489 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57490 system/device/local
57491 milestone/devices
57492 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57493 start
57494 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57495 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57496 system/filesystem/local
57497 default
57498 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57499 system/filesystem/minimal
57500 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57501 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57502 milestone/single-user
57503 system/filesystem/root
57504 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57505 network/physical
57506 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57507 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57508 system/filesystem/usr
57509 default
57510 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57511 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57512 system/identity
57513 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57514 default
57515 initial
57516 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57517 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57518 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57519 system/manifest-import
57520 nwam
57521 last-import
57522 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57523 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57524 previous
57525 system/svc/global
57526 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57527 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57528 running
57529 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57530 system/svc/restarter
57531 network/initial
57532 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57533 start
57534 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57535 milestone/multi-user-server
57536 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57537 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57538 default
57539 network/inetd-upgrade
57540 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57541 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57542 system/console-login
57543 network/datalink-management
57544 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57545 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57546 network/ip-interface-management
57547 system/utmp
57548 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57549 default
57550 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57551 application/management/net-snmp
57552 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57553 default
57554 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57555 initial
57556 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57557 smf/manifest
57558 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57559 last-import
57560 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57561 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57562 application/pkg/repositories-setup
57563 previous
57564 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57565 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57566 network/loopback
57567 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57568 application/pkg/dynamic-mirror
57569 running
57570 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57571 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57572 start
57573 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57574 application/pkg/mirror
57575 default
57576 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57577 application/pkg/server
57578 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57579 application/security/tcsd
57580 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57581 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57582 network/iptun
57583 system/install-discovery
57584 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57585 milestone/sysconfig
57586 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57587 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57588 default
57589 milestone/network
57590 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57591 system/device/local
57592 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57593 network/ntp
57594 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57595 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57596 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57597 network/netcfg
57598 default
57599 network/ipmp
57600 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57601 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57602 initial
57603 network/routing/rdisc
57604 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57605 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57606 default
57607 last-import
57608 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57609 network/routing/route
57610 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57611 previous
57612 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57613 network/routing/ndp
57614 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57615 running
57616 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57617 network/rpc/bind
57618 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57619 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57620 network/routing/legacy-routing
57621 start
57622 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57623 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57624 network/routing/ripng
57625 default
57626 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57627 network/inetd
57628 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57629 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57630 network/physical
57631 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57632 network/bridge
57633 system/boot-archive
57634 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57635 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57636 network/ipv4-forwarding
57637 default
57638 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57639 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57640 default
57641 network/ipv6-forwarding
57642 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57643 initial
57644 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57645 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57646 network/ipqos
57647 last-import
57648 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57649 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57650 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57651 previous
57652 milestone/devices
57653 network/ipsec/ipsecalgs
57654 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57655 running
57656 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57657 network/ipsec/policy
57658 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57659 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57660 default
57661 start
57662 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57663 network/ipsec/manual-key
57664 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57665 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57666 network/ipsec/ike
57667 nwam
57668 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57669 system/filesystem/local
57670 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57671 network/install
57672 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57673 initial
57674 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57675 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57676 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57677 default
57678 network/shares/group
57679 last-import
57680 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57681 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57682 system/filesystem/reparse
57683 previous
57684 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57685 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57686 running
57687 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57688 network/rpc/smserver
57689 system/filesystem/minimal
57690 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57691 network/rpc/keyserv
57692 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57693 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57694 default
57695 network/rpc/gss
57696 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57697 network/initial
57698 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57699 network/ipfilter
57700 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57701 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57702 default
57703 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57704 system/filesystem/root
57705 network/ldap/client
57706 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57707 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57708 initial
57709 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57710 network/smb/client
57711 default
57712 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57713 last-import
57714 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57715 network/smb/server
57716 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57717 previous
57718 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57719 network/routing-setup
57720 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57721 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57722 running
57723 system/filesystem/usr
57724 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57725 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57726 network/npiv_config
57727 start
57728 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57729 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57730 default
57731 system/device/fc-fabric
57732 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57733 network/ssh
57734 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57735 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57736 network/ip-interface-management
57737 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57738 network/varpd
57739 system/identity
57740 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57741 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57742 default
57743 network/security/kadmin
57744 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57745 domain
57746 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57747 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57748 network/security/krb5kdc
57749 initial
57750 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57751 node
57752 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57753 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57754 last-import
57755 network/security/ktkt_warn
57756 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57757 previous
57758 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57759 network/device-discovery/printers
57760 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57761 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57762 system/manifest-import
57763 running
57764 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57765 network/service
57766 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57767 start
57768 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57769 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57770 default
57771 network/nis/client
57772 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57773 network/location
57774 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57775 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57776 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57777 network/dns/client
57778 network/loopback
57779 system/svc/global
57780 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57781 system/name-service-cache
57782 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57783 default
57784 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57785 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57786 default
57787 network/nfs/mapid
57788 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57789 initial
57790 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57791 network/chrony
57792 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57793 last-import
57794 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57795 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57796 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57797 system/svc/restarter
57798 network/ibd-post-upgrade
57799 previous
57800 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57801 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57802 running
57803 network/tftp/udp6
57804 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57805 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57806 default
57807 start
57808 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57809 network/netmask
57810 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57811 network/dns/multicast
57812 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57813 milestone/multi-user-server
57814 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57815 network/dns/install
57816 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57817 network/iptun
57818 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57819 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57820 network/nfs/log
57821 default
57822 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57823 default
57824 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57825 network/nfs/rquota
57826 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57827 initial
57828 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57829 network/nfs/client
57830 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57831 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57832 last-import
57833 network/inetd-upgrade
57834 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57835 network/nfs/server
57836 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57837 previous
57838 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57839 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57840 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57841 default
57842 network/nfs/cbd
57843 running
57844 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57845 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57846 start
57847 network/nfs/status
57848 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57849 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57850 system/console-login
57851 network/nfs/nlockmgr
57852 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57853 platform/i86pc/acpihpd
57854 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57855 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57856 default
57857 network/netcfg
57858 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57859 system/sac
57860 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57861 vt2
57862 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57863 default
57864 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57865 system/fcoe_initiator
57866 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57867 vt3
57868 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57869 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57870 initial
57871 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57872 system/fmd
57873 vt4
57874 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57875 last-import
57876 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57877 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57878 system/sysevent
57879 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57880 vt5
57881 previous
57882 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57883 system/boot-config
57884 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57885 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57886 running
57887 vt6
57888 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57889 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57890 system/picl
57891 start
57892 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57893 system/coreadm
57894 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57895 system/utmp
57896 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57897 system/hal
57898 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57899 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57900 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57901 network/rpc/bind
57902 default
57903 system/resource-mgmt
57904 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57905 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57906 default
57907 system/rcap
57908 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57909 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57910 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57911 system/system-log
57912 initial
57913 application/management/net-snmp
57914 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57915 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57916 last-import
57917 system/dumpadm
57918 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57919 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57920 default
57921 previous
57922 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57923 system/dbus
57924 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57925 running
57926 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57927 system/pools
57928 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57929 start
57930 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57931 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57932 smf/manifest
57933 system/power
57934 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57935 system/keymap
57936 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57937 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57938 system/auditset
57939 system/boot-archive
57940 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57941 application/pkg/repositories-setup
57942 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57943 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57944 system/stmf
57945 default
57946 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57947 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57948 default
57949 system/hotplug
57950 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57951 initial
57952 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57953 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57954 system/rbac
57955 last-import
57956 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57957 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57958 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57959 previous
57960 system/logadm-upgrade
57961 application/pkg/dynamic-mirror
57962 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57963 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57964 running
57965 system/hostid
57966 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57967 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57968 default
57969 start
57970 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57971 system/filesystem/autofs
57972 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57973 system/cron
57974 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57975 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57976 application/pkg/mirror
57977 system/illumos/userscript
57978 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
57979 milestone/devices
57980 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57981 system/vtdaemon
57982 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
57983 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
57984 default
57985 default
57986 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57987 system/boot-archive-update
57988 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57989 initial
57990 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57991 system/cryptosvc
57992 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
57993 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57994 application/pkg/server
57995 last-import
57996 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
57997 system/intrd
57998 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
57999 previous
58000 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58001 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58002 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58003 default
58004 system/rmtmpfiles
58005 running
58006 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58007 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58008 start
58009 system/t6init
58010 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58011 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58012 system/auditd
58013 application/security/tcsd
58014 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58015 system/idmap
58016 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58017 system/filesystem/local
58018 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58019 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58020 default
58021 system/zones-monitoring
58022 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58023 default
58024 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58025 system/zones
58026 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58027 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58028 initial
58029 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58030 system/ipcc
58031 milestone/sysconfig
58032 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58033 last-import
58034 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58035 system/update-man-index
58036 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58037 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58038 previous
58039 default
58040 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58041 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58042 system/process-security
58043 running
58044 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58045 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58046 start
58047 system/fm/notify-params
58048 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58049 milestone/network
58050 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58051 system/pkgserv
58052 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58053 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58054 system/extended-accounting
58055 default
58056 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58057 system/filesystem/minimal
58058 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58059 system/consadm
58060 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58061 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58062 default
58063 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58064 system/scheduler
58065 network/ntp
58066 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58067 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58068 system/pfexec
58069 initial
58070 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58071 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58072 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58073 default
58074 last-import
58075 system/illumos/metadata
58076 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58077 previous
58078 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58079 system/sar
58080 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58081 running
58082 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58083 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58084 network/ipmp
58085 system/early-manifest-import
58086 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58087 start
58088 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58089 system/fcoe_target
58090 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58091 default
58092 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58093 system/device/mpxio-upgrade
58094 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58095 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58096 system/filesystem/root
58097 system/device/audio
58098 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58099 network/routing/rdisc
58100 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58101 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58102 system/device/allocate
58103 default
58104 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58105 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58106 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58107 default
58108 smf/legacy_run
58109 initial
58110 Ok(Service { scf: Scf { handle: 0x6bc550 }, service: 0x6bd790 })
58111 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58112 last-import
58113 site/buildomat/agent
58114 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58115 previous
58116 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58117 network/routing/route
58118 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58119 running
58120 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58121 start
58122 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58123 default
58124 test tests::service_iter ... ok
58125 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58126 network/routing/ndp
58127 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58128 system/filesystem/usr
58129 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58130 default
58131 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58132 default
58133 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58134 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58135 initial
58136 network/routing/legacy-routing
58137 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58138 last-import
58139 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58140 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58141 ipv4
58142 previous
58143 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58144 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58145 ipv6
58146 running
58147 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58148 start
58149 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58150 network/routing/ripng
58151 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58152 default
58153 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58154 system/identity
58155 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58156 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58157 network/inetd
58158 domain
58159 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58160 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58161 default
58162 initial
58163 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58164 last-import
58165 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58166 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58167 network/bridge
58168 previous
58169 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58170 running
58171 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58172 start
58173 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58174 network/ipv4-forwarding
58175 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58176 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58177 node
58178 default
58179 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58180 initial
58181 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58182 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58183 network/ipv6-forwarding
58184 last-import
58185 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58186 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58187 previous
58188 default
58189 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58190 running
58191 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58192 start
58193 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58194 network/ipqos
58195 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58196 default
58197 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58198 system/manifest-import
58199 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58200 network/ipsec/ipsecalgs
58201 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58202 default
58203 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58204 default
58205 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58206 initial
58207 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58208 last-import
58209 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58210 network/ipsec/policy
58211 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58212 previous
58213 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58214 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58215 default
58216 running
58217 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58218 start
58219 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58220 network/ipsec/manual-key
58221 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58222 default
58223 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58224 system/svc/global
58225 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58226 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58227 network/ipsec/ike
58228 default
58229 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58230 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58231 default
58232 initial
58233 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58234 last-import
58235 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58236 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58237 previous
58238 network/install
58239 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58240 running
58241 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58242 default
58243 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58244 network/shares/group
58245 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58246 system/svc/restarter
58247 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58248 default
58249 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58250 default
58251 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58252 zfs
58253 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58254 initial
58255 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58256 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58257 last-import
58258 system/filesystem/reparse
58259 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58260 previous
58261 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58262 default
58263 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58264 running
58265 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58266 network/rpc/smserver
58267 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58268 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58269 default
58270 milestone/multi-user-server
58271 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58272 default
58273 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58274 network/rpc/keyserv
58275 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58276 initial
58277 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58278 default
58279 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58280 last-import
58281 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58282 previous
58283 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58284 network/rpc/gss
58285 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58286 running
58287 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58288 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58289 start
58290 default
58291 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58292 network/ipfilter
58293 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58294 network/inetd-upgrade
58295 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58296 default
58297 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58298 default
58299 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58300 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58301 network/ldap/client
58302 initial
58303 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58304 last-import
58305 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58306 default
58307 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58308 previous
58309 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58310 running
58311 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58312 network/smb/client
58313 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58314 start
58315 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58316 default
58317 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58318 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58319 network/smb/server
58320 system/console-login
58321 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58322 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58323 default
58324 default
58325 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58326 initial
58327 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58328 network/routing-setup
58329 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58330 last-import
58331 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58332 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58333 previous
58334 default
58335 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58336 running
58337 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58338 start
58339 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58340 network/npiv_config
58341 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58342 default
58343 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58344 vt2
58345 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58346 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58347 initial
58348 system/device/fc-fabric
58349 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58350 last-import
58351 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58352 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58353 default
58354 previous
58355 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58356 running
58357 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58358 network/ssh
58359 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58360 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58361 vt3
58362 default
58363 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58364 initial
58365 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58366 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58367 network/varpd
58368 last-import
58369 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58370 previous
58371 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58372 default
58373 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58374 running
58375 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58376 network/security/kadmin
58377 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58378 vt4
58379 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58380 default
58381 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58382 initial
58383 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58384 last-import
58385 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58386 network/security/krb5kdc
58387 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58388 previous
58389 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58390 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58391 default
58392 running
58393 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58394 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58395 network/security/ktkt_warn
58396 vt5
58397 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58398 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58399 default
58400 initial
58401 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58402 last-import
58403 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58404 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58405 previous
58406 network/device-discovery/printers
58407 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58408 running
58409 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58410 snmp
58411 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58412 vt6
58413 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58414 network/service
58415 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58416 initial
58417 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58418 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58419 default
58420 last-import
58421 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58422 previous
58423 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58424 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58425 network/nis/client
58426 running
58427 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58428 default
58429 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58430 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58431 system/utmp
58432 network/location
58433 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58434 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58435 default
58436 default
58437 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58438 initial
58439 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58440 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58441 network/dns/client
58442 last-import
58443 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58444 previous
58445 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58446 default
58447 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58448 running
58449 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58450 start
58451 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58452 system/name-service-cache
58453 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58454 default
58455 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58456 application/management/net-snmp
58457 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58458 network/nfs/mapid
58459 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58460 default
58461 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58462 default
58463 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58464 initial
58465 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58466 last-import
58467 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58468 network/chrony
58469 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58470 running
58471 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58472 default
58473 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58474 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58475 network/ibd-post-upgrade
58476 smf/manifest
58477 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58478 default
58479 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58480 application/pkg/repositories-setup
58481 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58482 network/tftp/udp6
58483 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58484 default
58485 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58486 default
58487 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58488 initial
58489 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58490 last-import
58491 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58492 network/netmask
58493 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58494 running
58495 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58496 default
58497 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58498 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58499 network/dns/multicast
58500 application/pkg/dynamic-mirror
58501 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58502 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58503 default
58504 default
58505 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58506 initial
58507 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58508 network/dns/install
58509 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58510 last-import
58511 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58512 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58513 running
58514 default
58515 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58516 network/nfs/log
58517 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58518 application/pkg/mirror
58519 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58520 default
58521 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58522 default
58523 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58524 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58525 network/nfs/rquota
58526 initial
58527 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58528 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58529 last-import
58530 default
58531 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58532 running
58533 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58534 network/nfs/client
58535 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58536 default
58537 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58538 application/pkg/server
58539 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58540 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58541 default
58542 network/nfs/server
58543 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58544 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58545 initial
58546 default
58547 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58548 last-import
58549 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58550 running
58551 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58552 network/nfs/cbd
58553 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58554 default
58555 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58556 application/security/tcsd
58557 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58558 network/nfs/status
58559 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58560 default
58561 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58562 default
58563 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58564 initial
58565 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58566 last-import
58567 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58568 network/nfs/nlockmgr
58569 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58570 running
58571 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58572 default
58573 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58574 platform/i86pc/acpihpd
58575 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58576 milestone/sysconfig
58577 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58578 default
58579 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58580 default
58581 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58582 initial
58583 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58584 system/sac
58585 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58586 last-import
58587 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58588 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58589 default
58590 running
58591 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58592 start
58593 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58594 system/fcoe_initiator
58595 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58596 default
58597 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58598 milestone/network
58599 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58600 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58601 system/fmd
58602 default
58603 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58604 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58605 default
58606 initial
58607 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58608 last-import
58609 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58610 running
58611 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58612 system/sysevent
58613 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58614 start
58615 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58616 default
58617 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58618 system/boot-config
58619 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58620 network/ntp
58621 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58622 default
58623 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58624 default
58625 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58626 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58627 initial
58628 system/picl
58629 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58630 last-import
58631 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58632 default
58633 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58634 running
58635 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58636 start
58637 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58638 system/coreadm
58639 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58640 default
58641 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58642 network/ipmp
58643 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58644 system/hal
58645 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58646 default
58647 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58648 default
58649 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58650 initial
58651 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58652 last-import
58653 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58654 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58655 system/resource-mgmt
58656 running
58657 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58658 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58659 start
58660 default
58661 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58662 system/rcap
58663 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58664 network/routing/rdisc
58665 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58666 default
58667 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58668 system/system-log
58669 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58670 initial
58671 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58672 default
58673 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58674 last-import
58675 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58676 rsyslog
58677 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58678 running
58679 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58680 system/dumpadm
58681 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58682 default
58683 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58684 network/routing/route
58685 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58686 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58687 default
58688 system/dbus
58689 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58690 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58691 initial
58692 default
58693 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58694 last-import
58695 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58696 running
58697 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58698 system/pools
58699 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58700 default
58701 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58702 network/routing/ndp
58703 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58704 system/power
58705 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58706 default
58707 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58708 default
58709 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58710 initial
58711 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58712 last-import
58713 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58714 system/keymap
58715 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58716 running
58717 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58718 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58719 start
58720 default
58721 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58722 system/auditset
58723 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58724 network/routing/legacy-routing
58725 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58726 default
58727 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58728 ipv4
58729 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58730 system/stmf
58731 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58732 initial
58733 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58734 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58735 last-import
58736 default
58737 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58738 running
58739 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58740 system/hotplug
58741 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58742 ipv6
58743 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58744 default
58745 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58746 initial
58747 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58748 last-import
58749 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58750 system/rbac
58751 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58752 running
58753 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58754 default
58755 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58756 system/logadm-upgrade
58757 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58758 network/routing/ripng
58759 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58760 default
58761 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58762 default
58763 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58764 initial
58765 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58766 system/hostid
58767 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58768 last-import
58769 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58770 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58771 default
58772 running
58773 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58774 system/filesystem/autofs
58775 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58776 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58777 network/inetd
58778 default
58779 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58780 default
58781 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58782 system/cron
58783 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58784 initial
58785 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58786 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58787 default
58788 last-import
58789 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58790 running
58791 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58792 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58793 start
58794 system/illumos/userscript
58795 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58796 default
58797 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58798 network/bridge
58799 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58800 system/vtdaemon
58801 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58802 default
58803 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58804 network/ipv4-forwarding
58805 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58806 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58807 default
58808 system/boot-archive-update
58809 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58810 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58811 initial
58812 default
58813 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58814 last-import
58815 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58816 running
58817 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58818 system/cryptosvc
58819 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58820 default
58821 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58822 network/ipv6-forwarding
58823 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58824 system/intrd
58825 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58826 default
58827 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58828 default
58829 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58830 initial
58831 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58832 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58833 last-import
58834 system/rmtmpfiles
58835 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58836 running
58837 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58838 default
58839 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58840 system/t6init
58841 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58842 network/ipqos
58843 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58844 default
58845 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58846 default
58847 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58848 initial
58849 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58850 system/auditd
58851 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58852 last-import
58853 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58854 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58855 default
58856 running
58857 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58858 system/idmap
58859 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58860 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58861 network/ipsec/ipsecalgs
58862 default
58863 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58864 default
58865 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58866 system/zones-monitoring
58867 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58868 initial
58869 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58870 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58871 default
58872 last-import
58873 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58874 running
58875 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58876 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58877 start
58878 system/zones
58879 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58880 default
58881 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58882 network/ipsec/policy
58883 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58884 system/ipcc
58885 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58886 default
58887 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58888 default
58889 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58890 initial
58891 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58892 last-import
58893 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58894 system/update-man-index
58895 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58896 running
58897 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58898 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58899 default
58900 start
58901 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58902 system/process-security
58903 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58904 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58905 network/ipsec/manual-key
58906 default
58907 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58908 default
58909 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58910 system/fm/notify-params
58911 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58912 initial
58913 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58914 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58915 default
58916 last-import
58917 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58918 running
58919 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58920 system/pkgserv
58921 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58922 default
58923 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58924 network/ipsec/ike
58925 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58926 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58927 default
58928 system/extended-accounting
58929 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58930 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58931 initial
58932 flow
58933 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58934 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58935 last-import
58936 net
58937 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58938 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58939 running
58940 process
58941 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58942 task
58943 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58944 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58945 system/consadm
58946 network/install
58947 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58948 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58949 default
58950 default
58951 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58952 initial
58953 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58954 system/scheduler
58955 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58956 last-import
58957 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58958 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58959 running
58960 default
58961 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58962 system/pfexec
58963 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58964 network/shares/group
58965 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58966 default
58967 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58968 default
58969 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58970 system/illumos/metadata
58971 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58972 initial
58973 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58974 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58975 last-import
58976 default
58977 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58978 running
58979 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58980 start
58981 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58982 system/sar
58983 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58984 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
58985 default
58986 zfs
58987 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58988 running
58989 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58990 system/early-manifest-import
58991 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
58992 start
58993 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
58994 default
58995 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
58996 system/fcoe_target
58997 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
58998 system/filesystem/reparse
58999 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
59000 default
59001 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59002 default
59003 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59004 initial
59005 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
59006 system/device/mpxio-upgrade
59007 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59008 last-import
59009 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
59010 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59011 default
59012 running
59013 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
59014 system/device/audio
59015 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59016 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
59017 network/rpc/smserver
59018 default
59019 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59020 default
59021 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
59022 system/device/allocate
59023 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59024 initial
59025 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
59026 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59027 default
59028 last-import
59029 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59030 running
59031 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
59032 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59033 smf/legacy_run
59034 start
59035 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59036 network/rpc/keyserv
59037 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59038 default
59039 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59040 initial
59041 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59042 last-import
59043 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59044 running
59045 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59046 network/rpc/gss
59047 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59048 default
59049 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59050 initial
59051 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59052 last-import
59053 Ok(Service { scf: Scf { handle: 0x6b1ad0 }, service: 0x6bddd0 })
59054 site/buildomat/agent
59055 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59056 running
59057 Ok(Instance { scf: Scf { handle: 0x6b1ad0 }, instance: 0x6bd8d0 })
59058 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59059 default
59060 start
59061 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59062 network/ipfilter
59063 test tests::instance_iter ... ok
59064 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59065 default
59066 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59067 initial
59068 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59069 last-import
59070 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59071 running
59072 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59073 network/ldap/client
59074 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59075 default
59076 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59077 initial
59078 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59079 last-import
59080 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59081 running
59082 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59083 network/smb/client
59084 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59085 default
59086 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59087 initial
59088 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59089 last-import
59090 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59091 running
59092 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59093 network/smb/server
59094 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59095 default
59096 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59097 initial
59098 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59099 last-import
59100 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59101 running
59102 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59103 network/routing-setup
59104 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59105 default
59106 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59107 initial
59108 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59109 last-import
59110 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59111 running
59112 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59113 start
59114 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59115 network/npiv_config
59116 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59117 default
59118 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59119 initial
59120 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59121 last-import
59122 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59123 running
59124 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59125 start
59126 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59127 system/device/fc-fabric
59128 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59129 default
59130 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59131 initial
59132 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59133 last-import
59134 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59135 previous
59136 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59137 running
59138 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59139 start
59140 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59141 network/ssh
59142 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59143 default
59144 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59145 initial
59146 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59147 last-import
59148 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59149 running
59150 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59151 start
59152 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59153 network/varpd
59154 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59155 default
59156 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59157 initial
59158 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59159 last-import
59160 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59161 running
59162 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59163 network/security/kadmin
59164 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59165 default
59166 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59167 initial
59168 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59169 last-import
59170 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59171 running
59172 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59173 network/security/krb5kdc
59174 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59175 default
59176 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59177 initial
59178 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59179 last-import
59180 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59181 running
59182 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59183 network/security/ktkt_warn
59184 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59185 default
59186 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59187 initial
59188 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59189 last-import
59190 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59191 running
59192 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59193 start
59194 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59195 network/device-discovery/printers
59196 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59197 snmp
59198 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59199 initial
59200 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59201 last-import
59202 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59203 running
59204 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59205 network/service
59206 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59207 default
59208 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59209 initial
59210 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59211 last-import
59212 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59213 running
59214 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59215 start
59216 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59217 network/nis/client
59218 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59219 default
59220 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59221 initial
59222 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59223 last-import
59224 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59225 running
59226 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59227 network/location
59228 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59229 default
59230 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59231 initial
59232 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59233 last-import
59234 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59235 running
59236 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59237 network/dns/client
59238 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59239 default
59240 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59241 initial
59242 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59243 last-import
59244 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59245 previous
59246 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59247 running
59248 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59249 start
59250 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59251 system/name-service-cache
59252 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59253 default
59254 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59255 initial
59256 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59257 last-import
59258 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59259 previous
59260 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59261 running
59262 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59263 start
59264 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59265 network/nfs/mapid
59266 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59267 default
59268 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59269 initial
59270 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59271 last-import
59272 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59273 previous
59274 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59275 running
59276 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59277 network/chrony
59278 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59279 default
59280 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59281 initial
59282 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59283 last-import
59284 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59285 running
59286 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59287 network/ibd-post-upgrade
59288 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59289 default
59290 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59291 initial
59292 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59293 last-import
59294 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59295 running
59296 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59297 start
59298 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59299 network/tftp/udp6
59300 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59301 default
59302 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59303 initial
59304 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59305 last-import
59306 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59307 running
59308 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59309 network/netmask
59310 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59311 default
59312 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59313 initial
59314 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59315 last-import
59316 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59317 running
59318 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59319 start
59320 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59321 network/dns/multicast
59322 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59323 default
59324 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59325 initial
59326 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59327 last-import
59328 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59329 running
59330 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59331 network/dns/install
59332 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59333 default
59334 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59335 initial
59336 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59337 last-import
59338 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59339 running
59340 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59341 network/nfs/log
59342 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59343 default
59344 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59345 initial
59346 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59347 last-import
59348 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59349 running
59350 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59351 network/nfs/rquota
59352 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59353 default
59354 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59355 initial
59356 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59357 last-import
59358 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59359 running
59360 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59361 network/nfs/client
59362 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59363 default
59364 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59365 initial
59366 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59367 last-import
59368 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59369 running
59370 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59371 network/nfs/server
59372 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59373 default
59374 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59375 initial
59376 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59377 last-import
59378 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59379 running
59380 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59381 network/nfs/cbd
59382 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59383 default
59384 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59385 initial
59386 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59387 last-import
59388 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59389 running
59390 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59391 network/nfs/status
59392 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59393 default
59394 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59395 initial
59396 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59397 last-import
59398 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59399 running
59400 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59401 network/nfs/nlockmgr
59402 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59403 default
59404 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59405 initial
59406 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59407 last-import
59408 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59409 running
59410 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59411 platform/i86pc/acpihpd
59412 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59413 default
59414 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59415 initial
59416 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59417 last-import
59418 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59419 running
59420 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59421 start
59422 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59423 system/sac
59424 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59425 default
59426 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59427 initial
59428 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59429 last-import
59430 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59431 running
59432 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59433 start
59434 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59435 system/fcoe_initiator
59436 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59437 default
59438 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59439 initial
59440 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59441 last-import
59442 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59443 running
59444 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59445 start
59446 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59447 system/fmd
59448 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59449 default
59450 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59451 initial
59452 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59453 last-import
59454 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59455 running
59456 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59457 start
59458 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59459 system/sysevent
59460 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59461 default
59462 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59463 initial
59464 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59465 last-import
59466 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59467 running
59468 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59469 start
59470 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59471 system/boot-config
59472 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59473 default
59474 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59475 initial
59476 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59477 last-import
59478 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59479 running
59480 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59481 start
59482 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59483 system/picl
59484 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59485 default
59486 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59487 initial
59488 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59489 last-import
59490 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59491 running
59492 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59493 start
59494 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59495 system/coreadm
59496 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59497 default
59498 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59499 initial
59500 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59501 last-import
59502 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59503 running
59504 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59505 start
59506 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59507 system/hal
59508 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59509 default
59510 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59511 initial
59512 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59513 last-import
59514 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59515 running
59516 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59517 start
59518 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59519 system/resource-mgmt
59520 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59521 default
59522 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59523 initial
59524 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59525 last-import
59526 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59527 running
59528 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59529 start
59530 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59531 system/rcap
59532 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59533 system/system-log
59534 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59535 default
59536 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59537 initial
59538 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59539 last-import
59540 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59541 previous
59542 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59543 running
59544 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59545 start
59546 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59547 rsyslog
59548 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59549 initial
59550 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59551 last-import
59552 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59553 previous
59554 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59555 running
59556 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59557 system/dumpadm
59558 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59559 default
59560 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59561 initial
59562 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59563 last-import
59564 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59565 running
59566 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59567 start
59568 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59569 system/dbus
59570 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59571 default
59572 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59573 initial
59574 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59575 last-import
59576 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59577 running
59578 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59579 start
59580 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59581 system/pools
59582 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59583 default
59584 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59585 initial
59586 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59587 last-import
59588 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59589 running
59590 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59591 system/power
59592 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59593 default
59594 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59595 initial
59596 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59597 last-import
59598 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59599 running
59600 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59601 start
59602 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59603 system/keymap
59604 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59605 default
59606 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59607 initial
59608 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59609 last-import
59610 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59611 running
59612 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59613 start
59614 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59615 system/auditset
59616 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59617 default
59618 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59619 initial
59620 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59621 last-import
59622 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59623 running
59624 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59625 start
59626 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59627 system/stmf
59628 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59629 default
59630 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59631 initial
59632 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59633 last-import
59634 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59635 running
59636 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59637 system/hotplug
59638 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59639 default
59640 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59641 initial
59642 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59643 last-import
59644 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59645 running
59646 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59647 system/rbac
59648 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59649 default
59650 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59651 initial
59652 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59653 last-import
59654 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59655 running
59656 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59657 start
59658 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59659 system/logadm-upgrade
59660 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59661 default
59662 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59663 initial
59664 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59665 last-import
59666 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59667 running
59668 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59669 start
59670 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59671 system/hostid
59672 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59673 default
59674 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59675 initial
59676 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59677 last-import
59678 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59679 running
59680 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59681 start
59682 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59683 system/filesystem/autofs
59684 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59685 default
59686 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59687 initial
59688 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59689 last-import
59690 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59691 running
59692 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59693 start
59694 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59695 system/cron
59696 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59697 default
59698 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59699 initial
59700 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59701 last-import
59702 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59703 running
59704 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59705 start
59706 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59707 system/illumos/userscript
59708 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59709 default
59710 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59711 initial
59712 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59713 last-import
59714 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59715 running
59716 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59717 start
59718 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59719 system/vtdaemon
59720 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59721 default
59722 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59723 initial
59724 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59725 last-import
59726 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59727 running
59728 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59729 system/boot-archive-update
59730 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59731 default
59732 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59733 initial
59734 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59735 last-import
59736 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59737 running
59738 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59739 start
59740 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59741 system/cryptosvc
59742 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59743 default
59744 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59745 initial
59746 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59747 last-import
59748 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59749 running
59750 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59751 start
59752 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59753 system/intrd
59754 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59755 default
59756 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59757 initial
59758 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59759 last-import
59760 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59761 running
59762 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59763 start
59764 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59765 system/rmtmpfiles
59766 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59767 default
59768 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59769 initial
59770 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59771 last-import
59772 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59773 running
59774 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59775 start
59776 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59777 system/t6init
59778 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59779 default
59780 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59781 initial
59782 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59783 last-import
59784 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59785 running
59786 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59787 system/auditd
59788 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59789 default
59790 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59791 initial
59792 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59793 last-import
59794 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59795 running
59796 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59797 system/idmap
59798 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59799 default
59800 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59801 initial
59802 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59803 last-import
59804 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59805 running
59806 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59807 system/zones-monitoring
59808 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59809 default
59810 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59811 initial
59812 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59813 last-import
59814 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59815 running
59816 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59817 start
59818 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59819 system/zones
59820 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59821 default
59822 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59823 initial
59824 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59825 last-import
59826 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59827 previous
59828 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59829 running
59830 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59831 start
59832 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59833 system/ipcc
59834 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59835 default
59836 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59837 initial
59838 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59839 last-import
59840 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59841 running
59842 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59843 start
59844 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59845 system/update-man-index
59846 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59847 default
59848 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59849 initial
59850 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59851 last-import
59852 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59853 running
59854 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59855 start
59856 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59857 system/process-security
59858 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59859 default
59860 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59861 initial
59862 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59863 last-import
59864 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59865 running
59866 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59867 system/fm/notify-params
59868 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59869 default
59870 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59871 initial
59872 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59873 last-import
59874 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59875 running
59876 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59877 system/pkgserv
59878 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59879 default
59880 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59881 initial
59882 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59883 last-import
59884 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59885 running
59886 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59887 start
59888 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59889 system/extended-accounting
59890 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59891 flow
59892 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59893 initial
59894 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59895 last-import
59896 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59897 running
59898 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59899 net
59900 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59901 initial
59902 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59903 last-import
59904 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59905 running
59906 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59907 process
59908 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59909 initial
59910 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59911 last-import
59912 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59913 running
59914 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59915 task
59916 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59917 initial
59918 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59919 last-import
59920 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59921 running
59922 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59923 system/consadm
59924 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59925 default
59926 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59927 initial
59928 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59929 last-import
59930 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59931 running
59932 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59933 system/scheduler
59934 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59935 default
59936 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59937 initial
59938 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59939 last-import
59940 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59941 running
59942 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59943 start
59944 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59945 system/pfexec
59946 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59947 default
59948 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59949 initial
59950 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59951 last-import
59952 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59953 running
59954 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59955 start
59956 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59957 system/illumos/metadata
59958 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59959 default
59960 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59961 initial
59962 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59963 last-import
59964 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59965 running
59966 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59967 start
59968 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59969 system/sar
59970 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59971 default
59972 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59973 initial
59974 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59975 last-import
59976 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59977 running
59978 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59979 system/early-manifest-import
59980 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59981 default
59982 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59983 initial
59984 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59985 last-import
59986 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59987 running
59988 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59989 start
59990 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
59991 system/fcoe_target
59992 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
59993 default
59994 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59995 initial
59996 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59997 last-import
59998 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
59999 running
60000 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
60001 system/device/mpxio-upgrade
60002 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
60003 default
60004 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60005 initial
60006 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60007 last-import
60008 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60009 running
60010 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
60011 system/device/audio
60012 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
60013 default
60014 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60015 initial
60016 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60017 last-import
60018 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60019 running
60020 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60021 start
60022 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
60023 system/device/allocate
60024 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
60025 default
60026 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60027 initial
60028 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60029 last-import
60030 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60031 running
60032 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
60033 smf/legacy_run
60034 Ok(Service { scf: Scf { handle: 0x6bc010 }, service: 0x6bd750 })
60035 site/buildomat/agent
60036 Ok(Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 })
60037 default
60038 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60039 last-import
60040 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60041 running
60042 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60043 initial
60044 Instance { scf: Scf { handle: 0x6bc010 }, instance: 0x6bd6d0 }
60045 start
60046 test tests::snapshot_iter ... ok
60047 
60048 test result: ok. 10 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.11s
60049 
60050 Running `/work/oxidecomputer/crucible/target/debug/deps/crudd-3e9b00990c25260e --nocapture`
60051 
60052 running 0 tests
60053 
60054 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
60055 
60056 Running `/work/oxidecomputer/crucible/target/debug/deps/crutest-af78e92d646e2d06 --nocapture`
60057 
60058 running 64 tests
60059 test protocol::tests::correctly_detect_truncated_message ... ok
60060 test protocol::tests::rt_commit ... ok
60061 test protocol::tests::rt_activate ... ok
60062 test protocol::tests::rt_deactivate ... ok
60063 test protocol::tests::rt_done_ok ... ok
60064 test protocol::tests::rt_generic ... ok
60065 test protocol::tests::rt_info ... ok
60066 test protocol::tests::rt_info_please ... ok
60067 test protocol::tests::rt_is_active ... ok
60068 test protocol::tests::rt_is_show ... ok
60069 test protocol::tests::rt_perf ... ok
60070 test protocol::tests::rt_read ... ok
60071 test protocol::tests::rt_my_uuid ... ok
60072 test test::test_95_10 ... ok
60073 test protocol::tests::rt_write ... ok
60074 test protocol::tests::rt_write_unwritten ... ok
60075 test protocol::tests::rt_replace ... ok
60076 test protocol::tests::rt_uuid ... ok
60077 test test::test_95_2 ... ok
60078 test test::test_95_21 ... ok
60079 test test::test_95_small ... ok
60080 test test::test_perc_bad_big_perc ... ok
60081 test test::test_perc_bad_perc ... ok
60082 test test::test_95_20 ... ok
60083 test test::test_perc_mixed ... ok
60084 test test::test_read_compare_empty_data ... ok
60085 test test::test_read_compare_1 ... ok
60086 test test::test_read_compare_commit ... ok
60087 test test::test_read_compare_empty ... Mismatch Block::0 bo:1 Volume offset:1 Expected:3 Got:2
60088 Mismatch Block::0 bo:2 Volume offset:2 Expected:3 Got:2
60089 Mismatch Block::0 bo:1 Volume offset:1 Expected:1 Got:2
60090 Mismatch Block::0 bo:2 Volume offset:2 Expected:1 Got:2
60091 Mismatch Block::0 bo:3 Volume offset:3 Expected:1 Got:2
60092 Mismatch Block::0 bo:4 Volume offset:4 Expected:1 Got:2
60093 Mismatch Block::0 bo:5 Volume offset:5 Expected:1 Got:2
60094 Mismatch Block::0 bo:6 Volume offset:6 Expected:1 Got:2
60095 Mismatch Block::0 bo:7 Volume offset:7 Expected:1 Got:2
60096 Mismatch Block::0 bo:8 Volume offset:8 Expected:1 Got:2
60097 Mismatch Block::0 bo:9 Volume offset:9 Expected:1 Got:2
60098 Mismatch Block::0 bo:3 Volume offset:3 Expected:3 Got:2
60099 Mismatch Block::0 bo:10 Volume offset:10 Expected:1 Got:2
60100 Mismatch Block::0 bo:4 Volume offset:4 Expected:3 Got:2
60101 Mismatch Block::0 bo:11 Volume offset:11 Expected:1 Got:2
60102 Mismatch Block::0 bo:5 Volume offset:5 Expected:3 Got:2
60103 Mismatch Block::0 bo:12 Volume offset:12 Expected:1 Got:2
60104 Mismatch Block::0 bo:6 Volume offset:6 Expected:3 Got:2
60105 Mismatch Block::0 bo:13 Volume offset:13 Expected:1 Got:2
60106 Mismatch Block::0 bo:7 Volume offset:7 Expected:3 Got:2
60107 Mismatch Block::0 bo:14 Volume offset:14 Expected:1 Got:2
60108 Mismatch Block::0 bo:8 Volume offset:8 Expected:3 Got:2
60109 Mismatch Block::0 bo:15 Volume offset:15 Expected:1 Got:2
60110 Mismatch Block::0 bo:9 Volume offset:9 Expected:3 Got:2
60111 Mismatch Block::0 bo:16 Volume offset:16 Expected:1 Got:2
60112 Mismatch Block::0 bo:10 Volume offset:10 Expected:3 Got:2
60113 Mismatch Block::0 bo:17 Volume offset:17 Expected:1 Got:2
60114 Mismatch Block::0 bo:11 Volume offset:11 Expected:3 Got:2
60115 Mismatch Block::0 bo:18 Volume offset:18 Expected:1 Got:2
60116 Mismatch Block::0 bo:12 Volume offset:12 Expected:3 Got:2
60117 Mismatch Block::0 bo:19 Volume offset:19 Expected:1 Got:2
60118 Mismatch Block::0 bo:13 Volume offset:13 Expected:3 Got:2
60119 Mismatch Block::0 bo:20 Volume offset:20 Expected:1 Got:2
60120 Mismatch Block::0 bo:14 Volume offset:14 Expected:3 Got:2
60121 Mismatch Block::0 bo:21 Volume offset:21 Expected:1 Got:2
60122 Mismatch Block::0 bo:15 Volume offset:15 Expected:3 Got:2
60123 Mismatch Block::0 bo:22 Volume offset:22 Expected:1 Got:2
60124 Mismatch Block::0 bo:16 Volume offset:16 Expected:3 Got:2
60125 Mismatch Block::0 bo:23 Volume offset:23 Expected:1 Got:2
60126 Mismatch Block::0 bo:17 Volume offset:17 Expected:3 Got:2
60127 Mismatch Block::2 bo:1 Volume offset:1025 Expected:2 Got:9
60128 Mismatch Block::0 bo:18 Volume offset:18 Expected:3 Got:2
60129 Mismatch Block::0 bo:24 Volume offset:24 Expected:1 Got:2
60130 Mismatch Block::0 bo:19 Volume offset:19 Expected:3 Got:2
60131 Mismatch Block::0 bo:25 Volume offset:25 Expected:1 Got:2
60132 Mismatch Block::0 bo:20 Volume offset:20 Expected:3 Got:2
60133 Mismatch Block::0 bo:26 Volume offset:26 Expected:1 Got:2
60134 Mismatch Block::0 bo:21 Volume offset:21 Expected:3 Got:2
60135 Mismatch Block::0 bo:27 Volume offset:27 Expected:1 Got:2
60136 Mismatch Block::0 bo:22 Volume offset:22 Expected:3 Got:2
60137 Mismatch Block::0 bo:28 Volume offset:28 Expected:1 Got:2
60138 Mismatch Block::0 bo:23 Volume offset:23 Expected:3 Got:2
60139 Mismatch Block::0 bo:29 Volume offset:29 Expected:1 Got:2
60140 Mismatch Block::0 bo:24 Volume offset:24 Expected:3 Got:2
60141 Mismatch Block::0 bo:30 Volume offset:30 Expected:1 Got:2
60142 Mismatch Block::0 bo:25 Volume offset:25 Expected:3 Got:2
60143 Mismatch Block::0 bo:31 Volume offset:31 Expected:1 Got:2
60144 Mismatch Block::0 bo:26 Volume offset:26 Expected:3 Got:2
60145 Mismatch Block::0 bo:32 Volume offset:32 Expected:1 Got:2
60146 Mismatch Block::0 bo:27 Volume offset:27 Expected:3 Got:2
60147 Mismatch Block::0 bo:33 Volume offset:33 Expected:1 Got:2
60148 Mismatch Block::0 bo:28 Volume offset:28 Expected:3 Got:2
60149 Mismatch Block::0 bo:34 Volume offset:34 Expected:1 Got:2
60150 Mismatch Block::0 bo:29 Volume offset:29 Expected:3 Got:2
60151 Mismatch Block::0 bo:35 Volume offset:35 Expected:1 Got:2
60152 Mismatch Block::0 bo:30 Volume offset:30 Expected:3 Got:2
60153 Mismatch Block::0 bo:36 Volume offset:36 Expected:1 Got:2
60154 Mismatch Block::0 bo:31 Volume offset:31 Expected:3 Got:2
60155 Mismatch Block::0 bo:37 Volume offset:37 Expected:1 Got:2
60156 Mismatch Block::0 bo:32 Volume offset:32 Expected:3 Got:2
60157 Mismatch Block::0 bo:38 Volume offset:38 Expected:1 Got:2
60158 Mismatch Block::0 bo:33 Volume offset:33 Expected:3 Got:2
60159 Mismatch Block::0 bo:39 Volume offset:39 Expected:1 Got:2
60160 Mismatch Block::0 bo:34 Volume offset:34 Expected:3 Got:2
60161 Mismatch Block::0 bo:40 Volume offset:40 Expected:1 Got:2
60162 Mismatch Block::0 bo:35 Volume offset:35 Expected:3 Got:2
60163 Mismatch Block::0 bo:41 Volume offset:41 Expected:1 Got:2
60164 Mismatch Block::0 bo:36 Volume offset:36 Expected:3 Got:2
60165 Mismatch Block::0 bo:42 Volume offset:42 Expected:1 Got:2
60166 Mismatch Block::0 bo:37 Volume offset:37 Expected:3 Got:2
60167 Mismatch Block::0 bo:43 Volume offset:43 Expected:1 Got:2
60168 Mismatch Block::0 bo:38 Volume offset:38 Expected:3 Got:2
60169 Mismatch Block::0 bo:44 Volume offset:44 Expected:1 Got:2
60170 Mismatch Block::0 bo:39 Volume offset:39 Expected:3 Got:2
60171 Mismatch Block::0 bo:45 Volume offset:45 Expected:1 Got:2
60172 Mismatch Block::0 bo:40 Volume offset:40 Expected:3 Got:2
60173 Mismatch Block::0 bo:46 Volume offset:46 Expected:1 Got:2
60174 Mismatch Block::0 bo:41 Volume offset:41 Expected:3 Got:2
60175 Mismatch Block::0 bo:47 Volume offset:47 Expected:1 Got:2
60176 Mismatch Block::0 bo:42 Volume offset:42 Expected:3 Got:2
60177 Mismatch Block::0 bo:48 Volume offset:48 Expected:1 Got:2
60178 Mismatch Block::0 bo:43 Volume offset:43 Expected:3 Got:2
60179 Mismatch Block::0 bo:49 Volume offset:49 Expected:1 Got:2
60180 Mismatch Block::0 bo:44 Volume offset:44 Expected:3 Got:2
60181 Mismatch Block::0 bo:50 Volume offset:50 Expected:1 Got:2
60182 Mismatch Block::0 bo:45 Volume offset:45 Expected:3 Got:2
60183 Mismatch Block::0 bo:51 Volume offset:51 Expected:1 Got:2
60184 Mismatch Block::0 bo:46 Volume offset:46 Expected:3 Got:2
60185 Mismatch Block::0 bo:52 Volume offset:52 Expected:1 Got:2
60186 Mismatch Block::0 bo:47 Volume offset:47 Expected:3 Got:2
60187 Mismatch Block::0 bo:53 Volume offset:53 Expected:1 Got:2
60188 Mismatch Block::0 bo:48 Volume offset:48 Expected:3 Got:2
60189 Mismatch Block::0 bo:54 Volume offset:54 Expected:1 Got:2
60190 Mismatch Block::0 bo:49 Volume offset:49 Expected:3 Got:2
60191 Mismatch Block::0 bo:55 Volume offset:55 Expected:1 Got:2
60192 Mismatch Block::0 bo:50 Volume offset:50 Expected:3 Got:2
60193 Mismatch Block::0 bo:56 Volume offset:56 Expected:1 Got:2
60194 Mismatch Block::0 bo:51 Volume offset:51 Expected:3 Got:2
60195 Mismatch Block::0 bo:57 Volume offset:57 Expected:1 Got:2
60196 Mismatch Block::0 bo:52 Volume offset:52 Expected:3 Got:2
60197 Mismatch Block::0 bo:58 Volume offset:58 Expected:1 Got:2
60198 Mismatch Block::0 bo:53 Volume offset:53 Expected:3 Got:2
60199 Mismatch Block::0 bo:59 Volume offset:59 Expected:1 Got:2
60200 Mismatch Block::0 bo:54 Volume offset:54 Expected:3 Got:2
60201 Mismatch Block::0 bo:60 Volume offset:60 Expected:1 Got:2
60202 Mismatch Block::0 bo:55 Volume offset:55 Expected:3 Got:2
60203 Mismatch Block::0 bo:61 Volume offset:61 Expected:1 Got:2
60204 Mismatch Block::0 bo:56 Volume offset:56 Expected:3 Got:2
60205 Mismatch Block::0 bo:62 Volume offset:62 Expected:1 Got:2
60206 Mismatch Block::0 bo:57 Volume offset:57 Expected:3 Got:2
60207 Mismatch Block::0 bo:63 Volume offset:63 Expected:1 Got:2
60208 Mismatch Block::0 bo:58 Volume offset:58 Expected:3 Got:2
60209 Mismatch Block::0 bo:64 Volume offset:64 Expected:1 Got:2
60210 Mismatch Block::0 bo:59 Volume offset:59 Expected:3 Got:2
60211 Mismatch Block::0 bo:65 Volume offset:65 Expected:1 Got:2
60212 Mismatch Block::0 bo:60 Volume offset:60 Expected:3 Got:2
60213 Mismatch Block::0 bo:66 Volume offset:66 Expected:1 Got:2
60214 Mismatch Block::0 bo:61 Volume offset:61 Expected:3 Got:2
60215 Mismatch Block::0 bo:67 Volume offset:67 Expected:1 Got:2
60216 Mismatch Block::0 bo:62 Volume offset:62 Expected:3 Got:2
60217 Mismatch Block::0 bo:68 Volume offset:68 Expected:1 Got:2
60218 Mismatch Block::0 bo:63 Volume offset:63 Expected:3 Got:2
60219 Mismatch Block::0 bo:69 Volume offset:69 Expected:1 Got:2
60220 Mismatch Block::0 bo:64 Volume offset:64 Expected:3 Got:2
60221 Mismatch Block::0 bo:70 Volume offset:70 Expected:1 Got:2
60222 Mismatch Block::0 bo:65 Volume offset:65 Expected:3 Got:2
60223 Mismatch Block::0 bo:71 Volume offset:71 Expected:1 Got:2
60224 Mismatch Block::0 bo:66 Volume offset:66 Expected:3 Got:2
60225 Mismatch Block::0 bo:72 Volume offset:72 Expected:1 Got:2
60226 Mismatch Block::0 bo:67 Volume offset:67 Expected:3 Got:2
60227 Mismatch Block::0 bo:73 Volume offset:73 Expected:1 Got:2
60228 Mismatch Block::0 bo:68 Volume offset:68 Expected:3 Got:2
60229 Mismatch Block::0 bo:74 Volume offset:74 Expected:1 Got:2
60230 Mismatch Block::0 bo:69 Volume offset:69 Expected:3 Got:2
60231 Mismatch Block::0 bo:75 Volume offset:75 Expected:1 Got:2
60232 Mismatch Block::0 bo:70 Volume offset:70 Expected:3 Got:2
60233 Mismatch Block::0 bo:76 Volume offset:76 Expected:1 Got:2
60234 Mismatch Block::0 bo:71 Volume offset:71 Expected:3 Got:2
60235 Mismatch Block::0 bo:77 Volume offset:77 Expected:1 Got:2
60236 Mismatch Block::0 bo:72 Volume offset:72 Expected:3 Got:2
60237 Mismatch Block::0 bo:78 Volume offset:78 Expected:1 Got:2
60238 Mismatch Block::0 bo:73 Volume offset:73 Expected:3 Got:2
60239 Mismatch Block::0 bo:79 Volume offset:79 Expected:1 Got:2
60240 Mismatch Block::0 bo:74 Volume offset:74 Expected:3 Got:2
60241 Mismatch Block::0 bo:80 Volume offset:80 Expected:1 Got:2
60242 Mismatch Block::0 bo:75 Volume offset:75 Expected:3 Got:2
60243 Mismatch Block::0 bo:81 Volume offset:81 Expected:1 Got:2
60244 Mismatch Block::0 bo:76 Volume offset:76 Expected:3 Got:2
60245 Mismatch Block::0 bo:82 Volume offset:82 Expected:1 Got:2
60246 Mismatch Block::0 bo:77 Volume offset:77 Expected:3 Got:2
60247 Mismatch Block::0 bo:83 Volume offset:83 Expected:1 Got:2
60248 Mismatch Block::0 bo:78 Volume offset:78 Expected:3 Got:2
60249 Mismatch Block::0 bo:84 Volume offset:84 Expected:1 Got:2
60250 Mismatch Block::0 bo:79 Volume offset:79 Expected:3 Got:2
60251 Mismatch Block::0 bo:85 Volume offset:85 Expected:1 Got:2
60252 Mismatch Block::0 bo:80 Volume offset:80 Expected:3 Got:2
60253 Mismatch Block::0 bo:86 Volume offset:86 Expected:1 Got:2
60254 Mismatch Block::0 bo:81 Volume offset:81 Expected:3 Got:2
60255 Mismatch Block::0 bo:87 Volume offset:87 Expected:1 Got:2
60256 Mismatch Block::0 bo:82 Volume offset:82 Expected:3 Got:2
60257 Mismatch Block::0 bo:88 Volume offset:88 Expected:1 Got:2
60258 Mismatch Block::0 bo:83 Volume offset:83 Expected:3 Got:2
60259 Mismatch Block::0 bo:89 Volume offset:89 Expected:1 Got:2
60260 Mismatch Block::0 bo:84 Volume offset:84 Expected:3 Got:2
60261 Mismatch Block::0 bo:90 Volume offset:90 Expected:1 Got:2
60262 Mismatch Block::0 bo:85 Volume offset:85 Expected:3 Got:2
60263 Mismatch Block::0 bo:91 Volume offset:91 Expected:1 Got:2
60264 Mismatch Block::0 bo:86 Volume offset:86 Expected:3 Got:2
60265 Mismatch Block::0 bo:92 Volume offset:92 Expected:1 Got:2
60266 Mismatch Block::0 bo:87 Volume offset:87 Expected:3 Got:2
60267 Mismatch Block::0 bo:93 Volume offset:93 Expected:1 Got:2
60268 Mismatch Block::0 bo:88 Volume offset:88 Expected:3 Got:2
60269 Mismatch Block::0 bo:94 Volume offset:94 Expected:1 Got:2
60270 Mismatch Block::0 bo:89 Volume offset:89 Expected:3 Got:2
60271 Mismatch Block::0 bo:95 Volume offset:95 Expected:1 Got:2
60272 Mismatch Block::0 bo:90 Volume offset:90 Expected:3 Got:2
60273 Mismatch Block::0 bo:96 Volume offset:96 Expected:1 Got:2
60274 Mismatch Block::0 bo:91 Volume offset:91 Expected:3 Got:2
60275 Mismatch Block::0 bo:97 Volume offset:97 Expected:1 Got:2
60276 Mismatch Block::0 bo:92 Volume offset:92 Expected:3 Got:2
60277 Mismatch Block::0 bo:98 Volume offset:98 Expected:1 Got:2
60278 Mismatch Block::0 bo:93 Volume offset:93 Expected:3 Got:2
60279 Mismatch Block::0 bo:99 Volume offset:99 Expected:1 Got:2
60280 Mismatch Block::0 bo:94 Volume offset:94 Expected:3 Got:2
60281 Mismatch Block::0 bo:100 Volume offset:100 Expected:1 Got:2
60282 Mismatch Block::0 bo:95 Volume offset:95 Expected:3 Got:2
60283 Mismatch Block::0 bo:101 Volume offset:101 Expected:1 Got:2
60284 Mismatch Block::0 bo:96 Volume offset:96 Expected:3 Got:2
60285 Mismatch Block::0 bo:102 Volume offset:102 Expected:1 Got:2
60286 Mismatch Block::0 bo:97 Volume offset:97 Expected:3 Got:2
60287 Mismatch Block::0 bo:103 Volume offset:103 Expected:1 Got:2
60288 Mismatch Block::0 bo:98 Volume offset:98 Expected:3 Got:2
60289 Mismatch Block::0 bo:104 Volume offset:104 Expected:1 Got:2
60290 Mismatch Block::0 bo:99 Volume offset:99 Expected:3 Got:2
60291 Mismatch Block::0 bo:105 Volume offset:105 Expected:1 Got:2
60292 Mismatch Block::0 bo:100 Volume offset:100 Expected:3 Got:2
60293 Mismatch Block::0 bo:106 Volume offset:106 Expected:1 Got:2
60294 Mismatch Block::0 bo:101 Volume offset:101 Expected:3 Got:2
60295 Mismatch Block::0 bo:107 Volume offset:107 Expected:1 Got:2
60296 Mismatch Block::0 bo:102 Volume offset:102 Expected:3 Got:2
60297 Mismatch Block::0 bo:108 Volume offset:108 Expected:1 Got:2
60298 Mismatch Block::0 bo:103 Volume offset:103 Expected:3 Got:2
60299 Mismatch Block::0 bo:109 Volume offset:109 Expected:1 Got:2
60300 Mismatch Block::0 bo:104 Volume offset:104 Expected:3 Got:2
60301 Mismatch Block::0 bo:110 Volume offset:110 Expected:1 Got:2
60302 Mismatch Block::0 bo:105 Volume offset:105 Expected:3 Got:2
60303 Mismatch Block::0 bo:111 Volume offset:111 Expected:1 Got:2
60304 Mismatch Block::0 bo:106 Volume offset:106 Expected:3 Got:2
60305 Mismatch Block::0 bo:112 Volume offset:112 Expected:1 Got:2
60306 Mismatch Block::0 bo:107 Volume offset:107 Expected:3 Got:2
60307 Mismatch Block::0 bo:113 Volume offset:113 Expected:1 Got:2
60308 Mismatch Block::0 bo:108 Volume offset:108 Expected:3 Got:2
60309 Mismatch Block::0 bo:114 Volume offset:114 Expected:1 Got:2
60310 Mismatch Block::0 bo:109 Volume offset:109 Expected:3 Got:2
60311 Mismatch Block::0 bo:115 Volume offset:115 Expected:1 Got:2
60312 Mismatch Block::0 bo:110 Volume offset:110 Expected:3 Got:2
60313 Mismatch Block::0 bo:116 Volume offset:116 Expected:1 Got:2
60314 Mismatch Block::0 bo:111 Volume offset:111 Expected:3 Got:2
60315 Mismatch Block::0 bo:117 Volume offset:117 Expected:1 Got:2
60316 Mismatch Block::0 bo:112 Volume offset:112 Expected:3 Got:2
60317 Mismatch Block::0 bo:118 Volume offset:118 Expected:1 Got:2
60318 Mismatch Block::0 bo:113 Volume offset:113 Expected:3 Got:2
60319 Mismatch Block::0 bo:119 Volume offset:119 Expected:1 Got:2
60320 Mismatch Block::0 bo:114 Volume offset:114 Expected:3 Got:2
60321 Mismatch Block::0 bo:120 Volume offset:120 Expected:1 Got:2
60322 Mismatch Block::0 bo:115 Volume offset:115 Expected:3 Got:2
60323 Mismatch Block::0 bo:121 Volume offset:121 Expected:1 Got:2
60324 Mismatch Block::0 bo:116 Volume offset:116 Expected:3 Got:2
60325 Mismatch Block::0 bo:122 Volume offset:122 Expected:1 Got:2
60326 Mismatch Block::0 bo:117 Volume offset:117 Expected:3 Got:2
60327 Mismatch Block::0 bo:123 Volume offset:123 Expected:1 Got:2
60328 Mismatch Block::0 bo:118 Volume offset:118 Expected:3 Got:2
60329 Mismatch Block::0 bo:124 Volume offset:124 Expected:1 Got:2
60330 Mismatch Block::0 bo:119 Volume offset:119 Expected:3 Got:2
60331 Mismatch Block::0 bo:125 Volume offset:125 Expected:1 Got:2
60332 Mismatch Block::0 bo:120 Volume offset:120 Expected:3 Got:2
60333 Mismatch Block::0 bo:126 Volume offset:126 Expected:1 Got:2
60334 Mismatch Block::0 bo:121 Volume offset:121 Expected:3 Got:2
60335 Mismatch Block::0 bo:127 Volume offset:127 Expected:1 Got:2
60336 Mismatch Block::0 bo:122 Volume offset:122 Expected:3 Got:2
60337 Mismatch Block::0 bo:128 Volume offset:128 Expected:1 Got:2
60338 Mismatch Block::0 bo:123 Volume offset:123 Expected:3 Got:2
60339 Mismatch Block::0 bo:129 Volume offset:129 Expected:1 Got:2
60340 Mismatch Block::0 bo:124 Volume offset:124 Expected:3 Got:2
60341 Mismatch Block::0 bo:130 Volume offset:130 Expected:1 Got:2
60342 Mismatch Block::0 bo:125 Volume offset:125 Expected:3 Got:2
60343 Mismatch Block::0 bo:131 Volume offset:131 Expected:1 Got:2
60344 Mismatch Block::0 bo:126 Volume offset:126 Expected:3 Got:2
60345 Mismatch Block::0 bo:132 Volume offset:132 Expected:1 Got:2
60346 Mismatch Block::0 bo:127 Volume offset:127 Expected:3 Got:2
60347 Mismatch Block::0 bo:133 Volume offset:133 Expected:1 Got:2
60348 Mismatch Block::0 bo:128 Volume offset:128 Expected:3 Got:2
60349 Mismatch Block::0 bo:134 Volume offset:134 Expected:1 Got:2
60350 Mismatch Block::0 bo:129 Volume offset:129 Expected:3 Got:2
60351 Mismatch Block::0 bo:135 Volume offset:135 Expected:1 Got:2
60352 Mismatch Block::0 bo:130 Volume offset:130 Expected:3 Got:2
60353 Mismatch Block::0 bo:136 Volume offset:136 Expected:1 Got:2
60354 Mismatch Block::0 bo:131 Volume offset:131 Expected:3 Got:2
60355 Mismatch Block::0 bo:137 Volume offset:137 Expected:1 Got:2
60356 Mismatch Block::0 bo:132 Volume offset:132 Expected:3 Got:2
60357 Mismatch Block::0 bo:138 Volume offset:138 Expected:1 Got:2
60358 Mismatch Block::0 bo:133 Volume offset:133 Expected:3 Got:2
60359 Mismatch Block::0 bo:139 Volume offset:139 Expected:1 Got:2
60360 Mismatch Block::0 bo:134 Volume offset:134 Expected:3 Got:2
60361 Mismatch Block::0 bo:140 Volume offset:140 Expected:1 Got:2
60362 Mismatch Block::0 bo:135 Volume offset:135 Expected:3 Got:2
60363 Mismatch Block::0 bo:141 Volume offset:141 Expected:1 Got:2
60364 Mismatch Block::0 bo:136 Volume offset:136 Expected:3 Got:2
60365 Mismatch Block::0 bo:142 Volume offset:142 Expected:1 Got:2
60366 Mismatch Block::0 bo:137 Volume offset:137 Expected:3 Got:2
60367 Mismatch Block::0 bo:143 Volume offset:143 Expected:1 Got:2
60368 Mismatch Block::0 bo:138 Volume offset:138 Expected:3 Got:2
60369 Mismatch Block::0 bo:144 Volume offset:144 Expected:1 Got:2
60370 Mismatch Block::0 bo:139 Volume offset:139 Expected:3 Got:2
60371 Mismatch Block::0 bo:145 Volume offset:145 Expected:1 Got:2
60372 Mismatch Block::0 bo:140 Volume offset:140 Expected:3 Got:2
60373 Mismatch Block::0 bo:146 Volume offset:146 Expected:1 Got:2
60374 Mismatch Block::0 bo:141 Volume offset:141 Expected:3 Got:2
60375 Mismatch Block::0 bo:147 Volume offset:147 Expected:1 Got:2
60376 Mismatch Block::0 bo:142 Volume offset:142 Expected:3 Got:2
60377 Mismatch Block::0 bo:148 Volume offset:148 Expected:1 Got:2
60378 Mismatch Block::0 bo:143 Volume offset:143 Expected:3 Got:2
60379 Mismatch Block::0 bo:149 Volume offset:149 Expected:1 Got:2
60380 Mismatch Block::0 bo:144 Volume offset:144 Expected:3 Got:2
60381 Mismatch Block::0 bo:150 Volume offset:150 Expected:1 Got:2
60382 Mismatch Block::0 bo:145 Volume offset:145 Expected:3 Got:2
60383 Mismatch Block::0 bo:151 Volume offset:151 Expected:1 Got:2
60384 Mismatch Block::0 bo:146 Volume offset:146 Expected:3 Got:2
60385 Mismatch Block::0 bo:152 Volume offset:152 Expected:1 Got:2
60386 Mismatch Block::0 bo:147 Volume offset:147 Expected:3 Got:2
60387 Mismatch Block::0 bo:153 Volume offset:153 Expected:1 Got:2
60388 Mismatch Block::0 bo:148 Volume offset:148 Expected:3 Got:2
60389 Mismatch Block::0 bo:154 Volume offset:154 Expected:1 Got:2
60390 Mismatch Block::0 bo:149 Volume offset:149 Expected:3 Got:2
60391 Mismatch Block::0 bo:155 Volume offset:155 Expected:1 Got:2
60392 Mismatch Block::0 bo:150 Volume offset:150 Expected:3 Got:2
60393 Mismatch Block::0 bo:156 Volume offset:156 Expected:1 Got:2
60394 Mismatch Block::0 bo:151 Volume offset:151 Expected:3 Got:2
60395 Mismatch Block::0 bo:157 Volume offset:157 Expected:1 Got:2
60396 Mismatch Block::0 bo:152 Volume offset:152 Expected:3 Got:2
60397 Mismatch Block::0 bo:158 Volume offset:158 Expected:1 Got:2
60398 Mismatch Block::0 bo:153 Volume offset:153 Expected:3 Got:2
60399 Mismatch Block::0 bo:159 Volume offset:159 Expected:1 Got:2
60400 Mismatch Block::0 bo:154 Volume offset:154 Expected:3 Got:2
60401 Mismatch Block::0 bo:160 Volume offset:160 Expected:1 Got:2
60402 Mismatch Block::0 bo:155 Volume offset:155 Expected:3 Got:2
60403 Mismatch Block::0 bo:161 Volume offset:161 Expected:1 Got:2
60404 Mismatch Block::0 bo:156 Volume offset:156 Expected:3 Got:2
60405 Mismatch Block::0 bo:162 Volume offset:162 Expected:1 Got:2
60406 Mismatch Block::0 bo:157 Volume offset:157 Expected:3 Got:2
60407 Mismatch Block::0 bo:163 Volume offset:163 Expected:1 Got:2
60408 Mismatch Block::0 bo:158 Volume offset:158 Expected:3 Got:2
60409 Mismatch Block::0 bo:164 Volume offset:164 Expected:1 Got:2
60410 Mismatch Block::0 bo:159 Volume offset:159 Expected:3 Got:2
60411 Mismatch Block::0 bo:165 Volume offset:165 Expected:1 Got:2
60412 Mismatch Block::0 bo:160 Volume offset:160 Expected:3 Got:2
60413 Mismatch Block::0 bo:166 Volume offset:166 Expected:1 Got:2
60414 Mismatch Block::0 bo:161 Volume offset:161 Expected:3 Got:2
60415 Mismatch Block::0 bo:167 Volume offset:167 Expected:1 Got:2
60416 Mismatch Block::0 bo:162 Volume offset:162 Expected:3 Got:2
60417 Mismatch Block::0 bo:168 Volume offset:168 Expected:1 Got:2
60418 Mismatch Block::0 bo:163 Volume offset:163 Expected:3 Got:2
60419 Mismatch Block::0 bo:169 Volume offset:169 Expected:1 Got:2
60420 Mismatch Block::0 bo:164 Volume offset:164 Expected:3 Got:2
60421 Mismatch Block::0 bo:170 Volume offset:170 Expected:1 Got:2
60422 Mismatch Block::0 bo:165 Volume offset:165 Expected:3 Got:2
60423 Mismatch Block::0 bo:171 Volume offset:171 Expected:1 Got:2
60424 Mismatch Block::0 bo:166 Volume offset:166 Expected:3 Got:2
60425 Mismatch Block::0 bo:172 Volume offset:172 Expected:1 Got:2
60426 Mismatch Block::0 bo:167 Volume offset:167 Expected:3 Got:2
60427 Mismatch Block::0 bo:173 Volume offset:173 Expected:1 Got:2
60428 Mismatch Block::0 bo:168 Volume offset:168 Expected:3 Got:2
60429 Mismatch Block::0 bo:174 Volume offset:174 Expected:1 Got:2
60430 Mismatch Block::0 bo:169 Volume offset:169 Expected:3 Got:2
60431 Mismatch Block::0 bo:175 Volume offset:175 Expected:1 Got:2
60432 Mismatch Block::0 bo:170 Volume offset:170 Expected:3 Got:2
60433 Mismatch Block::0 bo:176 Volume offset:176 Expected:1 Got:2
60434 Mismatch Block::0 bo:171 Volume offset:171 Expected:3 Got:2
60435 Mismatch Block::0 bo:177 Volume offset:177 Expected:1 Got:2
60436 Mismatch Block::0 bo:172 Volume offset:172 Expected:3 Got:2
60437 Mismatch Block::0 bo:178 Volume offset:178 Expected:1 Got:2
60438 Mismatch Block::0 bo:173 Volume offset:173 Expected:3 Got:2
60439 Mismatch Block::0 bo:179 Volume offset:179 Expected:1 Got:2
60440 Mismatch Block::0 bo:174 Volume offset:174 Expected:3 Got:2
60441 Mismatch Block::0 bo:180 Volume offset:180 Expected:1 Got:2
60442 Mismatch Block::0 bo:175 Volume offset:175 Expected:3 Got:2
60443 Mismatch Block::0 bo:181 Volume offset:181 Expected:1 Got:2
60444 Mismatch Block::0 bo:176 Volume offset:176 Expected:3 Got:2
60445 Mismatch Block::0 bo:182 Volume offset:182 Expected:1 Got:2
60446 Mismatch Block::0 bo:177 Volume offset:177 Expected:3 Got:2
60447 Mismatch Block::0 bo:183 Volume offset:183 Expected:1 Got:2
60448 Mismatch Block::0 bo:178 Volume offset:178 Expected:3 Got:2
60449 Mismatch Block::0 bo:184 Volume offset:184 Expected:1 Got:2
60450 Mismatch Block::0 bo:179 Volume offset:179 Expected:3 Got:2
60451 Mismatch Block::0 bo:185 Volume offset:185 Expected:1 Got:2
60452 Mismatch Block::0 bo:180 Volume offset:180 Expected:3 Got:2
60453 Mismatch Block::0 bo:186 Volume offset:186 Expected:1 Got:2
60454 Mismatch Block::0 bo:181 Volume offset:181 Expected:3 Got:2
60455 Mismatch Block::0 bo:187 Volume offset:187 Expected:1 Got:2
60456 Mismatch Block::0 bo:182 Volume offset:182 Expected:3 Got:2
60457 Mismatch Block::0 bo:188 Volume offset:188 Expected:1 Got:2
60458 Mismatch Block::0 bo:183 Volume offset:183 Expected:3 Got:2
60459 Mismatch Block::0 bo:189 Volume offset:189 Expected:1 Got:2
60460 Mismatch Block::0 bo:184 Volume offset:184 Expected:3 Got:2
60461 Mismatch Block::0 bo:190 Volume offset:190 Expected:1 Got:2
60462 Mismatch Block::0 bo:185 Volume offset:185 Expected:3 Got:2
60463 Mismatch Block::0 bo:191 Volume offset:191 Expected:1 Got:2
60464 Mismatch Block::0 bo:186 Volume offset:186 Expected:3 Got:2
60465 Mismatch Block::0 bo:192 Volume offset:192 Expected:1 Got:2
60466 Mismatch Block::0 bo:187 Volume offset:187 Expected:3 Got:2
60467 Mismatch Block::0 bo:193 Volume offset:193 Expected:1 Got:2
60468 Mismatch Block::0 bo:188 Volume offset:188 Expected:3 Got:2
60469 Mismatch Block::0 bo:194 Volume offset:194 Expected:1 Got:2
60470 Mismatch Block::0 bo:189 Volume offset:189 Expected:3 Got:2
60471 Mismatch Block::0 bo:195 Volume offset:195 Expected:1 Got:2
60472 Mismatch Block::0 bo:190 Volume offset:190 Expected:3 Got:2
60473 Mismatch Block::0 bo:196 Volume offset:196 Expected:1 Got:2
60474 Mismatch Block::0 bo:191 Volume offset:191 Expected:3 Got:2
60475 Mismatch Block::0 bo:197 Volume offset:197 Expected:1 Got:2
60476 Mismatch Block::0 bo:192 Volume offset:192 Expected:3 Got:2
60477 Mismatch Block::0 bo:198 Volume offset:198 Expected:1 Got:2
60478 Mismatch Block::0 bo:193 Volume offset:193 Expected:3 Got:2
60479 Mismatch Block::0 bo:199 Volume offset:199 Expected:1 Got:2
60480 Mismatch Block::0 bo:194 Volume offset:194 Expected:3 Got:2
60481 Mismatch Block::0 bo:200 Volume offset:200 Expected:1 Got:2
60482 Mismatch Block::0 bo:195 Volume offset:195 Expected:3 Got:2
60483 Mismatch Block::0 bo:201 Volume offset:201 Expected:1 Got:2
60484 Mismatch Block::0 bo:196 Volume offset:196 Expected:3 Got:2
60485 Mismatch Block::0 bo:202 Volume offset:202 Expected:1 Got:2
60486 Mismatch Block::0 bo:197 Volume offset:197 Expected:3 Got:2
60487 Mismatch Block::0 bo:203 Volume offset:203 Expected:1 Got:2
60488 Mismatch Block::0 bo:198 Volume offset:198 Expected:3 Got:2
60489 Mismatch Block::0 bo:204 Volume offset:204 Expected:1 Got:2
60490 Mismatch Block::0 bo:199 Volume offset:199 Expected:3 Got:2
60491 Mismatch Block::0 bo:205 Volume offset:205 Expected:1 Got:2
60492 Mismatch Block::0 bo:200 Volume offset:200 Expected:3 Got:2
60493 Mismatch Block::0 bo:206 Volume offset:206 Expected:1 Got:2
60494 Mismatch Block::0 bo:201 Volume offset:201 Expected:3 Got:2
60495 Mismatch Block::0 bo:207 Volume offset:207 Expected:1 Got:2
60496 Mismatch Block::0 bo:202 Volume offset:202 Expected:3 Got:2
60497 Mismatch Block::0 bo:208 Volume offset:208 Expected:1 Got:2
60498 Mismatch Block::0 bo:203 Volume offset:203 Expected:3 Got:2
60499 Mismatch Block::0 bo:209 Volume offset:209 Expected:1 Got:2
60500 Mismatch Block::0 bo:204 Volume offset:204 Expected:3 Got:2
60501 Mismatch Block::0 bo:210 Volume offset:210 Expected:1 Got:2
60502 Mismatch Block::0 bo:205 Volume offset:205 Expected:3 Got:2
60503 Mismatch Block::0 bo:211 Volume offset:211 Expected:1 Got:2
60504 Mismatch Block::0 bo:206 Volume offset:206 Expected:3 Got:2
60505 Mismatch Block::0 bo:212 Volume offset:212 Expected:1 Got:2
60506 Mismatch Block::0 bo:207 Volume offset:207 Expected:3 Got:2
60507 Mismatch Block::0 bo:213 Volume offset:213 Expected:1 Got:2
60508 Mismatch Block::0 bo:208 Volume offset:208 Expected:3 Got:2
60509 Mismatch Block::0 bo:214 Volume offset:214 Expected:1 Got:2
60510 Mismatch Block::0 bo:209 Volume offset:209 Expected:3 Got:2
60511 Mismatch Block::0 bo:215 Volume offset:215 Expected:1 Got:2
60512 Mismatch Block::0 bo:210 Volume offset:210 Expected:3 Got:2
60513 Mismatch Block::0 bo:216 Volume offset:216 Expected:1 Got:2
60514 Mismatch Block::0 bo:211 Volume offset:211 Expected:3 Got:2
60515 Mismatch Block::0 bo:217 Volume offset:217 Expected:1 Got:2
60516 Mismatch Block::0 bo:212 Volume offset:212 Expected:3 Got:2
60517 Mismatch Block::0 bo:218 Volume offset:218 Expected:1 Got:2
60518 Mismatch Block::0 bo:213 Volume offset:213 Expected:3 Got:2
60519 Mismatch Block::0 bo:219 Volume offset:219 Expected:1 Got:2
60520 Mismatch Block::0 bo:214 Volume offset:214 Expected:3 Got:2
60521 Mismatch Block::0 bo:220 Volume offset:220 Expected:1 Got:2
60522 Mismatch Block::0 bo:215 Volume offset:215 Expected:3 Got:2
60523 Mismatch Block::0 bo:221 Volume offset:221 Expected:1 Got:2
60524 Mismatch Block::0 bo:216 Volume offset:216 Expected:3 Got:2
60525 Mismatch Block::0 bo:222 Volume offset:222 Expected:1 Got:2
60526 Mismatch Block::0 bo:217 Volume offset:217 Expected:3 Got:2
60527 Mismatch Block::0 bo:223 Volume offset:223 Expected:1 Got:2
60528 Mismatch Block::0 bo:218 Volume offset:218 Expected:3 Got:2
60529 Mismatch Block::0 bo:224 Volume offset:224 Expected:1 Got:2
60530 Mismatch Block::0 bo:219 Volume offset:219 Expected:3 Got:2
60531 Mismatch Block::0 bo:225 Volume offset:225 Expected:1 Got:2
60532 Mismatch Block::0 bo:220 Volume offset:220 Expected:3 Got:2
60533 Mismatch Block::0 bo:226 Volume offset:226 Expected:1 Got:2
60534 Mismatch Block::0 bo:221 Volume offset:221 Expected:3 Got:2
60535 Mismatch Block::0 bo:227 Volume offset:227 Expected:1 Got:2
60536 Mismatch Block::0 bo:222 Volume offset:222 Expected:3 Got:2
60537 Mismatch Block::0 bo:228 Volume offset:228 Expected:1 Got:2
60538 Mismatch Block::0 bo:223 Volume offset:223 Expected:3 Got:2
60539 Mismatch Block::0 bo:229 Volume offset:229 Expected:1 Got:2
60540 Mismatch Block::0 bo:224 Volume offset:224 Expected:3 Got:2
60541 Mismatch Block::0 bo:230 Volume offset:230 Expected:1 Got:2
60542 Mismatch Block::0 bo:225 Volume offset:225 Expected:3 Got:2
60543 Mismatch Block::0 bo:231 Volume offset:231 Expected:1 Got:2
60544 Mismatch Block::0 bo:226 Volume offset:226 Expected:3 Got:2
60545 Mismatch Block::0 bo:232 Volume offset:232 Expected:1 Got:2
60546 Mismatch Block::0 bo:227 Volume offset:227 Expected:3 Got:2
60547 Mismatch Block::0 bo:233 Volume offset:233 Expected:1 Got:2
60548 Mismatch Block::0 bo:228 Volume offset:228 Expected:3 Got:2
60549 Mismatch Block::0 bo:234 Volume offset:234 Expected:1 Got:2
60550 Mismatch Block::0 bo:229 Volume offset:229 Expected:3 Got:2
60551 Mismatch Block::0 bo:235 Volume offset:235 Expected:1 Got:2
60552 Mismatch Block::0 bo:230 Volume offset:230 Expected:3 Got:2
60553 Mismatch Block::0 bo:236 Volume offset:236 Expected:1 Got:2
60554 Mismatch Block::0 bo:231 Volume offset:231 Expected:3 Got:2
60555 Mismatch Block::0 bo:237 Volume offset:237 Expected:1 Got:2
60556 Mismatch Block::0 bo:232 Volume offset:232 Expected:3 Got:2
60557 Mismatch Block::0 bo:238 Volume offset:238 Expected:1 Got:2
60558 Mismatch Block::0 bo:233 Volume offset:233 Expected:3 Got:2
60559 Mismatch Block::0 bo:239 Volume offset:239 Expected:1 Got:2
60560 Mismatch Block::0 bo:234 Volume offset:234 Expected:3 Got:2
60561 Mismatch Block::0 bo:240 Volume offset:240 Expected:1 Got:2
60562 Mismatch Block::0 bo:235 Volume offset:235 Expected:3 Got:2
60563 Mismatch Block::0 bo:241 Volume offset:241 Expected:1 Got:2
60564 Mismatch Block::0 bo:236 Volume offset:236 Expected:3 Got:2
60565 Mismatch Block::0 bo:242 Volume offset:242 Expected:1 Got:2
60566 Mismatch Block::0 bo:237 Volume offset:237 Expected:3 Got:2
60567 Mismatch Block::0 bo:243 Volume offset:243 Expected:1 Got:2
60568 Mismatch Block::0 bo:238 Volume offset:238 Expected:3 Got:2
60569 Mismatch Block::0 bo:244 Volume offset:244 Expected:1 Got:2
60570 Mismatch Block::0 bo:239 Volume offset:239 Expected:3 Got:2
60571 Mismatch Block::0 bo:245 Volume offset:245 Expected:1 Got:2
60572 Mismatch Block::0 bo:240 Volume offset:240 Expected:3 Got:2
60573 Mismatch Block::0 bo:246 Volume offset:246 Expected:1 Got:2
60574 Mismatch Block::0 bo:241 Volume offset:241 Expected:3 Got:2
60575 Mismatch Block::0 bo:247 Volume offset:247 Expected:1 Got:2
60576 Mismatch Block::0 bo:242 Volume offset:242 Expected:3 Got:2
60577 Mismatch Block::0 bo:248 Volume offset:248 Expected:1 Got:2
60578 Mismatch Block::0 bo:243 Volume offset:243 Expected:3 Got:2
60579 Mismatch Block::0 bo:249 Volume offset:249 Expected:1 Got:2
60580 Mismatch Block::0 bo:244 Volume offset:244 Expected:3 Got:2
60581 Mismatch Block::0 bo:250 Volume offset:250 Expected:1 Got:2
60582 Mismatch Block::0 bo:245 Volume offset:245 Expected:3 Got:2
60583 Mismatch Block::0 bo:251 Volume offset:251 Expected:1 Got:2
60584 Mismatch Block::0 bo:246 Volume offset:246 Expected:3 Got:2
60585 Mismatch Block::0 bo:252 Volume offset:252 Expected:1 Got:2
60586 Mismatch Block::0 bo:247 Volume offset:247 Expected:3 Got:2
60587 Mismatch Block::0 bo:253 Volume offset:253 Expected:1 Got:2
60588 Mismatch Block::0 bo:248 Volume offset:248 Expected:3 Got:2
60589 Mismatch Block::0 bo:254 Volume offset:254 Expected:1 Got:2
60590 Mismatch Block::0 bo:249 Volume offset:249 Expected:3 Got:2
60591 Mismatch Block::0 bo:255 Volume offset:255 Expected:1 Got:2
60592 Mismatch Block::0 bo:250 Volume offset:250 Expected:3 Got:2
60593 Mismatch Block::0 bo:256 Volume offset:256 Expected:1 Got:2
60594 Mismatch Block::0 bo:251 Volume offset:251 Expected:3 Got:2
60595 Mismatch Block::0 bo:257 Volume offset:257 Expected:1 Got:2
60596 Mismatch Block::0 bo:252 Volume offset:252 Expected:3 Got:2
60597 Mismatch Block::0 bo:258 Volume offset:258 Expected:1 Got:2
60598 Mismatch Block::0 bo:253 Volume offset:253 Expected:3 Got:2
60599 Mismatch Block::0 bo:259 Volume offset:259 Expected:1 Got:2
60600 Mismatch Block::0 bo:254 Volume offset:254 Expected:3 Got:2
60601 Mismatch Block::0 bo:260 Volume offset:260 Expected:1 Got:2
60602 Mismatch Block::0 bo:255 Volume offset:255 Expected:3 Got:2
60603 Mismatch Block::0 bo:261 Volume offset:261 Expected:1 Got:2
60604 Mismatch Block::0 bo:256 Volume offset:256 Expected:3 Got:2
60605 Mismatch Block::0 bo:262 Volume offset:262 Expected:1 Got:2
60606 Mismatch Block::0 bo:257 Volume offset:257 Expected:3 Got:2
60607 Mismatch Block::0 bo:263 Volume offset:263 Expected:1 Got:2
60608 Mismatch Block::0 bo:258 Volume offset:258 Expected:3 Got:2
60609 Mismatch Block::0 bo:264 Volume offset:264 Expected:1 Got:2
60610 Mismatch Block::0 bo:259 Volume offset:259 Expected:3 Got:2
60611 Mismatch Block::0 bo:265 Volume offset:265 Expected:1 Got:2
60612 Mismatch Block::0 bo:260 Volume offset:260 Expected:3 Got:2
60613 Mismatch Block::0 bo:266 Volume offset:266 Expected:1 Got:2
60614 Mismatch Block::0 bo:261 Volume offset:261 Expected:3 Got:2
60615 Mismatch Block::0 bo:267 Volume offset:267 Expected:1 Got:2
60616 Mismatch Block::0 bo:262 Volume offset:262 Expected:3 Got:2
60617 Mismatch Block::0 bo:268 Volume offset:268 Expected:1 Got:2
60618 Mismatch Block::0 bo:263 Volume offset:263 Expected:3 Got:2
60619 Mismatch Block::0 bo:269 Volume offset:269 Expected:1 Got:2
60620 Mismatch Block::0 bo:264 Volume offset:264 Expected:3 Got:2
60621 Mismatch Block::0 bo:270 Volume offset:270 Expected:1 Got:2
60622 Mismatch Block::0 bo:265 Volume offset:265 Expected:3 Got:2
60623 Mismatch Block::0 bo:271 Volume offset:271 Expected:1 Got:2
60624 Mismatch Block::0 bo:266 Volume offset:266 Expected:3 Got:2
60625 Mismatch Block::0 bo:272 Volume offset:272 Expected:1 Got:2
60626 Mismatch Block::0 bo:267 Volume offset:267 Expected:3 Got:2
60627 Mismatch Block::0 bo:273 Volume offset:273 Expected:1 Got:2
60628 Mismatch Block::0 bo:268 Volume offset:268 Expected:3 Got:2
60629 Mismatch Block::0 bo:274 Volume offset:274 Expected:1 Got:2
60630 Mismatch Block::0 bo:269 Volume offset:269 Expected:3 Got:2
60631 Mismatch Block::0 bo:275 Volume offset:275 Expected:1 Got:2
60632 Mismatch Block::0 bo:270 Volume offset:270 Expected:3 Got:2
60633 Mismatch Block::0 bo:276 Volume offset:276 Expected:1 Got:2
60634 Mismatch Block::0 bo:271 Volume offset:271 Expected:3 Got:2
60635 Mismatch Block::0 bo:277 Volume offset:277 Expected:1 Got:2
60636 Mismatch Block::0 bo:272 Volume offset:272 Expected:3 Got:2
60637 Mismatch Block::0 bo:278 Volume offset:278 Expected:1 Got:2
60638 Mismatch Block::0 bo:273 Volume offset:273 Expected:3 Got:2
60639 Mismatch Block::0 bo:279 Volume offset:279 Expected:1 Got:2
60640 Mismatch Block::0 bo:274 Volume offset:274 Expected:3 Got:2
60641 Mismatch Block::0 bo:280 Volume offset:280 Expected:1 Got:2
60642 Mismatch Block::0 bo:275 Volume offset:275 Expected:3 Got:2
60643 Mismatch Block::0 bo:281 Volume offset:281 Expected:1 Got:2
60644 Mismatch Block::0 bo:276 Volume offset:276 Expected:3 Got:2
60645 Mismatch Block::0 bo:282 Volume offset:282 Expected:1 Got:2
60646 Mismatch Block::0 bo:277 Volume offset:277 Expected:3 Got:2
60647 Mismatch Block::0 bo:283 Volume offset:283 Expected:1 Got:2
60648 Mismatch Block::0 bo:278 Volume offset:278 Expected:3 Got:2
60649 Mismatch Block::0 bo:284 Volume offset:284 Expected:1 Got:2
60650 Mismatch Block::0 bo:279 Volume offset:279 Expected:3 Got:2
60651 Mismatch Block::0 bo:285 Volume offset:285 Expected:1 Got:2
60652 Mismatch Block::0 bo:280 Volume offset:280 Expected:3 Got:2
60653 Mismatch Block::0 bo:286 Volume offset:286 Expected:1 Got:2
60654 Mismatch Block::0 bo:281 Volume offset:281 Expected:3 Got:2
60655 Mismatch Block::0 bo:287 Volume offset:287 Expected:1 Got:2
60656 Mismatch Block::0 bo:282 Volume offset:282 Expected:3 Got:2
60657 Mismatch Block::0 bo:288 Volume offset:288 Expected:1 Got:2
60658 Mismatch Block::0 bo:283 Volume offset:283 Expected:3 Got:2
60659 Mismatch Block::0 bo:289 Volume offset:289 Expected:1 Got:2
60660 Mismatch Block::0 bo:284 Volume offset:284 Expected:3 Got:2
60661 Mismatch Block::0 bo:290 Volume offset:290 Expected:1 Got:2
60662 Mismatch Block::0 bo:285 Volume offset:285 Expected:3 Got:2
60663 Mismatch Block::0 bo:291 Volume offset:291 Expected:1 Got:2
60664 Mismatch Block::0 bo:286 Volume offset:286 Expected:3 Got:2
60665 Mismatch Block::0 bo:292 Volume offset:292 Expected:1 Got:2
60666 Mismatch Block::0 bo:287 Volume offset:287 Expected:3 Got:2
60667 Mismatch Block::0 bo:293 Volume offset:293 Expected:1 Got:2
60668 Mismatch Block::0 bo:288 Volume offset:288 Expected:3 Got:2
60669 Mismatch Block::0 bo:294 Volume offset:294 Expected:1 Got:2
60670 Mismatch Block::0 bo:289 Volume offset:289 Expected:3 Got:2
60671 Mismatch Block::0 bo:295 Volume offset:295 Expected:1 Got:2
60672 Mismatch Block::0 bo:290 Volume offset:290 Expected:3 Got:2
60673 Mismatch Block::0 bo:296 Volume offset:296 Expected:1 Got:2
60674 Mismatch Block::0 bo:291 Volume offset:291 Expected:3 Got:2
60675 Mismatch Block::0 bo:297 Volume offset:297 Expected:1 Got:2
60676 Mismatch Block::0 bo:292 Volume offset:292 Expected:3 Got:2
60677 Mismatch Block::0 bo:298 Volume offset:298 Expected:1 Got:2
60678 Mismatch Block::0 bo:293 Volume offset:293 Expected:3 Got:2
60679 Mismatch Block::0 bo:299 Volume offset:299 Expected:1 Got:2
60680 Mismatch Block::0 bo:294 Volume offset:294 Expected:3 Got:2
60681 Mismatch Block::0 bo:300 Volume offset:300 Expected:1 Got:2
60682 Mismatch Block::0 bo:295 Volume offset:295 Expected:3 Got:2
60683 Mismatch Block::0 bo:301 Volume offset:301 Expected:1 Got:2
60684 Mismatch Block::0 bo:296 Volume offset:296 Expected:3 Got:2
60685 Mismatch Block::0 bo:302 Volume offset:302 Expected:1 Got:2
60686 Mismatch Block::0 bo:297 Volume offset:297 Expected:3 Got:2
60687 Mismatch Block::0 bo:303 Volume offset:303 Expected:1 Got:2
60688 Mismatch Block::0 bo:298 Volume offset:298 Expected:3 Got:2
60689 Mismatch Block::0 bo:304 Volume offset:304 Expected:1 Got:2
60690 Mismatch Block::0 bo:299 Volume offset:299 Expected:3 Got:2
60691 Mismatch Block::0 bo:305 Volume offset:305 Expected:1 Got:2
60692 Mismatch Block::0 bo:300 Volume offset:300 Expected:3 Got:2
60693 Mismatch Block::0 bo:306 Volume offset:306 Expected:1 Got:2
60694 Mismatch Block::0 bo:301 Volume offset:301 Expected:3 Got:2
60695 Mismatch Block::0 bo:307 Volume offset:307 Expected:1 Got:2
60696 Mismatch Block::0 bo:302 Volume offset:302 Expected:3 Got:2
60697 Mismatch Block::0 bo:308 Volume offset:308 Expected:1 Got:2
60698 Mismatch Block::0 bo:303 Volume offset:303 Expected:3 Got:2
60699 Mismatch Block::0 bo:309 Volume offset:309 Expected:1 Got:2
60700 Mismatch Block::0 bo:304 Volume offset:304 Expected:3 Got:2
60701 Mismatch Block::0 bo:310 Volume offset:310 Expected:1 Got:2
60702 Mismatch Block::0 bo:305 Volume offset:305 Expected:3 Got:2
60703 Mismatch Block::0 bo:311 Volume offset:311 Expected:1 Got:2
60704 Mismatch Block::0 bo:306 Volume offset:306 Expected:3 Got:2
60705 Mismatch Block::0 bo:312 Volume offset:312 Expected:1 Got:2
60706 Mismatch Block::0 bo:307 Volume offset:307 Expected:3 Got:2
60707 Mismatch Block::0 bo:313 Volume offset:313 Expected:1 Got:2
60708 Mismatch Block::0 bo:308 Volume offset:308 Expected:3 Got:2
60709 Mismatch Block::0 bo:314 Volume offset:314 Expected:1 Got:2
60710 Mismatch Block::0 bo:309 Volume offset:309 Expected:3 Got:2
60711 Mismatch Block::0 bo:315 Volume offset:315 Expected:1 Got:2
60712 Mismatch Block::0 bo:310 Volume offset:310 Expected:3 Got:2
60713 Mismatch Block::0 bo:316 Volume offset:316 Expected:1 Got:2
60714 Mismatch Block::0 bo:311 Volume offset:311 Expected:3 Got:2
60715 Mismatch Block::0 bo:317 Volume offset:317 Expected:1 Got:2
60716 Mismatch Block::0 bo:312 Volume offset:312 Expected:3 Got:2
60717 Mismatch Block::0 bo:318 Volume offset:318 Expected:1 Got:2
60718 Mismatch Block::0 bo:313 Volume offset:313 Expected:3 Got:2
60719 Mismatch Block::0 bo:319 Volume offset:319 Expected:1 Got:2
60720 Mismatch Block::0 bo:314 Volume offset:314 Expected:3 Got:2
60721 Mismatch Block::0 bo:320 Volume offset:320 Expected:1 Got:2
60722 Mismatch Block::0 bo:315 Volume offset:315 Expected:3 Got:2
60723 Mismatch Block::0 bo:321 Volume offset:321 Expected:1 Got:2
60724 Mismatch Block::0 bo:316 Volume offset:316 Expected:3 Got:2
60725 Mismatch Block::0 bo:322 Volume offset:322 Expected:1 Got:2
60726 Mismatch Block::0 bo:317 Volume offset:317 Expected:3 Got:2
60727 Mismatch Block::0 bo:323 Volume offset:323 Expected:1 Got:2
60728 Mismatch Block::0 bo:318 Volume offset:318 Expected:3 Got:2
60729 Mismatch Block::0 bo:324 Volume offset:324 Expected:1 Got:2
60730 Mismatch Block::0 bo:319 Volume offset:319 Expected:3 Got:2
60731 Mismatch Block::0 bo:325 Volume offset:325 Expected:1 Got:2
60732 Mismatch Block::0 bo:320 Volume offset:320 Expected:3 Got:2
60733 Mismatch Block::0 bo:326 Volume offset:326 Expected:1 Got:2
60734 Mismatch Block::0 bo:321 Volume offset:321 Expected:3 Got:2
60735 Mismatch Block::0 bo:327 Volume offset:327 Expected:1 Got:2
60736 Mismatch Block::0 bo:322 Volume offset:322 Expected:3 Got:2
60737 Mismatch Block::0 bo:328 Volume offset:328 Expected:1 Got:2
60738 Mismatch Block::0 bo:323 Volume offset:323 Expected:3 Got:2
60739 Mismatch Block::0 bo:329 Volume offset:329 Expected:1 Got:2
60740 Mismatch Block::0 bo:324 Volume offset:324 Expected:3 Got:2
60741 Mismatch Block::0 bo:330 Volume offset:330 Expected:1 Got:2
60742 Mismatch Block::0 bo:325 Volume offset:325 Expected:3 Got:2
60743 Mismatch Block::0 bo:331 Volume offset:331 Expected:1 Got:2
60744 Mismatch Block::0 bo:326 Volume offset:326 Expected:3 Got:2
60745 Mismatch Block::0 bo:332 Volume offset:332 Expected:1 Got:2
60746 Mismatch Block::0 bo:327 Volume offset:327 Expected:3 Got:2
60747 Mismatch Block::0 bo:333 Volume offset:333 Expected:1 Got:2
60748 Mismatch Block::0 bo:328 Volume offset:328 Expected:3 Got:2
60749 Mismatch Block::0 bo:334 Volume offset:334 Expected:1 Got:2
60750 Mismatch Block::0 bo:329 Volume offset:329 Expected:3 Got:2
60751 Mismatch Block::0 bo:335 Volume offset:335 Expected:1 Got:2
60752 Mismatch Block::0 bo:330 Volume offset:330 Expected:3 Got:2
60753 Mismatch Block::0 bo:336 Volume offset:336 Expected:1 Got:2
60754 Mismatch Block::0 bo:331 Volume offset:331 Expected:3 Got:2
60755 Mismatch Block::0 bo:337 Volume offset:337 Expected:1 Got:2
60756 Mismatch Block::0 bo:332 Volume offset:332 Expected:3 Got:2
60757 Mismatch Block::0 bo:338 Volume offset:338 Expected:1 Got:2
60758 Mismatch Block::0 bo:333 Volume offset:333 Expected:3 Got:2
60759 Mismatch Block::0 bo:339 Volume offset:339 Expected:1 Got:2
60760 Mismatch Block::0 bo:334 Volume offset:334 Expected:3 Got:2
60761 Mismatch Block::0 bo:340 Volume offset:340 Expected:1 Got:2
60762 Mismatch Block::0 bo:335 Volume offset:335 Expected:3 Got:2
60763 Mismatch Block::0 bo:341 Volume offset:341 Expected:1 Got:2
60764 Mismatch Block::0 bo:336 Volume offset:336 Expected:3 Got:2
60765 Mismatch Block::0 bo:342 Volume offset:342 Expected:1 Got:2
60766 Mismatch Block::0 bo:337 Volume offset:337 Expected:3 Got:2
60767 Mismatch Block::0 bo:343 Volume offset:343 Expected:1 Got:2
60768 Mismatch Block::0 bo:338 Volume offset:338 Expected:3 Got:2
60769 Mismatch Block::0 bo:344 Volume offset:344 Expected:1 Got:2
60770 Mismatch Block::0 bo:339 Volume offset:339 Expected:3 Got:2
60771 Mismatch Block::0 bo:345 Volume offset:345 Expected:1 Got:2
60772 Mismatch Block::0 bo:340 Volume offset:340 Expected:3 Got:2
60773 Mismatch Block::0 bo:346 Volume offset:346 Expected:1 Got:2
60774 Mismatch Block::0 bo:341 Volume offset:341 Expected:3 Got:2
60775 Mismatch Block::0 bo:347 Volume offset:347 Expected:1 Got:2
60776 Mismatch Block::0 bo:342 Volume offset:342 Expected:3 Got:2
60777 Mismatch Block::0 bo:348 Volume offset:348 Expected:1 Got:2
60778 Mismatch Block::0 bo:343 Volume offset:343 Expected:3 Got:2
60779 Mismatch Block::0 bo:349 Volume offset:349 Expected:1 Got:2
60780 Mismatch Block::0 bo:344 Volume offset:344 Expected:3 Got:2
60781 Mismatch Block::0 bo:350 Volume offset:350 Expected:1 Got:2
60782 Mismatch Block::0 bo:345 Volume offset:345 Expected:3 Got:2
60783 Mismatch Block::0 bo:351 Volume offset:351 Expected:1 Got:2
60784 Mismatch Block::0 bo:346 Volume offset:346 Expected:3 Got:2
60785 Mismatch Block::0 bo:352 Volume offset:352 Expected:1 Got:2
60786 Mismatch Block::0 bo:347 Volume offset:347 Expected:3 Got:2
60787 Mismatch Block::0 bo:353 Volume offset:353 Expected:1 Got:2
60788 Mismatch Block::0 bo:348 Volume offset:348 Expected:3 Got:2
60789 Mismatch Block::0 bo:354 Volume offset:354 Expected:1 Got:2
60790 Mismatch Block::0 bo:349 Volume offset:349 Expected:3 Got:2
60791 Mismatch Block::0 bo:355 Volume offset:355 Expected:1 Got:2
60792 Mismatch Block::0 bo:350 Volume offset:350 Expected:3 Got:2
60793 Mismatch Block::0 bo:356 Volume offset:356 Expected:1 Got:2
60794 Mismatch Block::0 bo:351 Volume offset:351 Expected:3 Got:2
60795 Mismatch Block::0 bo:357 Volume offset:357 Expected:1 Got:2
60796 Mismatch Block::0 bo:352 Volume offset:352 Expected:3 Got:2
60797 Mismatch Block::0 bo:358 Volume offset:358 Expected:1 Got:2
60798 Mismatch Block::0 bo:353 Volume offset:353 Expected:3 Got:2
60799 Mismatch Block::0 bo:359 Volume offset:359 Expected:1 Got:2
60800 Mismatch Block::0 bo:354 Volume offset:354 Expected:3 Got:2
60801 Mismatch Block::0 bo:360 Volume offset:360 Expected:1 Got:2
60802 Mismatch Block::0 bo:355 Volume offset:355 Expected:3 Got:2
60803 Mismatch Block::0 bo:361 Volume offset:361 Expected:1 Got:2
60804 Mismatch Block::0 bo:356 Volume offset:356 Expected:3 Got:2
60805 Mismatch Block::0 bo:362 Volume offset:362 Expected:1 Got:2
60806 Mismatch Block::0 bo:357 Volume offset:357 Expected:3 Got:2
60807 Mismatch Block::0 bo:363 Volume offset:363 Expected:1 Got:2
60808 Mismatch Block::0 bo:358 Volume offset:358 Expected:3 Got:2
60809 Mismatch Block::0 bo:364 Volume offset:364 Expected:1 Got:2
60810 Mismatch Block::0 bo:359 Volume offset:359 Expected:3 Got:2
60811 Mismatch Block::0 bo:365 Volume offset:365 Expected:1 Got:2
60812 Mismatch Block::0 bo:360 Volume offset:360 Expected:3 Got:2
60813 Mismatch Block::0 bo:366 Volume offset:366 Expected:1 Got:2
60814 Mismatch Block::0 bo:361 Volume offset:361 Expected:3 Got:2
60815 Mismatch Block::0 bo:367 Volume offset:367 Expected:1 Got:2
60816 Mismatch Block::0 bo:362 Volume offset:362 Expected:3 Got:2
60817 Mismatch Block::0 bo:368 Volume offset:368 Expected:1 Got:2
60818 Mismatch Block::0 bo:363 Volume offset:363 Expected:3 Got:2
60819 Mismatch Block::0 bo:369 Volume offset:369 Expected:1 Got:2
60820 Mismatch Block::0 bo:364 Volume offset:364 Expected:3 Got:2
60821 Mismatch Block::0 bo:370 Volume offset:370 Expected:1 Got:2
60822 Mismatch Block::0 bo:365 Volume offset:365 Expected:3 Got:2
60823 Mismatch Block::0 bo:371 Volume offset:371 Expected:1 Got:2
60824 Mismatch Block::0 bo:366 Volume offset:366 Expected:3 Got:2
60825 Mismatch Block::0 bo:372 Volume offset:372 Expected:1 Got:2
60826 Mismatch Block::0 bo:367 Volume offset:367 Expected:3 Got:2
60827 Mismatch Block::0 bo:373 Volume offset:373 Expected:1 Got:2
60828 Mismatch Block::0 bo:368 Volume offset:368 Expected:3 Got:2
60829 Mismatch Block::0 bo:374 Volume offset:374 Expected:1 Got:2
60830 Mismatch Block::0 bo:369 Volume offset:369 Expected:3 Got:2
60831 Mismatch Block::0 bo:375 Volume offset:375 Expected:1 Got:2
60832 Mismatch Block::0 bo:370 Volume offset:370 Expected:3 Got:2
60833 Mismatch Block::0 bo:376 Volume offset:376 Expected:1 Got:2
60834 Mismatch Block::0 bo:371 Volume offset:371 Expected:3 Got:2
60835 Mismatch Block::0 bo:377 Volume offset:377 Expected:1 Got:2
60836 Mismatch Block::0 bo:372 Volume offset:372 Expected:3 Got:2
60837 Mismatch Block::0 bo:378 Volume offset:378 Expected:1 Got:2
60838 Mismatch Block::0 bo:373 Volume offset:373 Expected:3 Got:2
60839 Mismatch Block::0 bo:379 Volume offset:379 Expected:1 Got:2
60840 Mismatch Block::0 bo:374 Volume offset:374 Expected:3 Got:2
60841 Mismatch Block::0 bo:380 Volume offset:380 Expected:1 Got:2
60842 Mismatch Block::0 bo:375 Volume offset:375 Expected:3 Got:2
60843 Mismatch Block::0 bo:381 Volume offset:381 Expected:1 Got:2
60844 Mismatch Block::0 bo:376 Volume offset:376 Expected:3 Got:2
60845 Mismatch Block::0 bo:382 Volume offset:382 Expected:1 Got:2
60846 Mismatch Block::0 bo:377 Volume offset:377 Expected:3 Got:2
60847 Mismatch Block::0 bo:383 Volume offset:383 Expected:1 Got:2
60848 Mismatch Block::0 bo:378 Volume offset:378 Expected:3 Got:2
60849 Mismatch Block::0 bo:384 Volume offset:384 Expected:1 Got:2
60850 Mismatch Block::0 bo:379 Volume offset:379 Expected:3 Got:2
60851 Mismatch Block::0 bo:385 Volume offset:385 Expected:1 Got:2
60852 Mismatch Block::0 bo:380 Volume offset:380 Expected:3 Got:2
60853 Mismatch Block::0 bo:386 Volume offset:386 Expected:1 Got:2
60854 Mismatch Block::0 bo:381 Volume offset:381 Expected:3 Got:2
60855 Mismatch Block::0 bo:387 Volume offset:387 Expected:1 Got:2
60856 Mismatch Block::0 bo:382 Volume offset:382 Expected:3 Got:2
60857 Mismatch Block::0 bo:388 Volume offset:388 Expected:1 Got:2
60858 Mismatch Block::0 bo:383 Volume offset:383 Expected:3 Got:2
60859 Mismatch Block::0 bo:389 Volume offset:389 Expected:1 Got:2
60860 Mismatch Block::0 bo:384 Volume offset:384 Expected:3 Got:2
60861 Mismatch Block::0 bo:390 Volume offset:390 Expected:1 Got:2
60862 Mismatch Block::0 bo:385 Volume offset:385 Expected:3 Got:2
60863 Mismatch Block::0 bo:391 Volume offset:391 Expected:1 Got:2
60864 Mismatch Block::0 bo:386 Volume offset:386 Expected:3 Got:2
60865 Mismatch Block::0 bo:392 Volume offset:392 Expected:1 Got:2
60866 Mismatch Block::0 bo:387 Volume offset:387 Expected:3 Got:2
60867 Mismatch Block::0 bo:393 Volume offset:393 Expected:1 Got:2
60868 Mismatch Block::0 bo:388 Volume offset:388 Expected:3 Got:2
60869 Mismatch Block::0 bo:394 Volume offset:394 Expected:1 Got:2
60870 Mismatch Block::0 bo:389 Volume offset:389 Expected:3 Got:2
60871 Mismatch Block::0 bo:395 Volume offset:395 Expected:1 Got:2
60872 Mismatch Block::0 bo:390 Volume offset:390 Expected:3 Got:2
60873 Mismatch Block::0 bo:396 Volume offset:396 Expected:1 Got:2
60874 Mismatch Block::0 bo:391 Volume offset:391 Expected:3 Got:2
60875 Mismatch Block::0 bo:397 Volume offset:397 Expected:1 Got:2
60876 Mismatch Block::0 bo:392 Volume offset:392 Expected:3 Got:2
60877 Mismatch Block::0 bo:398 Volume offset:398 Expected:1 Got:2
60878 Mismatch Block::0 bo:393 Volume offset:393 Expected:3 Got:2
60879 Mismatch Block::0 bo:399 Volume offset:399 Expected:1 Got:2
60880 Mismatch Block::0 bo:394 Volume offset:394 Expected:3 Got:2
60881 Mismatch Block::0 bo:400 Volume offset:400 Expected:1 Got:2
60882 Mismatch Block::0 bo:395 Volume offset:395 Expected:3 Got:2
60883 Mismatch Block::0 bo:401 Volume offset:401 Expected:1 Got:2
60884 Mismatch Block::0 bo:396 Volume offset:396 Expected:3 Got:2
60885 Mismatch Block::0 bo:402 Volume offset:402 Expected:1 Got:2
60886 Mismatch Block::0 bo:397 Volume offset:397 Expected:3 Got:2
60887 Mismatch Block::0 bo:403 Volume offset:403 Expected:1 Got:2
60888 Mismatch Block::0 bo:398 Volume offset:398 Expected:3 Got:2
60889 Mismatch Block::0 bo:404 Volume offset:404 Expected:1 Got:2
60890 Mismatch Block::0 bo:399 Volume offset:399 Expected:3 Got:2
60891 Mismatch Block::0 bo:405 Volume offset:405 Expected:1 Got:2
60892 Mismatch Block::0 bo:400 Volume offset:400 Expected:3 Got:2
60893 Mismatch Block::0 bo:406 Volume offset:406 Expected:1 Got:2
60894 Mismatch Block::0 bo:401 Volume offset:401 Expected:3 Got:2
60895 Mismatch Block::0 bo:407 Volume offset:407 Expected:1 Got:2
60896 Mismatch Block::0 bo:402 Volume offset:402 Expected:3 Got:2
60897 Mismatch Block::0 bo:408 Volume offset:408 Expected:1 Got:2
60898 Mismatch Block::0 bo:403 Volume offset:403 Expected:3 Got:2
60899 Mismatch Block::0 bo:409 Volume offset:409 Expected:1 Got:2
60900 Mismatch Block::0 bo:404 Volume offset:404 Expected:3 Got:2
60901 Mismatch Block::0 bo:410 Volume offset:410 Expected:1 Got:2
60902 Mismatch Block::0 bo:405 Volume offset:405 Expected:3 Got:2
60903 Mismatch Block::0 bo:411 Volume offset:411 Expected:1 Got:2
60904 Mismatch Block::0 bo:406 Volume offset:406 Expected:3 Got:2
60905 Mismatch Block::0 bo:412 Volume offset:412 Expected:1 Got:2
60906 Mismatch Block::0 bo:407 Volume offset:407 Expected:3 Got:2
60907 Mismatch Block::0 bo:413 Volume offset:413 Expected:1 Got:2
60908 Mismatch Block::0 bo:408 Volume offset:408 Expected:3 Got:2
60909 Mismatch Block::0 bo:414 Volume offset:414 Expected:1 Got:2
60910 Mismatch Block::0 bo:409 Volume offset:409 Expected:3 Got:2
60911 Mismatch Block::0 bo:415 Volume offset:415 Expected:1 Got:2
60912 Mismatch Block::0 bo:410 Volume offset:410 Expected:3 Got:2
60913 Mismatch Block::0 bo:416 Volume offset:416 Expected:1 Got:2
60914 Mismatch Block::0 bo:411 Volume offset:411 Expected:3 Got:2
60915 Mismatch Block::0 bo:417 Volume offset:417 Expected:1 Got:2
60916 Mismatch Block::0 bo:412 Volume offset:412 Expected:3 Got:2
60917 Mismatch Block::0 bo:418 Volume offset:418 Expected:1 Got:2
60918 Mismatch Block::0 bo:413 Volume offset:413 Expected:3 Got:2
60919 Mismatch Block::0 bo:419 Volume offset:419 Expected:1 Got:2
60920 Mismatch Block::0 bo:414 Volume offset:414 Expected:3 Got:2
60921 Mismatch Block::0 bo:420 Volume offset:420 Expected:1 Got:2
60922 Mismatch Block::0 bo:415 Volume offset:415 Expected:3 Got:2
60923 Mismatch Block::0 bo:416 Volume offset:416 Expected:3 Got:2
60924 Mismatch Block::0 bo:421 Volume offset:421 Expected:1 Got:2
60925 Mismatch Block::0 bo:417 Volume offset:417 Expected:3 Got:2
60926 Mismatch Block::0 bo:422 Volume offset:422 Expected:1 Got:2
60927 Mismatch Block::0 bo:418 Volume offset:418 Expected:3 Got:2
60928 Mismatch Block::0 bo:423 Volume offset:423 Expected:1 Got:2
60929 Mismatch Block::0 bo:419 Volume offset:419 Expected:3 Got:2
60930 Mismatch Block::0 bo:424 Volume offset:424 Expected:1 Got:2
60931 Mismatch Block::0 bo:420 Volume offset:420 Expected:3 Got:2
60932 Mismatch Block::0 bo:425 Volume offset:425 Expected:1 Got:2
60933 Mismatch Block::0 bo:421 Volume offset:421 Expected:3 Got:2
60934 Mismatch Block::0 bo:426 Volume offset:426 Expected:1 Got:2
60935 Mismatch Block::0 bo:422 Volume offset:422 Expected:3 Got:2
60936 Mismatch Block::0 bo:427 Volume offset:427 Expected:1 Got:2
60937 Mismatch Block::0 bo:423 Volume offset:423 Expected:3 Got:2
60938 Mismatch Block::0 bo:428 Volume offset:428 Expected:1 Got:2
60939 Mismatch Block::0 bo:424 Volume offset:424 Expected:3 Got:2
60940 Mismatch Block::0 bo:429 Volume offset:429 Expected:1 Got:2
60941 Mismatch Block::0 bo:425 Volume offset:425 Expected:3 Got:2
60942 Mismatch Block::0 bo:430 Volume offset:430 Expected:1 Got:2
60943 Mismatch Block::0 bo:426 Volume offset:426 Expected:3 Got:2
60944 Mismatch Block::0 bo:431 Volume offset:431 Expected:1 Got:2
60945 Mismatch Block::0 bo:427 Volume offset:427 Expected:3 Got:2
60946 Mismatch Block::0 bo:432 Volume offset:432 Expected:1 Got:2
60947 Mismatch Block::0 bo:428 Volume offset:428 Expected:3 Got:2
60948 Mismatch Block::0 bo:433 Volume offset:433 Expected:1 Got:2
60949 Mismatch Block::0 bo:429 Volume offset:429 Expected:3 Got:2
60950 Mismatch Block::0 bo:434 Volume offset:434 Expected:1 Got:2
60951 Mismatch Block::0 bo:430 Volume offset:430 Expected:3 Got:2
60952 Mismatch Block::0 bo:435 Volume offset:435 Expected:1 Got:2
60953 Mismatch Block::0 bo:431 Volume offset:431 Expected:3 Got:2
60954 Mismatch Block::0 bo:436 Volume offset:436 Expected:1 Got:2
60955 Mismatch Block::0 bo:432 Volume offset:432 Expected:3 Got:2
60956 Mismatch Block::0 bo:437 Volume offset:437 Expected:1 Got:2
60957 Mismatch Block::0 bo:433 Volume offset:433 Expected:3 Got:2
60958 Mismatch Block::0 bo:438 Volume offset:438 Expected:1 Got:2
60959 Mismatch Block::0 bo:434 Volume offset:434 Expected:3 Got:2
60960 Mismatch Block::0 bo:439 Volume offset:439 Expected:1 Got:2
60961 Mismatch Block::0 bo:435 Volume offset:435 Expected:3 Got:2
60962 Mismatch Block::0 bo:440 Volume offset:440 Expected:1 Got:2
60963 Mismatch Block::0 bo:436 Volume offset:436 Expected:3 Got:2
60964 Mismatch Block::0 bo:441 Volume offset:441 Expected:1 Got:2
60965 Mismatch Block::0 bo:437 Volume offset:437 Expected:3 Got:2
60966 Mismatch Block::0 bo:442 Volume offset:442 Expected:1 Got:2
60967 Mismatch Block::0 bo:438 Volume offset:438 Expected:3 Got:2
60968 Mismatch Block::0 bo:443 Volume offset:443 Expected:1 Got:2
60969 Mismatch Block::0 bo:439 Volume offset:439 Expected:3 Got:2
60970 Mismatch Block::0 bo:444 Volume offset:444 Expected:1 Got:2
60971 Mismatch Block::0 bo:440 Volume offset:440 Expected:3 Got:2
60972 Mismatch Block::0 bo:445 Volume offset:445 Expected:1 Got:2
60973 Mismatch Block::0 bo:441 Volume offset:441 Expected:3 Got:2
60974 Mismatch Block::0 bo:446 Volume offset:446 Expected:1 Got:2
60975 Mismatch Block::0 bo:442 Volume offset:442 Expected:3 Got:2
60976 Mismatch Block::0 bo:447 Volume offset:447 Expected:1 Got:2
60977 Mismatch Block::0 bo:443 Volume offset:443 Expected:3 Got:2
60978 Mismatch Block::0 bo:448 Volume offset:448 Expected:1 Got:2
60979 Mismatch Block::0 bo:444 Volume offset:444 Expected:3 Got:2
60980 Mismatch Block::0 bo:449 Volume offset:449 Expected:1 Got:2
60981 Mismatch Block::0 bo:445 Volume offset:445 Expected:3 Got:2
60982 Mismatch Block::0 bo:450 Volume offset:450 Expected:1 Got:2
60983 Mismatch Block::0 bo:446 Volume offset:446 Expected:3 Got:2
60984 Mismatch Block::0 bo:451 Volume offset:451 Expected:1 Got:2
60985 Mismatch Block::0 bo:447 Volume offset:447 Expected:3 Got:2
60986 Mismatch Block::0 bo:452 Volume offset:452 Expected:1 Got:2
60987 Mismatch Block::0 bo:448 Volume offset:448 Expected:3 Got:2
60988 Mismatch Block::0 bo:453 Volume offset:453 Expected:1 Got:2
60989 Mismatch Block::0 bo:449 Volume offset:449 Expected:3 Got:2
60990 Mismatch Block::0 bo:454 Volume offset:454 Expected:1 Got:2
60991 Mismatch Block::0 bo:450 Volume offset:450 Expected:3 Got:2
60992 Mismatch Block::0 bo:455 Volume offset:455 Expected:1 Got:2
60993 Mismatch Block::0 bo:451 Volume offset:451 Expected:3 Got:2
60994 Mismatch Block::0 bo:456 Volume offset:456 Expected:1 Got:2
60995 Mismatch Block::0 bo:452 Volume offset:452 Expected:3 Got:2
60996 Mismatch Block::0 bo:457 Volume offset:457 Expected:1 Got:2
60997 Mismatch Block::0 bo:453 Volume offset:453 Expected:3 Got:2
60998 Mismatch Block::0 bo:458 Volume offset:458 Expected:1 Got:2
60999 Mismatch Block::0 bo:454 Volume offset:454 Expected:3 Got:2
61000 Mismatch Block::0 bo:459 Volume offset:459 Expected:1 Got:2
61001 Mismatch Block::0 bo:455 Volume offset:455 Expected:3 Got:2
61002 Mismatch Block::0 bo:460 Volume offset:460 Expected:1 Got:2
61003 Mismatch Block::0 bo:456 Volume offset:456 Expected:3 Got:2
61004 Mismatch Block::0 bo:461 Volume offset:461 Expected:1 Got:2
61005 Mismatch Block::0 bo:457 Volume offset:457 Expected:3 Got:2
61006 Mismatch Block::0 bo:462 Volume offset:462 Expected:1 Got:2
61007 Mismatch Block::0 bo:458 Volume offset:458 Expected:3 Got:2
61008 Mismatch Block::0 bo:463 Volume offset:463 Expected:1 Got:2
61009 Mismatch Block::0 bo:459 Volume offset:459 Expected:3 Got:2
61010 Mismatch Block::0 bo:464 Volume offset:464 Expected:1 Got:2
61011 Mismatch Block::0 bo:460 Volume offset:460 Expected:3 Got:2
61012 Mismatch Block::0 bo:465 Volume offset:465 Expected:1 Got:2
61013 Mismatch Block::0 bo:461 Volume offset:461 Expected:3 Got:2
61014 Mismatch Block::0 bo:466 Volume offset:466 Expected:1 Got:2
61015 Mismatch Block::0 bo:462 Volume offset:462 Expected:3 Got:2
61016 Mismatch Block::0 bo:467 Volume offset:467 Expected:1 Got:2
61017 Mismatch Block::0 bo:463 Volume offset:463 Expected:3 Got:2
61018 Mismatch Block::0 bo:468 Volume offset:468 Expected:1 Got:2
61019 Mismatch Block::0 bo:464 Volume offset:464 Expected:3 Got:2
61020 Mismatch Block::0 bo:469 Volume offset:469 Expected:1 Got:2
61021 Mismatch Block::0 bo:465 Volume offset:465 Expected:3 Got:2
61022 Mismatch Block::0 bo:470 Volume offset:470 Expected:1 Got:2
61023 Mismatch Block::0 bo:466 Volume offset:466 Expected:3 Got:2
61024 Mismatch Block::0 bo:471 Volume offset:471 Expected:1 Got:2
61025 Mismatch Block::0 bo:467 Volume offset:467 Expected:3 Got:2
61026 Mismatch Block::0 bo:472 Volume offset:472 Expected:1 Got:2
61027 Mismatch Block::0 bo:468 Volume offset:468 Expected:3 Got:2
61028 Mismatch Block::0 bo:473 Volume offset:473 Expected:1 Got:2
61029 Mismatch Block::0 bo:469 Volume offset:469 Expected:3 Got:2
61030 Mismatch Block::0 bo:474 Volume offset:474 Expected:1 Got:2
61031 Mismatch Block::0 bo:470 Volume offset:470 Expected:3 Got:2
61032 Mismatch Block::0 bo:475 Volume offset:475 Expected:1 Got:2
61033 Mismatch Block::0 bo:471 Volume offset:471 Expected:3 Got:2
61034 Mismatch Block::0 bo:476 Volume offset:476 Expected:1 Got:2
61035 Mismatch Block::0 bo:472 Volume offset:472 Expected:3 Got:2
61036 Mismatch Block::0 bo:477 Volume offset:477 Expected:1 Got:2
61037 Mismatch Block::0 bo:473 Volume offset:473 Expected:3 Got:2
61038 Mismatch Block::0 bo:478 Volume offset:478 Expected:1 Got:2
61039 Mismatch Block::0 bo:474 Volume offset:474 Expected:3 Got:2
61040 Mismatch Block::0 bo:479 Volume offset:479 Expected:1 Got:2
61041 Mismatch Block::0 bo:475 Volume offset:475 Expected:3 Got:2
61042 Mismatch Block::0 bo:480 Volume offset:480 Expected:1 Got:2
61043 Mismatch Block::0 bo:476 Volume offset:476 Expected:3 Got:2
61044 Mismatch Block::0 bo:481 Volume offset:481 Expected:1 Got:2
61045 Mismatch Block::0 bo:477 Volume offset:477 Expected:3 Got:2
61046 Mismatch Block::0 bo:482 Volume offset:482 Expected:1 Got:2
61047 Mismatch Block::0 bo:478 Volume offset:478 Expected:3 Got:2
61048 Mismatch Block::0 bo:483 Volume offset:483 Expected:1 Got:2
61049 Mismatch Block::0 bo:479 Volume offset:479 Expected:3 Got:2
61050 Mismatch Block::0 bo:484 Volume offset:484 Expected:1 Got:2
61051 Mismatch Block::0 bo:480 Volume offset:480 Expected:3 Got:2
61052 Mismatch Block::0 bo:485 Volume offset:485 Expected:1 Got:2
61053 Mismatch Block::0 bo:481 Volume offset:481 Expected:3 Got:2
61054 Mismatch Block::0 bo:486 Volume offset:486 Expected:1 Got:2
61055 Mismatch Block::0 bo:482 Volume offset:482 Expected:3 Got:2
61056 Mismatch Block::0 bo:487 Volume offset:487 Expected:1 Got:2
61057 Mismatch Block::0 bo:483 Volume offset:483 Expected:3 Got:2
61058 Mismatch Block::0 bo:488 Volume offset:488 Expected:1 Got:2
61059 Mismatch Block::0 bo:484 Volume offset:484 Expected:3 Got:2
61060 Mismatch Block::0 bo:489 Volume offset:489 Expected:1 Got:2
61061 Mismatch Block::0 bo:485 Volume offset:485 Expected:3 Got:2
61062 Mismatch Block::0 bo:490 Volume offset:490 Expected:1 Got:2
61063 Mismatch Block::0 bo:486 Volume offset:486 Expected:3 Got:2
61064 Mismatch Block::0 bo:491 Volume offset:491 Expected:1 Got:2
61065 Mismatch Block::0 bo:487 Volume offset:487 Expected:3 Got:2
61066 Mismatch Block::0 bo:492 Volume offset:492 Expected:1 Got:2
61067 Mismatch Block::0 bo:488 Volume offset:488 Expected:3 Got:2
61068 Mismatch Block::0 bo:493 Volume offset:493 Expected:1 Got:2
61069 Mismatch Block::0 bo:489 Volume offset:489 Expected:3 Got:2
61070 Mismatch Block::0 bo:494 Volume offset:494 Expected:1 Got:2
61071 Mismatch Block::0 bo:490 Volume offset:490 Expected:3 Got:2
61072 Mismatch Block::0 bo:495 Volume offset:495 Expected:1 Got:2
61073 Mismatch Block::0 bo:491 Volume offset:491 Expected:3 Got:2
61074 Mismatch Block::0 bo:496 Volume offset:496 Expected:1 Got:2
61075 Mismatch Block::0 bo:492 Volume offset:492 Expected:3 Got:2
61076 Mismatch Block::0 bo:497 Volume offset:497 Expected:1 Got:2
61077 Mismatch Block::0 bo:493 Volume offset:493 Expected:3 Got:2
61078 Mismatch Block::0 bo:498 Volume offset:498 Expected:1 Got:2
61079 Mismatch Block::0 bo:494 Volume offset:494 Expected:3 Got:2
61080 Mismatch Block::0 bo:499 Volume offset:499 Expected:1 Got:2
61081 Mismatch Block::0 bo:495 Volume offset:495 Expected:3 Got:2
61082 Mismatch Block::0 bo:500 Volume offset:500 Expected:1 Got:2
61083 Mismatch Block::0 bo:496 Volume offset:496 Expected:3 Got:2
61084 Mismatch Block::0 bo:501 Volume offset:501 Expected:1 Got:2
61085 Mismatch Block::0 bo:497 Volume offset:497 Expected:3 Got:2
61086 Mismatch Block::0 bo:502 Volume offset:502 Expected:1 Got:2
61087 Mismatch Block::0 bo:498 Volume offset:498 Expected:3 Got:2
61088 Mismatch Block::0 bo:503 Volume offset:503 Expected:1 Got:2
61089 Mismatch Block::0 bo:499 Volume offset:499 Expected:3 Got:2
61090 Mismatch Block::0 bo:504 Volume offset:504 Expected:1 Got:2
61091 Mismatch Block::0 bo:500 Volume offset:500 Expected:3 Got:2
61092 Mismatch Block::0 bo:505 Volume offset:505 Expected:1 Got:2
61093 Mismatch Block::0 bo:501 Volume offset:501 Expected:3 Got:2
61094 Mismatch Block::0 bo:506 Volume offset:506 Expected:1 Got:2
61095 Mismatch Block::0 bo:502 Volume offset:502 Expected:3 Got:2
61096 Mismatch Block::0 bo:507 Volume offset:507 Expected:1 Got:2
61097 Mismatch Block::0 bo:503 Volume offset:503 Expected:3 Got:2
61098 Mismatch Block::0 bo:508 Volume offset:508 Expected:1 Got:2
61099 Mismatch Block::0 bo:504 Volume offset:504 Expected:3 Got:2
61100 Mismatch Block::0 bo:509 Volume offset:509 Expected:1 Got:2
61101 Mismatch Block::0 bo:505 Volume offset:505 Expected:3 Got:2
61102 Mismatch Block::0 bo:510 Volume offset:510 Expected:1 Got:2
61103 Mismatch Block::0 bo:506 Volume offset:506 Expected:3 Got:2
61104 Mismatch Block::0 bo:511 Volume offset:511 Expected:1 Got:2
61105 Mismatch Block::0 bo:507 Volume offset:507 Expected:3 Got:2
61106 Mismatch Block::0 bo:508 Volume offset:508 Expected:3 Got:2
61107 Mismatch Block::0 bo:509 Volume offset:509 Expected:3 Got:2
61108 Mismatch Block::0 bo:510 Volume offset:510 Expected:3 Got:2
61109 Mismatch Block::0 bo:511 Volume offset:511 Expected:3 Got:2
61110 ok
61111 test test::test_read_compare ... ok
61112 test test::test_read_compare_span ... ok
61113 Mismatch Block::2 bo:2 Volume offset:1026 Expected:2 Got:9
61114 test test::test_read_compare_span_fail ... SPEC v:2 min_av:254 cur_av:1 cm:1022 cc:1025
61115 SPEC v:253 min_av:254 cur_av:1 cm:1022 cc:1025
61116 SPEC v:254 min_av:254 cur_av:1 cm:1022 cc:1025
61117 new cur is 1022 from min
61118 SPEC v:255 min_av:254 cur_av:1 cm:1022 cc:1025
61119 new cur is 1023 from min
61120 SPEC v:0 min_av:254 cur_av:1 cm:1022 cc:1025
61121 new cur is 1024 from cur
61122 SPEC v:1 min_av:254 cur_av:1 cm:1022 cc:1025
61123 new cur is 1025 from cur
61124 SPEC v:2 min_av:254 cur_av:1 cm:1022 cc:1025
61125 SPEC v:254 min_av:254 cur_av:1 cm:1022 cc:1025
61126 new cur is 1022 from min
61127 SPEC v:255 min_av:254 cur_av:1 cm:1022 cc:1025
61128 new cur is 1023 from min
61129 SPEC v:0 min_av:254 cur_av:1 cm:1022 cc:1025
61130 new cur is 1024 from cur
61131 SPEC v:1 min_av:254 cur_av:1 cm:1022 cc:1025
61132 new cur is 1025 from cur
61133 ok
61134 test test::test_read_compare_fail_under ... ok
61135 SPEC v:252 min_av:253 cur_av:0 cm:1021 cc:1024
61136 SPEC v:253 min_av:253 cur_av:0 cm:1021 cc:1024
61137 new cur is 1021 from min
61138 SPEC v:254 min_av:253 cur_av:0 cm:1021 cc:1024
61139 new cur is 1022 from min
61140 SPEC v:255 min_av:253 cur_av:0 cm:1021 cc:1024
61141 new cur is 1023 from min
61142 SPEC v:0 min_av:253 cur_av:0 cm:1021 cc:1024
61143 new cur is 1024 from cur
61144 SPEC v:1 min_av:253 cur_av:0 cm:1021 cc:1024
61145 test test::test_read_compare_fail ... ok
61146 test test::test_read_compare_span_fail_2 ... ok
61147 SPEC v:254 min_av:255 cur_av:1 cm:1023 cc:1025
61148 SPEC v:255 min_av:255 cur_av:1 cm:1023 cc:1025
61149 new cur is 1023 from min
61150 SPEC v:0 min_av:255 cur_av:1 cm:1023 cc:1025
61151 new cur is 1024 from cur
61152 SPEC v:1 min_av:255 cur_av:1 cm:1023 cc:1025
61153 new cur is 1025 from cur
61154 SPEC v:2 min_av:255 cur_av:1 cm:1023 cc:1025
61155 test test::test_wl_commit_1024_range_no_update_below_rollover ... ok
61156 Mismatch Block::99 bo:511 Volume offset:51199 Expected:1 Got:9
61157 test test::test_wl_commit_1024_range_no_update_above_rollover ... ok
61158 SPEC v:253 min_av:254 cur_av:1 cm:1022 cc:1025
61159 SPEC v:254 min_av:254 cur_av:1 cm:1022 cc:1025
61160 new cur is 1022 from min
61161 SPEC v:255 min_av:254 cur_av:1 cm:1022 cc:1025
61162 new cur is 1023 from min
61163 SPEC v:0 min_av:254 cur_av:1 cm:1022 cc:1025
61164 new cur is 1024 from cur
61165 SPEC v:1 min_av:254 cur_av:1 cm:1022 cc:1025
61166 new cur is 1025 from cur
61167 SPEC v:2 min_av:254 cur_av:1 cm:1022 cc:1025
61168 test test::test_wl_commit_1024_range_rollover_max_at ... ok
61169 SPEC v:253 min_av:254 cur_av:1 cm:1022 cc:1025
61170 SPEC v:0 min_av:254 cur_av:1 cm:1022 cc:1025
61171 new cur is 1024 from cur
61172 Adjusting new cur to 1024
61173 SPEC v:254 min_av:254 cur_av:0 cm:1022 cc:1024
61174 new cur is 1022 from min
61175 SPEC v:255 min_av:254 cur_av:0 cm:1022 cc:1024
61176 new cur is 1023 from min
61177 test test::test_wl_commit_1024_range_rollover_min_at ... SPEC v:1 min_av:254 cur_av:0 cm:1022 cc:1024
61178 SPEC v:253 min_av:254 cur_av:1 cm:1022 cc:1025
61179 SPEC v:2 min_av:254 cur_av:0 cm:1022 cc:1024
61180 SPEC v:254 min_av:254 cur_av:1 cm:1022 cc:1025
61181 oknew cur is 1022 from min
61182 
61183 Adjusting new cur to 1022
61184 Shift 3, v:255 sv:1023 min:1022 cur:1022
61185 Shift 3, v:0 sv:768 min:1022 cur:1022
61186 Shift 3, v:1 sv:769 min:1022 cur:1022
61187 Shift 3, v:2 sv:770 min:1022 cur:1022
61188 SPEC v:2 min_av:254 cur_av:1 cm:254 cc:257
61189 SPEC v:254 min_av:254 cur_av:1 cm:254 cc:257
61190 new cur is 254 from min
61191 SPEC v:255 min_av:254 cur_av:1 cm:254 cc:257
61192 new cur is 255 from min
61193 SPEC v:0 min_av:254 cur_av:1 cm:254 cc:257
61194 new cur is 256 from cur
61195 test test::test_read_compare_large_fail ... SPEC v:1 min_av:254 cur_av:1 cm:254 cc:257
61196 new cur is 257 from cur
61197 ok
61198 test test::test_wl_commit_1024_range_rollover_range ... ok
61199 SPEC v:253 min_av:254 cur_av:1 cm:254 cc:257
61200 SPEC v:254 min_av:254 cur_av:1 cm:254 cc:257
61201 new cur is 254 from min
61202 SPEC v:255 min_av:254 cur_av:1 cm:254 cc:257
61203 new cur is 255 from min
61204 SPEC v:0 min_av:254 cur_av:1 cm:254 cc:257
61205 new cur is 256 from cur
61206 SPEC v:1 min_av:254 cur_av:1 cm:254 cc:257
61207 test test::test_wl_commit_1024_range_update_rollover_above ... new cur is 257 from cur
61208 ok
61209 SPEC v:2 min_av:254 cur_av:1 cm:254 cc:257
61210 Shift 0, v:1 sv:1 min:2 cur:4
61211 Shift 0, v:2 sv:2 min:2 cur:4
61212 Shift 0, v:3 sv:3 min:2 cur:4
61213 Shift 0, v:4 sv:4 min:2 cur:4
61214 Shift 0, v:5 sv:5 min:2 cur:4
61215 test test::test_wl_commit_1024_range_update_rollover_below ... ok
61216 SPEC v:254 min_av:255 cur_av:1 cm:255 cc:257
61217 SPEC v:255 min_av:255 cur_av:1 cm:255 cc:257
61218 new cur is 255 from min
61219 SPEC v:0 min_av:255 cur_av:1 cm:255 cc:257
61220 new cur is 256 from cur
61221 SPEC v:1 min_av:255 cur_av:1 cm:255 cc:257
61222 new cur is 257 from cur
61223 SPEC v:2 min_av:255 cur_av:1 cm:255 cc:257
61224 SPEC v:253 min_av:254 cur_av:1 cm:254 cc:257
61225 SPEC v:254 min_av:254 cur_av:1 cm:254 cc:257
61226 new cur is 254 from min
61227 SPEC v:255 min_av:254 cur_av:1 cm:254 cc:257
61228 new cur is 255 from min
61229 SPEC v:0 min_av:254 cur_av:1 cm:254 cc:257
61230 new cur is 256 from cur
61231 SPEC v:1 min_av:254 cur_av:1 cm:254 cc:257
61232 new cur is 257 from cur
61233 SPEC v:2 min_av:254 cur_av:1 cm:254 cc:257
61234 Shift 0, v:3 sv:3 min:2 cur:4
61235 Update block 1 to 3 (min:2 max:4 res:true)
61236 test test::test_wl_commit_range_no_update_above_rollover ... ok
61237 SPEC v:252 min_av:253 cur_av:0 cm:253 cc:256
61238 SPEC v:253 min_av:253 cur_av:0 cm:253 cc:256
61239 new cur is 253 from min
61240 SPEC v:254 min_av:253 cur_av:0 cm:253 cc:256
61241 new cur is 254 from min
61242 SPEC v:255 min_av:253 cur_av:0 cm:253 cc:256
61243 new cur is 255 from min
61244 SPEC v:0 min_av:253 cur_av:0 cm:253 cc:256
61245 new cur is 256 from cur
61246 SPEC v:1 min_av:253 cur_av:0 cm:253 cc:256
61247 test test::test_read_compare_large ... ok
61248 Shift 0, v:4 sv:4 min:2 cur:4
61249 test test::test_wl_commit_range_no_update_below_rollover ... ok
61250 Shift 0, v:2 sv:2 min:2 cur:4
61251 Update block 1 to 2 (min:2 max:4 res:true)
61252 test test::test_wl_commit_range ... ok
61253 SPEC v:253 min_av:254 cur_av:1 cm:254 cc:257
61254 SPEC v:0 min_av:254 cur_av:1 cm:254 cc:257
61255 new cur is 256 from cur
61256 Adjusting new cur to 256
61257 SPEC v:254 min_av:254 cur_av:0 cm:254 cc:256
61258 new cur is 254 from min
61259 SPEC v:255 min_av:254 cur_av:0 cm:254 cc:256
61260 new cur is 255 from min
61261 SPEC v:1 min_av:254 cur_av:0 cm:254 cc:256
61262 SPEC v:2 min_av:254 cur_av:0 cm:254 cc:256
61263 test test::test_wl_commit_range_rollover_min_at ... ok
61264 SPEC v:253 min_av:254 cur_av:1 cm:254 cc:257
61265 SPEC v:254 min_av:254 cur_av:1 cm:254 cc:257
61266 new cur is 254 from min
61267 Adjusting new cur to 254
61268 Shift 0, v:255 sv:255 min:254 cur:254
61269 Shift 0, v:0 sv:0 min:254 cur:254
61270 Shift 0, v:1 sv:1 min:254 cur:254
61271 Shift 0, v:2 sv:2 min:254 cur:254
61272 test test::test_wl_commit_range_rollover_range ... ok
61273 test test::test_wl_commit_range_update ... ok
61274 test test::test_wl_commit_range_rollover_max_at ... ok
61275 Shift 0, v:1 sv:1 min:2 cur:4
61276 Out of Range Block::1 bo:1 Volume offset:513 Expected:4 Got:1
61277 Shift 0, v:1 sv:1 min:2 cur:4
61278 Out of Range Block::1 bo:2 Volume offset:514 Expected:4 Got:1
61279 Shift 0, v:1 sv:1 min:2 cur:4
61280 Out of Range Block::1 bo:3 Volume offset:515 Expected:4 Got:1
61281 Shift 0, v:1 sv:1 min:2 cur:4
61282 Out of Range Block::1 bo:4 Volume offset:516 Expected:4 Got:1
61283 Shift 0, v:1 sv:1 min:2 cur:4
61284 Out of Range Block::1 bo:5 Volume offset:517 Expected:4 Got:1
61285 Shift 0, v:1 sv:1 min:2 cur:4
61286 Out of Range Block::1 bo:6 Volume offset:518 Expected:4 Got:1
61287 Shift 0, v:1 sv:1 min:2 cur:4
61288 Out of Range Block::1 bo:7 Volume offset:519 Expected:4 Got:1
61289 Shift 0, v:1 sv:1 min:2 cur:4
61290 Out of Range Block::1 bo:8 Volume offset:520 Expected:4 Got:1
61291 Shift 0, v:1 sv:1 min:2 cur:4
61292 Out of Range Block::1 bo:9 Volume offset:521 Expected:4 Got:1
61293 Shift 0, v:1 sv:1 min:2 cur:4
61294 Out of Range Block::1 bo:10 Volume offset:522 Expected:4 Got:1
61295 Shift 0, v:1 sv:1 min:2 cur:4
61296 Out of Range Block::1 bo:11 Volume offset:523 Expected:4 Got:1
61297 Shift 0, v:1 sv:1 min:2 cur:4
61298 test test::test_wl_commit_range_update_max ... Out of Range Block::1 bo:12 Volume offset:524 Expected:4 Got:1
61299 Shift 0, v:1 sv:1 min:2 cur:4
61300 Out of Range Block::1 bo:13 Volume offset:525 Expected:4 Got:1
61301 Shift 0, v:1 sv:1 min:2 cur:4
61302 Out of Range Block::1 bo:14 Volume offset:526 Expected:4 Got:1
61303 Shift 0, v:1 sv:1 min:2 cur:4
61304 Out of Range Block::1 bo:15 Volume offset:527 Expected:4 Got:1
61305 Shift 0, v:1 sv:1 min:2 cur:4
61306 Out of Range Block::1 bo:16 Volume offset:528 Expected:4 Got:1
61307 Shift 0, v:1 sv:1 min:2 cur:4
61308 Out of Range Block::1 bo:17 Volume offset:529 Expected:4 Got:1
61309 Shift 0, v:1 sv:1 min:2 cur:4
61310 Out of Range Block::1 bo:18 Volume offset:530 Expected:4 Got:1
61311 Shift 0, v:1 sv:1 min:2 cur:4
61312 Out of Range Block::1 bo:19 Volume offset:531 Expected:4 Got:1
61313 Shift 0, v:1 sv:1 min:2 cur:4
61314 Out of Range Block::1 bo:20 Volume offset:532 Expected:4 Got:1
61315 Shift 0, v:1 sv:1 min:2 cur:4
61316 Out of Range Block::1 bo:21 Volume offset:533 Expected:4 Got:1
61317 Shift 0, v:1 sv:1 min:2 cur:4
61318 Out of Range Block::1 bo:22 Volume offset:534 Expected:4 Got:1
61319 Shift 0, v:1 sv:1 min:2 cur:4
61320 Out of Range Block::1 bo:23 Volume offset:535 Expected:4 Got:1
61321 Shift 0, v:1 sv:1 min:2 cur:4
61322 Out of Range Block::1 bo:24 Volume offset:536 Expected:4 Got:1
61323 Shift 0, v:1 sv:1 min:2 cur:4
61324 Out of Range Block::1 bo:25 Volume offset:537 Expected:4 Got:1
61325 Shift 0, v:1 sv:1 min:2 cur:4
61326 Out of Range Block::1 bo:26 Volume offset:538 Expected:4 Got:1
61327 Shift 0, v:1 sv:1 min:2 cur:4
61328 Out of Range Block::1 bo:27 Volume offset:539 Expected:4 Got:1
61329 Shift 0, v:1 sv:1 min:2 cur:4
61330 Out of Range Block::1 bo:28 Volume offset:540 Expected:4 Got:1
61331 Shift 0, v:1 sv:1 min:2 cur:4
61332 Out of Range Block::1 bo:29 Volume offset:541 Expected:4 Got:1
61333 Shift 0, v:1 sv:1 min:2 cur:4
61334 Out of Range Block::1 bo:30 Volume offset:542 Expected:4 Got:1
61335 Shift 0, v:1 sv:1 min:2 cur:4
61336 Out of Range Block::1 bo:31 Volume offset:543 Expected:4 Got:1
61337 Shift 0, v:1 sv:1 min:2 cur:4
61338 Out of Range Block::1 bo:32 Volume offset:544 Expected:4 Got:1
61339 Shift 0, v:1 sv:1 min:2 cur:4
61340 Out of Range Block::1 bo:33 Volume offset:545 Expected:4 Got:1
61341 Shift 0, v:1 sv:1 min:2 cur:4
61342 Out of Range Block::1 bo:34 Volume offset:546 Expected:4 Got:1
61343 Shift 0, v:1 sv:1 min:2 cur:4
61344 Out of Range Block::1 bo:35 Volume offset:547 Expected:4 Got:1
61345 Shift 0, v:1 sv:1 min:2 cur:4
61346 Out of Range Block::1 bo:36 Volume offset:548 Expected:4 Got:1
61347 Shift 0, v:1 sv:1 min:2 cur:4
61348 Out of Range Block::1 bo:37 Volume offset:549 Expected:4 Got:1
61349 Shift 0, v:1 sv:1 min:2 cur:4
61350 Out of Range Block::1 bo:38 Volume offset:550 Expected:4 Got:1
61351 Shift 0, v:1 sv:1 min:2 cur:4
61352 Out of Range Block::1 bo:39 Volume offset:551 Expected:4 Got:1
61353 Shift 0, v:1 sv:1 min:2 cur:4
61354 Out of Range Block::1 bo:40 Volume offset:552 Expected:4 Got:1
61355 Shift 0, v:1 sv:1 min:2 cur:4
61356 Out of Range Block::1 bo:41 Volume offset:553 Expected:4 Got:1
61357 Shift 0, v:1 sv:1 min:2 cur:4
61358 Out of Range Block::1 bo:42 Volume offset:554 Expected:4 Got:1
61359 Shift 0, v:1 sv:1 min:2 cur:4
61360 Out of Range Block::1 bo:43 Volume offset:555 Expected:4 Got:1
61361 Shift 0, v:1 sv:1 min:2 cur:4
61362 Out of Range Block::1 bo:44 Volume offset:556 Expected:4 Got:1
61363 Shift 0, v:1 sv:1 min:2 cur:4
61364 Out of Range Block::1 bo:45 Volume offset:557 Expected:4 Got:1
61365 Shift 0, v:1 sv:1 min:2 cur:4
61366 Out of Range Block::1 bo:46 Volume offset:558 Expected:4 Got:1
61367 Shift 0, v:1 sv:1 min:2 cur:4
61368 Out of Range Block::1 bo:47 Volume offset:559 Expected:4 Got:1
61369 Shift 0, v:1 sv:1 min:2 cur:4
61370 Out of Range Block::1 bo:48 Volume offset:560 Expected:4 Got:1
61371 Shift 0, v:1 sv:1 min:2 cur:4
61372 Out of Range Block::1 bo:49 Volume offset:561 Expected:4 Got:1
61373 Shift 0, v:1 sv:1 min:2 cur:4
61374 Out of Range Block::1 bo:50 Volume offset:562 Expected:4 Got:1
61375 Shift 0, v:1 sv:1 min:2 cur:4
61376 Out of Range Block::1 bo:51 Volume offset:563 Expected:4 Got:1
61377 Shift 0, v:1 sv:1 min:2 cur:4
61378 Out of Range Block::1 bo:52 Volume offset:564 Expected:4 Got:1
61379 Shift 0, v:1 sv:1 min:2 cur:4
61380 Out of Range Block::1 bo:53 Volume offset:565 Expected:4 Got:1
61381 Shift 0, v:1 sv:1 min:2 cur:4
61382 Out of Range Block::1 bo:54 Volume offset:566 Expected:4 Got:1
61383 Shift 0, v:1 sv:1 min:2 cur:4
61384 Out of Range Block::1 bo:55 Volume offset:567 Expected:4 Got:1
61385 Shift 0, v:1 sv:1 min:2 cur:4
61386 Out of Range Block::1 bo:56 Volume offset:568 Expected:4 Got:1
61387 Shift 0, v:1 sv:1 min:2 cur:4
61388 Out of Range Block::1 bo:57 Volume offset:569 Expected:4 Got:1
61389 Shift 0, v:1 sv:1 min:2 cur:4
61390 Out of Range Block::1 bo:58 Volume offset:570 Expected:4 Got:1
61391 Shift 0, v:1 sv:1 min:2 cur:4
61392 Out of Range Block::1 bo:59 Volume offset:571 Expected:4 Got:1
61393 Shift 0, v:1 sv:1 min:2 cur:4
61394 Out of Range Block::1 bo:60 Volume offset:572 Expected:4 Got:1
61395 Shift 0, v:1 sv:1 min:2 cur:4
61396 Out of Range Block::1 bo:61 Volume offset:573 Expected:4 Got:1
61397 Shift 0, v:1 sv:1 min:2 cur:4
61398 Out of Range Block::1 bo:62 Volume offset:574 Expected:4 Got:1
61399 Shift 0, v:1 sv:1 min:2 cur:4
61400 Out of Range Block::1 bo:63 Volume offset:575 Expected:4 Got:1
61401 Shift 0, v:1 sv:1 min:2 cur:4
61402 Out of Range Block::1 bo:64 Volume offset:576 Expected:4 Got:1
61403 Shift 0, v:1 sv:1 min:2 cur:4
61404 Out of Range Block::1 bo:65 Volume offset:577 Expected:4 Got:1
61405 Shift 0, v:1 sv:1 min:2 cur:4
61406 Out of Range Block::1 bo:66 Volume offset:578 Expected:4 Got:1
61407 Shift 0, v:1 sv:1 min:2 cur:4
61408 Out of Range Block::1 bo:67 Volume offset:579 Expected:4 Got:1
61409 Shift 0, v:1 sv:1 min:2 cur:4
61410 Out of Range Block::1 bo:68 Volume offset:580 Expected:4 Got:1
61411 Shift 0, v:1 sv:1 min:2 cur:4
61412 Out of Range Block::1 bo:69 Volume offset:581 Expected:4 Got:1
61413 Shift 0, v:1 sv:1 min:2 cur:4
61414 Out of Range Block::1 bo:70 Volume offset:582 Expected:4 Got:1
61415 Shift 0, v:1 sv:1 min:2 cur:4
61416 Out of Range Block::1 bo:71 Volume offset:583 Expected:4 Got:1
61417 Shift 0, v:1 sv:1 min:2 cur:4
61418 Out of Range Block::1 bo:72 Volume offset:584 Expected:4 Got:1
61419 Shift 0, v:1 sv:1 min:2 cur:4
61420 Out of Range Block::1 bo:73 Volume offset:585 Expected:4 Got:1
61421 Shift 0, v:1 sv:1 min:2 cur:4
61422 Out of Range Block::1 bo:74 Volume offset:586 Expected:4 Got:1
61423 Shift 0, v:1 sv:1 min:2 cur:4
61424 Out of Range Block::1 bo:75 Volume offset:587 Expected:4 Got:1
61425 Shift 0, v:1 sv:1 min:2 cur:4
61426 Out of Range Block::1 bo:76 Volume offset:588 Expected:4 Got:1
61427 Shift 0, v:1 sv:1 min:2 cur:4
61428 Out of Range Block::1 bo:77 Volume offset:589 Expected:4 Got:1
61429 Shift 0, v:1 sv:1 min:2 cur:4
61430 Out of Range Block::1 bo:78 Volume offset:590 Expected:4 Got:1
61431 Shift 0, v:1 sv:1 min:2 cur:4
61432 Out of Range Block::1 bo:79 Volume offset:591 Expected:4 Got:1
61433 Shift 0, v:1 sv:1 min:2 cur:4
61434 Out of Range Block::1 bo:80 Volume offset:592 Expected:4 Got:1
61435 Shift 0, v:1 sv:1 min:2 cur:4
61436 Out of Range Block::1 bo:81 Volume offset:593 Expected:4 Got:1
61437 Shift 0, v:1 sv:1 min:2 cur:4
61438 Out of Range Block::1 bo:82 Volume offset:594 Expected:4 Got:1
61439 Shift 0, v:1 sv:1 min:2 cur:4
61440 Out of Range Block::1 bo:83 Volume offset:595 Expected:4 Got:1
61441 Shift 0, v:1 sv:1 min:2 cur:4
61442 Out of Range Block::1 bo:84 Volume offset:596 Expected:4 Got:1
61443 Shift 0, v:1 sv:1 min:2 cur:4
61444 Out of Range Block::1 bo:85 Volume offset:597 Expected:4 Got:1
61445 Shift 0, v:1 sv:1 min:2 cur:4
61446 Out of Range Block::1 bo:86 Volume offset:598 Expected:4 Got:1
61447 Shift 0, v:1 sv:1 min:2 cur:4
61448 Out of Range Block::1 bo:87 Volume offset:599 Expected:4 Got:1
61449 Shift 0, v:1 sv:1 min:2 cur:4
61450 Out of Range Block::1 bo:88 Volume offset:600 Expected:4 Got:1
61451 Shift 0, v:1 sv:1 min:2 cur:4
61452 Out of Range Block::1 bo:89 Volume offset:601 Expected:4 Got:1
61453 Shift 0, v:1 sv:1 min:2 cur:4
61454 Out of Range Block::1 bo:90 Volume offset:602 Expected:4 Got:1
61455 Shift 0, v:1 sv:1 min:2 cur:4
61456 Out of Range Block::1 bo:91 Volume offset:603 Expected:4 Got:1
61457 Shift 0, v:1 sv:1 min:2 cur:4
61458 Out of Range Block::1 bo:92 Volume offset:604 Expected:4 Got:1
61459 Shift 0, v:1 sv:1 min:2 cur:4
61460 Out of Range Block::1 bo:93 Volume offset:605 Expected:4 Got:1
61461 Shift 0, v:1 sv:1 min:2 cur:4
61462 Out of Range Block::1 bo:94 Volume offset:606 Expected:4 Got:1
61463 Shift 0, v:1 sv:1 min:2 cur:4
61464 Out of Range Block::1 bo:95 Volume offset:607 Expected:4 Got:1
61465 Shift 0, v:1 sv:1 min:2 cur:4
61466 Out of Range Block::1 bo:96 Volume offset:608 Expected:4 Got:1
61467 Shift 0, v:1 sv:1 min:2 cur:4
61468 Out of Range Block::1 bo:97 Volume offset:609 Expected:4 Got:1
61469 Shift 0, v:1 sv:1 min:2 cur:4
61470 Out of Range Block::1 bo:98 Volume offset:610 Expected:4 Got:1
61471 Shift 0, v:1 sv:1 min:2 cur:4
61472 Out of Range Block::1 bo:99 Volume offset:611 Expected:4 Got:1
61473 Shift 0, v:1 sv:1 min:2 cur:4
61474 Out of Range Block::1 bo:100 Volume offset:612 Expected:4 Got:1
61475 Shift 0, v:1 sv:1 min:2 cur:4
61476 Out of Range Block::1 bo:101 Volume offset:613 Expected:4 Got:1
61477 Shift 0, v:1 sv:1 min:2 cur:4
61478 Out of Range Block::1 bo:102 Volume offset:614 Expected:4 Got:1
61479 Shift 0, v:1 sv:1 min:2 cur:4
61480 Out of Range Block::1 bo:103 Volume offset:615 Expected:4 Got:1
61481 Shift 0, v:1 sv:1 min:2 cur:4
61482 Out of Range Block::1 bo:104 Volume offset:616 Expected:4 Got:1
61483 Shift 0, v:1 sv:1 min:2 cur:4
61484 Out of Range Block::1 bo:105 Volume offset:617 Expected:4 Got:1
61485 Shift 0, v:1 sv:1 min:2 cur:4
61486 Out of Range Block::1 bo:106 Volume offset:618 Expected:4 Got:1
61487 Shift 0, v:1 sv:1 min:2 cur:4
61488 Out of Range Block::1 bo:107 Volume offset:619 Expected:4 Got:1
61489 Shift 0, v:1 sv:1 min:2 cur:4
61490 Out of Range Block::1 bo:108 Volume offset:620 Expected:4 Got:1
61491 Shift 0, v:1 sv:1 min:2 cur:4
61492 Out of Range Block::1 bo:109 Volume offset:621 Expected:4 Got:1
61493 Shift 0, v:1 sv:1 min:2 cur:4
61494 Out of Range Block::1 bo:110 Volume offset:622 Expected:4 Got:1
61495 Shift 0, v:1 sv:1 min:2 cur:4
61496 Out of Range Block::1 bo:111 Volume offset:623 Expected:4 Got:1
61497 Shift 0, v:1 sv:1 min:2 cur:4
61498 Out of Range Block::1 bo:112 Volume offset:624 Expected:4 Got:1
61499 Shift 0, v:1 sv:1 min:2 cur:4
61500 Out of Range Block::1 bo:113 Volume offset:625 Expected:4 Got:1
61501 Shift 0, v:1 sv:1 min:2 cur:4
61502 Out of Range Block::1 bo:114 Volume offset:626 Expected:4 Got:1
61503 Shift 0, v:1 sv:1 min:2 cur:4
61504 Out of Range Block::1 bo:115 Volume offset:627 Expected:4 Got:1
61505 Shift 0, v:1 sv:1 min:2 cur:4
61506 Out of Range Block::1 bo:116 Volume offset:628 Expected:4 Got:1
61507 Shift 0, v:1 sv:1 min:2 cur:4
61508 Out of Range Block::1 bo:117 Volume offset:629 Expected:4 Got:1
61509 Shift 0, v:1 sv:1 min:2 cur:4
61510 Out of Range Block::1 bo:118 Volume offset:630 Expected:4 Got:1
61511 Shift 0, v:1 sv:1 min:2 cur:4
61512 Out of Range Block::1 bo:119 Volume offset:631 Expected:4 Got:1
61513 Shift 0, v:1 sv:1 min:2 cur:4
61514 Out of Range Block::1 bo:120 Volume offset:632 Expected:4 Got:1
61515 Shift 0, v:1 sv:1 min:2 cur:4
61516 Out of Range Block::1 bo:121 Volume offset:633 Expected:4 Got:1
61517 Shift 0, v:1 sv:1 min:2 cur:4
61518 Out of Range Block::1 bo:122 Volume offset:634 Expected:4 Got:1
61519 Shift 0, v:1 sv:1 min:2 cur:4
61520 Out of Range Block::1 bo:123 Volume offset:635 Expected:4 Got:1
61521 Shift 0, v:1 sv:1 min:2 cur:4
61522 Out of Range Block::1 bo:124 Volume offset:636 Expected:4 Got:1
61523 Shift 0, v:1 sv:1 min:2 cur:4
61524 Out of Range Block::1 bo:125 Volume offset:637 Expected:4 Got:1
61525 Shift 0, v:1 sv:1 min:2 cur:4
61526 Out of Range Block::1 bo:126 Volume offset:638 Expected:4 Got:1
61527 Shift 0, v:1 sv:1 min:2 cur:4
61528 Out of Range Block::1 bo:127 Volume offset:639 Expected:4 Got:1
61529 okShift 0, v:1 sv:1 min:2 cur:4
61530 Out of Range Block::1 bo:128 Volume offset:640 Expected:4 Got:1
61531 Shift 0, v:1 sv:1 min:2 cur:4
61532 Out of Range Block::1 bo:129 Volume offset:641 Expected:4 Got:1
61533 Shift 0, v:1 sv:1 min:2 cur:4
61534 Out of Range Block::1 bo:130 Volume offset:642 Expected:4 Got:1
61535 Shift 0, v:1 sv:1 min:2 cur:4
61536 Out of Range Block::1 bo:131 Volume offset:643 Expected:4 Got:1
61537 Shift 0, v:1 sv:1 min:2 cur:4
61538 Out of Range Block::1 bo:132 Volume offset:644 Expected:4 Got:1
61539 Shift 0, v:1 sv:1 min:2 cur:4
61540 Out of Range Block::1 bo:133 Volume offset:645 Expected:4 Got:1
61541 Shift 0, v:1 sv:1 min:2 cur:4
61542 Out of Range Block::1 bo:134 Volume offset:646 Expected:4 Got:1
61543 Shift 0, v:1 sv:1 min:2 cur:4
61544 Out of Range Block::1 bo:135 Volume offset:647 Expected:4 Got:1
61545 Shift 0, v:1 sv:1 min:2 cur:4
61546 Out of Range Block::1 bo:136 Volume offset:648 Expected:4 Got:1
61547 Shift 0, v:1 sv:1 min:2 cur:4
61548 Out of Range Block::1 bo:137 Volume offset:649 Expected:4 Got:1
61549 Shift 0, v:1 sv:1 min:2 cur:4
61550 Out of Range Block::1 bo:138 Volume offset:650 Expected:4 Got:1
61551 Shift 0, v:1 sv:1 min:2 cur:4
61552 Out of Range Block::1 bo:139 Volume offset:651 Expected:4 Got:1
61553 Shift 0, v:1 sv:1 min:2 cur:4
61554 Out of Range Block::1 bo:140 Volume offset:652 Expected:4 Got:1
61555 Shift 0, v:1 sv:1 min:2 cur:4
61556 Out of Range Block::1 bo:141 Volume offset:653 Expected:4 Got:1
61557 Shift 0, v:1 sv:1 min:2 cur:4
61558 Out of Range Block::1 bo:142 Volume offset:654 Expected:4 Got:1
61559 Shift 0, v:1 sv:1 min:2 cur:4
61560 Out of Range Block::1 bo:143 Volume offset:655 Expected:4 Got:1
61561 Shift 0, v:1 sv:1 min:2 cur:4
61562 Out of Range Block::1 bo:144 Volume offset:656 Expected:4 Got:1
61563 Shift 0, v:1 sv:1 min:2 cur:4
61564 Out of Range Block::1 bo:145 Volume offset:657 Expected:4 Got:1
61565 Shift 0, v:1 sv:1 min:2 cur:4
61566 Out of Range Block::1 bo:146 Volume offset:658 Expected:4 Got:1
61567 Shift 0, v:1 sv:1 min:2 cur:4
61568 Out of Range Block::1 bo:147 Volume offset:659 Expected:4 Got:1
61569 Shift 0, v:1 sv:1 min:2 cur:4
61570 Out of Range Block::1 bo:148 Volume offset:660 Expected:4 Got:1
61571 Shift 0, v:1 sv:1 min:2 cur:4
61572 Out of Range Block::1 bo:149 Volume offset:661 Expected:4 Got:1
61573 Shift 0, v:1 sv:1 min:2 cur:4
61574 Out of Range Block::1 bo:150 Volume offset:662 Expected:4 Got:1
61575 Shift 0, v:1 sv:1 min:2 cur:4
61576 Out of Range Block::1 bo:151 Volume offset:663 Expected:4 Got:1
61577 Shift 0, v:1 sv:1 min:2 cur:4
61578 Out of Range Block::1 bo:152 Volume offset:664 Expected:4 Got:1
61579 Shift 0, v:1 sv:1 min:2 cur:4
61580 Out of Range Block::1 bo:153 Volume offset:665 Expected:4 Got:1
61581 Shift 0, v:1 sv:1 min:2 cur:4
61582 Out of Range Block::1 bo:154 Volume offset:666 Expected:4 Got:1
61583 Shift 0, v:1 sv:1 min:2 cur:4
61584 Out of Range Block::1 bo:155 Volume offset:667 Expected:4 Got:1
61585 Shift 0, v:1 sv:1 min:2 cur:4
61586 Out of Range Block::1 bo:156 Volume offset:668 Expected:4 Got:1
61587 Shift 0, v:1 sv:1 min:2 cur:4
61588 Out of Range Block::1 bo:157 Volume offset:669 Expected:4 Got:1
61589 Shift 0, v:1 sv:1 min:2 cur:4
61590 Out of Range Block::1 bo:158 Volume offset:670 Expected:4 Got:1
61591 Shift 0, v:1 sv:1 min:2 cur:4
61592 Out of Range Block::1 bo:159 Volume offset:671 Expected:4 Got:1
61593 Shift 0, v:1 sv:1 min:2 cur:4
61594 Out of Range Block::1 bo:160 Volume offset:672 Expected:4 Got:1
61595 Shift 0, v:1 sv:1 min:2 cur:4
61596 Out of Range Block::1 bo:161 Volume offset:673 Expected:4 Got:1
61597 Shift 0, v:1 sv:1 min:2 cur:4
61598 Out of Range Block::1 bo:162 Volume offset:674 Expected:4 Got:1
61599 Shift 0, v:1 sv:1 min:2 cur:4
61600 Out of Range Block::1 bo:163 Volume offset:675 Expected:4 Got:1
61601 Shift 0, v:1 sv:1 min:2 cur:4
61602 Out of Range Block::1 bo:164 Volume offset:676 Expected:4 Got:1
61603 Shift 0, v:1 sv:1 min:2 cur:4
61604 Out of Range Block::1 bo:165 Volume offset:677 Expected:4 Got:1
61605 Shift 0, v:1 sv:1 min:2 cur:4
61606 Out of Range Block::1 bo:166 Volume offset:678 Expected:4 Got:1
61607 Shift 0, v:1 sv:1 min:2 cur:4
61608 Out of Range Block::1 bo:167 Volume offset:679 Expected:4 Got:1
61609 Shift 0, v:1 sv:1 min:2 cur:4
61610 Out of Range Block::1 bo:168 Volume offset:680 Expected:4 Got:1
61611 Shift 0, v:1 sv:1 min:2 cur:4
61612 Out of Range Block::1 bo:169 Volume offset:681 Expected:4 Got:1
61613 Shift 0, v:1 sv:1 min:2 cur:4
61614 Out of Range Block::1 bo:170 Volume offset:682 Expected:4 Got:1
61615 Shift 0, v:1 sv:1 min:2 cur:4
61616 Out of Range Block::1 bo:171 Volume offset:683 Expected:4 Got:1
61617 Shift 0, v:1 sv:1 min:2 cur:4
61618 Out of Range Block::1 bo:172 Volume offset:684 Expected:4 Got:1
61619 Shift 0, v:1 sv:1 min:2 cur:4
61620 Out of Range Block::1 bo:173 Volume offset:685 Expected:4 Got:1
61621 Shift 0, v:1 sv:1 min:2 cur:4
61622 Out of Range Block::1 bo:174 Volume offset:686 Expected:4 Got:1
61623 Shift 0, v:1 sv:1 min:2 cur:4
61624 Out of Range Block::1 bo:175 Volume offset:687 Expected:4 Got:1
61625 Shift 0, v:1 sv:1 min:2 cur:4
61626 Out of Range Block::1 bo:176 Volume offset:688 Expected:4 Got:1
61627 Shift 0, v:1 sv:1 min:2 cur:4
61628 Out of Range Block::1 bo:177 Volume offset:689 Expected:4 Got:1
61629 Shift 0, v:1 sv:1 min:2 cur:4
61630 Out of Range Block::1 bo:178 Volume offset:690 Expected:4 Got:1
61631 Shift 0, v:1 sv:1 min:2 cur:4
61632 Out of Range Block::1 bo:179 Volume offset:691 Expected:4 Got:1
61633 Shift 0, v:1 sv:1 min:2 cur:4
61634 Out of Range Block::1 bo:180 Volume offset:692 Expected:4 Got:1
61635 Shift 0, v:1 sv:1 min:2 cur:4
61636 Out of Range Block::1 bo:181 Volume offset:693 Expected:4 Got:1
61637 Shift 0, v:1 sv:1 min:2 cur:4
61638 Out of Range Block::1 bo:182 Volume offset:694 Expected:4 Got:1
61639 Shift 0, v:1 sv:1 min:2 cur:4
61640 Out of Range Block::1 bo:183 Volume offset:695 Expected:4 Got:1
61641 Shift 0, v:1 sv:1 min:2 cur:4
61642 Out of Range Block::1 bo:184 Volume offset:696 Expected:4 Got:1
61643 Shift 0, v:1 sv:1 min:2 cur:4
61644 Out of Range Block::1 bo:185 Volume offset:697 Expected:4 Got:1
61645 Shift 0, v:1 sv:1 min:2 cur:4
61646 Out of Range Block::1 bo:186 Volume offset:698 Expected:4 Got:1
61647 Shift 0, v:1 sv:1 min:2 cur:4
61648 Out of Range Block::1 bo:187 Volume offset:699 Expected:4 Got:1
61649 Shift 0, v:1 sv:1 min:2 cur:4
61650 Out of Range Block::1 bo:188 Volume offset:700 Expected:4 Got:1
61651 Shift 0, v:1 sv:1 min:2 cur:4
61652 Out of Range Block::1 bo:189 Volume offset:701 Expected:4 Got:1
61653 Shift 0, v:1 sv:1 min:2 cur:4
61654 Out of Range Block::1 bo:190 Volume offset:702 Expected:4 Got:1
61655 Shift 0, v:1 sv:1 min:2 cur:4
61656 Out of Range Block::1 bo:191 Volume offset:703 Expected:4 Got:1
61657 Shift 0, v:1 sv:1 min:2 cur:4
61658 Out of Range Block::1 bo:192 Volume offset:704 Expected:4 Got:1
61659 Shift 0, v:1 sv:1 min:2 cur:4
61660 Out of Range Block::1 bo:193 Volume offset:705 Expected:4 Got:1
61661 Shift 0, v:1 sv:1 min:2 cur:4
61662 Out of Range Block::1 bo:194 Volume offset:706 Expected:4 Got:1
61663 Shift 0, v:1 sv:1 min:2 cur:4
61664 Out of Range Block::1 bo:195 Volume offset:707 Expected:4 Got:1
61665 Shift 0, v:1 sv:1 min:2 cur:4
61666 Out of Range Block::1 bo:196 Volume offset:708 Expected:4 Got:1
61667 Shift 0, v:1 sv:1 min:2 cur:4
61668 Out of Range Block::1 bo:197 Volume offset:709 Expected:4 Got:1
61669 Shift 0, v:1 sv:1 min:2 cur:4
61670 Out of Range Block::1 bo:198 Volume offset:710 Expected:4 Got:1
61671 Shift 0, v:1 sv:1 min:2 cur:4
61672 Out of Range Block::1 bo:199 Volume offset:711 Expected:4 Got:1
61673 Shift 0, v:1 sv:1 min:2 cur:4
61674 Out of Range Block::1 bo:200 Volume offset:712 Expected:4 Got:1
61675 Shift 0, v:1 sv:1 min:2 cur:4
61676 Out of Range Block::1 bo:201 Volume offset:713 Expected:4 Got:1
61677 Shift 0, v:1 sv:1 min:2 cur:4
61678 Out of Range Block::1 bo:202 Volume offset:714 Expected:4 Got:1
61679 Shift 0, v:1 sv:1 min:2 cur:4
61680 Out of Range Block::1 bo:203 Volume offset:715 Expected:4 Got:1
61681 Shift 0, v:1 sv:1 min:2 cur:4
61682 Out of Range Block::1 bo:204 Volume offset:716 Expected:4 Got:1
61683 Shift 0, v:1 sv:1 min:2 cur:4
61684 Out of Range Block::1 bo:205 Volume offset:717 Expected:4 Got:1
61685 Shift 0, v:1 sv:1 min:2 cur:4
61686 Out of Range Block::1 bo:206 Volume offset:718 Expected:4 Got:1
61687 Shift 0, v:1 sv:1 min:2 cur:4
61688 Out of Range Block::1 bo:207 Volume offset:719 Expected:4 Got:1
61689 Shift 0, v:1 sv:1 min:2 cur:4
61690 Out of Range Block::1 bo:208 Volume offset:720 Expected:4 Got:1
61691 Shift 0, v:1 sv:1 min:2 cur:4
61692 Out of Range Block::1 bo:209 Volume offset:721 Expected:4 Got:1
61693 Shift 0, v:1 sv:1 min:2 cur:4
61694 Out of Range Block::1 bo:210 Volume offset:722 Expected:4 Got:1
61695 Shift 0, v:1 sv:1 min:2 cur:4
61696 Out of Range Block::1 bo:211 Volume offset:723 Expected:4 Got:1
61697 Shift 0, v:1 sv:1 min:2 cur:4
61698 Out of Range Block::1 bo:212 Volume offset:724 Expected:4 Got:1
61699 Shift 0, v:1 sv:1 min:2 cur:4
61700 Out of Range Block::1 bo:213 Volume offset:725 Expected:4 Got:1
61701 Shift 0, v:1 sv:1 min:2 cur:4
61702 Out of Range Block::1 bo:214 Volume offset:726 Expected:4 Got:1
61703 Shift 0, v:1 sv:1 min:2 cur:4
61704 Out of Range Block::1 bo:215 Volume offset:727 Expected:4 Got:1
61705 Shift 0, v:1 sv:1 min:2 cur:4
61706 Out of Range Block::1 bo:216 Volume offset:728 Expected:4 Got:1
61707 Shift 0, v:1 sv:1 min:2 cur:4
61708 Out of Range Block::1 bo:217 Volume offset:729 Expected:4 Got:1
61709 Shift 0, v:1 sv:1 min:2 cur:4
61710 Out of Range Block::1 bo:218 Volume offset:730 Expected:4 Got:1
61711 Shift 0, v:1 sv:1 min:2 cur:4
61712 Out of Range Block::1 bo:219 Volume offset:731 Expected:4 Got:1
61713 Shift 0, v:1 sv:1 min:2 cur:4
61714 Out of Range Block::1 bo:220 Volume offset:732 Expected:4 Got:1
61715 Shift 0, v:1 sv:1 min:2 cur:4
61716 Out of Range Block::1 bo:221 Volume offset:733 Expected:4 Got:1
61717 Shift 0, v:1 sv:1 min:2 cur:4
61718 Out of Range Block::1 bo:222 Volume offset:734 Expected:4 Got:1
61719 Shift 0, v:1 sv:1 min:2 cur:4
61720 Out of Range Block::1 bo:223 Volume offset:735 Expected:4 Got:1
61721 Shift 0, v:1 sv:1 min:2 cur:4
61722 Out of Range Block::1 bo:224 Volume offset:736 Expected:4 Got:1
61723 Shift 0, v:1 sv:1 min:2 cur:4
61724 Out of Range Block::1 bo:225 Volume offset:737 Expected:4 Got:1
61725 Shift 0, v:1 sv:1 min:2 cur:4
61726 Out of Range Block::1 bo:226 Volume offset:738 Expected:4 Got:1
61727 
61728 Shift 0, v:1 sv:1 min:2 cur:4
61729 Out of Range Block::1 bo:227 Volume offset:739 Expected:4 Got:1
61730 Shift 0, v:1 sv:1 min:2 cur:4
61731 Out of Range Block::1 bo:228 Volume offset:740 Expected:4 Got:1
61732 Shift 0, v:1 sv:1 min:2 cur:4
61733 Out of Range Block::1 bo:229 Volume offset:741 Expected:4 Got:1
61734 Shift 0, v:1 sv:1 min:2 cur:4
61735 Out of Range Block::1 bo:230 Volume offset:742 Expected:4 Got:1
61736 Shift 0, v:1 sv:1 min:2 cur:4
61737 Out of Range Block::1 bo:231 Volume offset:743 Expected:4 Got:1
61738 Shift 0, v:1 sv:1 min:2 cur:4
61739 Out of Range Block::1 bo:232 Volume offset:744 Expected:4 Got:1
61740 Shift 0, v:1 sv:1 min:2 cur:4
61741 Out of Range Block::1 bo:233 Volume offset:745 Expected:4 Got:1
61742 Shift 0, v:1 sv:1 min:2 cur:4
61743 Out of Range Block::1 bo:234 Volume offset:746 Expected:4 Got:1
61744 Shift 0, v:1 sv:1 min:2 cur:4
61745 Out of Range Block::1 bo:235 Volume offset:747 Expected:4 Got:1
61746 Shift 0, v:1 sv:1 min:2 cur:4
61747 Out of Range Block::1 bo:236 Volume offset:748 Expected:4 Got:1
61748 Shift 0, v:1 sv:1 min:2 cur:4
61749 Out of Range Block::1 bo:237 Volume offset:749 Expected:4 Got:1
61750 Shift 0, v:1 sv:1 min:2 cur:4
61751 Out of Range Block::1 bo:238 Volume offset:750 Expected:4 Got:1
61752 Shift 0, v:1 sv:1 min:2 cur:4
61753 Out of Range Block::1 bo:239 Volume offset:751 Expected:4 Got:1
61754 Shift 0, v:1 sv:1 min:2 cur:4
61755 Out of Range Block::1 bo:240 Volume offset:752 Expected:4 Got:1
61756 Shift 0, v:1 sv:1 min:2 cur:4
61757 Out of Range Block::1 bo:241 Volume offset:753 Expected:4 Got:1
61758 Shift 0, v:1 sv:1 min:2 cur:4
61759 Out of Range Block::1 bo:242 Volume offset:754 Expected:4 Got:1
61760 Shift 0, v:1 sv:1 min:2 cur:4
61761 Out of Range Block::1 bo:243 Volume offset:755 Expected:4 Got:1
61762 Shift 0, v:1 sv:1 min:2 cur:4
61763 Out of Range Block::1 bo:244 Volume offset:756 Expected:4 Got:1
61764 Shift 0, v:1 sv:1 min:2 cur:4
61765 Out of Range Block::1 bo:245 Volume offset:757 Expected:4 Got:1
61766 Shift 0, v:1 sv:1 min:2 cur:4
61767 Out of Range Block::1 bo:246 Volume offset:758 Expected:4 Got:1
61768 Shift 0, v:1 sv:1 min:2 cur:4
61769 Out of Range Block::1 bo:247 Volume offset:759 Expected:4 Got:1
61770 Shift 0, v:1 sv:1 min:2 cur:4
61771 Out of Range Block::1 bo:248 Volume offset:760 Expected:4 Got:1
61772 Shift 0, v:1 sv:1 min:2 cur:4
61773 Out of Range Block::1 bo:249 Volume offset:761 Expected:4 Got:1
61774 Shift 0, v:1 sv:1 min:2 cur:4
61775 Out of Range Block::1 bo:250 Volume offset:762 Expected:4 Got:1
61776 Shift 0, v:1 sv:1 min:2 cur:4
61777 Out of Range Block::1 bo:251 Volume offset:763 Expected:4 Got:1
61778 Shift 0, v:1 sv:1 min:2 cur:4
61779 Out of Range Block::1 bo:252 Volume offset:764 Expected:4 Got:1
61780 Shift 0, v:1 sv:1 min:2 cur:4
61781 Out of Range Block::1 bo:253 Volume offset:765 Expected:4 Got:1
61782 Shift 0, v:1 sv:1 min:2 cur:4
61783 Out of Range Block::1 bo:254 Volume offset:766 Expected:4 Got:1
61784 Shift 0, v:1 sv:1 min:2 cur:4
61785 Out of Range Block::1 bo:255 Volume offset:767 Expected:4 Got:1
61786 Shift 0, v:1 sv:1 min:2 cur:4
61787 Out of Range Block::1 bo:256 Volume offset:768 Expected:4 Got:1
61788 Shift 0, v:1 sv:1 min:2 cur:4
61789 Out of Range Block::1 bo:257 Volume offset:769 Expected:4 Got:1
61790 Shift 0, v:1 sv:1 min:2 cur:4
61791 Out of Range Block::1 bo:258 Volume offset:770 Expected:4 Got:1
61792 Shift 0, v:1 sv:1 min:2 cur:4
61793 Out of Range Block::1 bo:259 Volume offset:771 Expected:4 Got:1
61794 Shift 0, v:1 sv:1 min:2 cur:4
61795 Out of Range Block::1 bo:260 Volume offset:772 Expected:4 Got:1
61796 Shift 0, v:1 sv:1 min:2 cur:4
61797 Out of Range Block::1 bo:261 Volume offset:773 Expected:4 Got:1
61798 Shift 0, v:1 sv:1 min:2 cur:4
61799 Out of Range Block::1 bo:262 Volume offset:774 Expected:4 Got:1
61800 Shift 0, v:1 sv:1 min:2 cur:4
61801 Out of Range Block::1 bo:263 Volume offset:775 Expected:4 Got:1
61802 Shift 0, v:1 sv:1 min:2 cur:4
61803 Out of Range Block::1 bo:264 Volume offset:776 Expected:4 Got:1
61804 Shift 0, v:1 sv:1 min:2 cur:4
61805 Out of Range Block::1 bo:265 Volume offset:777 Expected:4 Got:1
61806 Shift 0, v:1 sv:1 min:2 cur:4
61807 Out of Range Block::1 bo:266 Volume offset:778 Expected:4 Got:1
61808 Shift 0, v:1 sv:1 min:2 cur:4
61809 Out of Range Block::1 bo:267 Volume offset:779 Expected:4 Got:1
61810 Shift 0, v:1 sv:1 min:2 cur:4
61811 Out of Range Block::1 bo:268 Volume offset:780 Expected:4 Got:1
61812 Shift 0, v:1 sv:1 min:2 cur:4
61813 Out of Range Block::1 bo:269 Volume offset:781 Expected:4 Got:1
61814 Shift 0, v:1 sv:1 min:2 cur:4
61815 Out of Range Block::1 bo:270 Volume offset:782 Expected:4 Got:1
61816 Shift 0, v:1 sv:1 min:2 cur:4
61817 Out of Range Block::1 bo:271 Volume offset:783 Expected:4 Got:1
61818 Shift 0, v:1 sv:1 min:2 cur:4
61819 Out of Range Block::1 bo:272 Volume offset:784 Expected:4 Got:1
61820 Shift 0, v:1 sv:1 min:2 cur:4
61821 Out of Range Block::1 bo:273 Volume offset:785 Expected:4 Got:1
61822 Shift 0, v:1 sv:1 min:2 cur:4
61823 Out of Range Block::1 bo:274 Volume offset:786 Expected:4 Got:1
61824 Shift 0, v:1 sv:1 min:2 cur:4
61825 Out of Range Block::1 bo:275 Volume offset:787 Expected:4 Got:1
61826 Shift 0, v:1 sv:1 min:2 cur:4
61827 Out of Range Block::1 bo:276 Volume offset:788 Expected:4 Got:1
61828 Shift 0, v:1 sv:1 min:2 cur:4
61829 Out of Range Block::1 bo:277 Volume offset:789 Expected:4 Got:1
61830 Shift 0, v:1 sv:1 min:2 cur:4
61831 Out of Range Block::1 bo:278 Volume offset:790 Expected:4 Got:1
61832 Shift 0, v:1 sv:1 min:2 cur:4
61833 Out of Range Block::1 bo:279 Volume offset:791 Expected:4 Got:1
61834 Shift 0, v:1 sv:1 min:2 cur:4
61835 Out of Range Block::1 bo:280 Volume offset:792 Expected:4 Got:1
61836 Shift 0, v:1 sv:1 min:2 cur:4
61837 Out of Range Block::1 bo:281 Volume offset:793 Expected:4 Got:1
61838 Shift 0, v:1 sv:1 min:2 cur:4
61839 Out of Range Block::1 bo:282 Volume offset:794 Expected:4 Got:1
61840 Shift 0, v:1 sv:1 min:2 cur:4
61841 Out of Range Block::1 bo:283 Volume offset:795 Expected:4 Got:1
61842 Shift 0, v:1 sv:1 min:2 cur:4
61843 Out of Range Block::1 bo:284 Volume offset:796 Expected:4 Got:1
61844 Shift 0, v:1 sv:1 min:2 cur:4
61845 Out of Range Block::1 bo:285 Volume offset:797 Expected:4 Got:1
61846 Shift 0, v:1 sv:1 min:2 cur:4
61847 Out of Range Block::1 bo:286 Volume offset:798 Expected:4 Got:1
61848 Shift 0, v:1 sv:1 min:2 cur:4
61849 Out of Range Block::1 bo:287 Volume offset:799 Expected:4 Got:1
61850 Shift 0, v:1 sv:1 min:2 cur:4
61851 Out of Range Block::1 bo:288 Volume offset:800 Expected:4 Got:1
61852 Shift 0, v:1 sv:1 min:2 cur:4
61853 Out of Range Block::1 bo:289 Volume offset:801 Expected:4 Got:1
61854 Shift 0, v:1 sv:1 min:2 cur:4
61855 Out of Range Block::1 bo:290 Volume offset:802 Expected:4 Got:1
61856 Shift 0, v:1 sv:1 min:2 cur:4
61857 Out of Range Block::1 bo:291 Volume offset:803 Expected:4 Got:1
61858 Shift 0, v:1 sv:1 min:2 cur:4
61859 Out of Range Block::1 bo:292 Volume offset:804 Expected:4 Got:1
61860 Shift 0, v:1 sv:1 min:2 cur:4
61861 Out of Range Block::1 bo:293 Volume offset:805 Expected:4 Got:1
61862 Shift 0, v:1 sv:1 min:2 cur:4
61863 Out of Range Block::1 bo:294 Volume offset:806 Expected:4 Got:1
61864 Shift 0, v:1 sv:1 min:2 cur:4
61865 Out of Range Block::1 bo:295 Volume offset:807 Expected:4 Got:1
61866 Shift 0, v:1 sv:1 min:2 cur:4
61867 Out of Range Block::1 bo:296 Volume offset:808 Expected:4 Got:1
61868 Shift 0, v:1 sv:1 min:2 cur:4
61869 Out of Range Block::1 bo:297 Volume offset:809 Expected:4 Got:1
61870 Shift 0, v:1 sv:1 min:2 cur:4
61871 Out of Range Block::1 bo:298 Volume offset:810 Expected:4 Got:1
61872 Shift 0, v:1 sv:1 min:2 cur:4
61873 Out of Range Block::1 bo:299 Volume offset:811 Expected:4 Got:1
61874 Shift 0, v:1 sv:1 min:2 cur:4
61875 Out of Range Block::1 bo:300 Volume offset:812 Expected:4 Got:1
61876 test test::test_wl_commit_range_update_min ... Shift 0, v:1 sv:1 min:2 cur:4
61877 Out of Range Block::1 bo:301 Volume offset:813 Expected:4 Got:1
61878 Shift 0, v:1 sv:1 min:2 cur:4
61879 Out of Range Block::1 bo:302 Volume offset:814 Expected:4 Got:1
61880 Shift 0, v:1 sv:1 min:2 cur:4
61881 Out of Range Block::1 bo:303 Volume offset:815 Expected:4 Got:1
61882 Shift 0, v:1 sv:1 min:2 cur:4
61883 Out of Range Block::1 bo:304 Volume offset:816 Expected:4 Got:1
61884 Shift 0, v:1 sv:1 min:2 cur:4
61885 Out of Range Block::1 bo:305 Volume offset:817 Expected:4 Got:1
61886 Shift 0, v:1 sv:1 min:2 cur:4
61887 Out of Range Block::1 bo:306 Volume offset:818 Expected:4 Got:1
61888 Shift 0, v:1 sv:1 min:2 cur:4
61889 Out of Range Block::1 bo:307 Volume offset:819 Expected:4 Got:1
61890 Shift 0, v:1 sv:1 min:2 cur:4
61891 Out of Range Block::1 bo:308 Volume offset:820 Expected:4 Got:1
61892 Shift 0, v:1 sv:1 min:2 cur:4
61893 Out of Range Block::1 bo:309 Volume offset:821 Expected:4 Got:1
61894 Shift 0, v:1 sv:1 min:2 cur:4
61895 Out of Range Block::1 bo:310 Volume offset:822 Expected:4 Got:1
61896 Shift 0, v:1 sv:1 min:2 cur:4
61897 Out of Range Block::1 bo:311 Volume offset:823 Expected:4 Got:1
61898 Shift 0, v:1 sv:1 min:2 cur:4
61899 Out of Range Block::1 bo:312 Volume offset:824 Expected:4 Got:1
61900 Shift 0, v:1 sv:1 min:2 cur:4
61901 Out of Range Block::1 bo:313 Volume offset:825 Expected:4 Got:1
61902 Shift 0, v:1 sv:1 min:2 cur:4
61903 Out of Range Block::1 bo:314 Volume offset:826 Expected:4 Got:1
61904 Shift 0, v:1 sv:1 min:2 cur:4
61905 Out of Range Block::1 bo:315 Volume offset:827 Expected:4 Got:1
61906 Shift 0, v:1 sv:1 min:2 cur:4
61907 Out of Range Block::1 bo:316 Volume offset:828 Expected:4 Got:1
61908 Shift 0, v:1 sv:1 min:2 cur:4
61909 Out of Range Block::1 bo:317 Volume offset:829 Expected:4 Got:1
61910 Shift 0, v:1 sv:1 min:2 cur:4
61911 Out of Range Block::1 bo:318 Volume offset:830 Expected:4 Got:1
61912 Shift 0, v:1 sv:1 min:2 cur:4
61913 Out of Range Block::1 bo:319 Volume offset:831 Expected:4 Got:1
61914 Shift 0, v:1 sv:1 min:2 cur:4
61915 Out of Range Block::1 bo:320 Volume offset:832 Expected:4 Got:1
61916 Shift 0, v:1 sv:1 min:2 cur:4
61917 Out of Range Block::1 bo:321 Volume offset:833 Expected:4 Got:1
61918 Shift 0, v:1 sv:1 min:2 cur:4
61919 Out of Range Block::1 bo:322 Volume offset:834 Expected:4 Got:1
61920 Shift 0, v:1 sv:1 min:2 cur:4
61921 Out of Range Block::1 bo:323 Volume offset:835 Expected:4 Got:1
61922 Shift 0, v:1 sv:1 min:2 cur:4
61923 okOut of Range Block::1 bo:324 Volume offset:836 Expected:4 Got:1
61924 Shift 0, v:1 sv:1 min:2 cur:4
61925 Out of Range Block::1 bo:325 Volume offset:837 Expected:4 Got:1
61926 Shift 0, v:1 sv:1 min:2 cur:4
61927 Out of Range Block::1 bo:326 Volume offset:838 Expected:4 Got:1
61928 Shift 0, v:1 sv:1 min:2 cur:4
61929 Out of Range Block::1 bo:327 Volume offset:839 Expected:4 Got:1
61930 Shift 0, v:1 sv:1 min:2 cur:4
61931 Out of Range Block::1 bo:328 Volume offset:840 Expected:4 Got:1
61932 Shift 0, v:1 sv:1 min:2 cur:4
61933 Out of Range Block::1 bo:329 Volume offset:841 Expected:4 Got:1
61934 Shift 0, v:1 sv:1 min:2 cur:4
61935 Out of Range Block::1 bo:330 Volume offset:842 Expected:4 Got:1
61936 Shift 0, v:1 sv:1 min:2 cur:4
61937 Out of Range Block::1 bo:331 Volume offset:843 Expected:4 Got:1
61938 Shift 0, v:1 sv:1 min:2 cur:4
61939 Out of Range Block::1 bo:332 Volume offset:844 Expected:4 Got:1
61940 Shift 0, v:1 sv:1 min:2 cur:4
61941 Out of Range Block::1 bo:333 Volume offset:845 Expected:4 Got:1
61942 Shift 0, v:1 sv:1 min:2 cur:4
61943 Out of Range Block::1 bo:334 Volume offset:846 Expected:4 Got:1
61944 Shift 0, v:1 sv:1 min:2 cur:4
61945 Out of Range Block::1 bo:335 Volume offset:847 Expected:4 Got:1
61946 Shift 0, v:1 sv:1 min:2 cur:4
61947 Out of Range Block::1 bo:336 Volume offset:848 Expected:4 Got:1
61948 Shift 0, v:1 sv:1 min:2 cur:4
61949 Out of Range Block::1 bo:337 Volume offset:849 Expected:4 Got:1
61950 Shift 0, v:1 sv:1 min:2 cur:4
61951 Out of Range Block::1 bo:338 Volume offset:850 Expected:4 Got:1
61952 Shift 0, v:1 sv:1 min:2 cur:4
61953 Out of Range Block::1 bo:339 Volume offset:851 Expected:4 Got:1
61954 Shift 0, v:1 sv:1 min:2 cur:4
61955 Out of Range Block::1 bo:340 Volume offset:852 Expected:4 Got:1
61956 Shift 0, v:1 sv:1 min:2 cur:4
61957 Out of Range Block::1 bo:341 Volume offset:853 Expected:4 Got:1
61958 Shift 0, v:1 sv:1 min:2 cur:4
61959 Out of Range Block::1 bo:342 Volume offset:854 Expected:4 Got:1
61960 Shift 0, v:1 sv:1 min:2 cur:4
61961 Out of Range Block::1 bo:343 Volume offset:855 Expected:4 Got:1
61962 Shift 0, v:1 sv:1 min:2 cur:4
61963 Out of Range Block::1 bo:344 Volume offset:856 Expected:4 Got:1
61964 Shift 0, v:1 sv:1 min:2 cur:4
61965 Out of Range Block::1 bo:345 Volume offset:857 Expected:4 Got:1
61966 Shift 0, v:1 sv:1 min:2 cur:4
61967 Out of Range Block::1 bo:346 Volume offset:858 Expected:4 Got:1
61968 Shift 0, v:1 sv:1 min:2 cur:4
61969 Out of Range Block::1 bo:347 Volume offset:859 Expected:4 Got:1
61970 Shift 0, v:1 sv:1 min:2 cur:4
61971 Out of Range Block::1 bo:348 Volume offset:860 Expected:4 Got:1
61972 Shift 0, v:1 sv:1 min:2 cur:4
61973 Out of Range Block::1 bo:349 Volume offset:861 Expected:4 Got:1
61974 Shift 0, v:1 sv:1 min:2 cur:4
61975 Out of Range Block::1 bo:350 Volume offset:862 Expected:4 Got:1
61976 Shift 0, v:1 sv:1 min:2 cur:4
61977 Out of Range Block::1 bo:351 Volume offset:863 Expected:4 Got:1
61978 Shift 0, v:1 sv:1 min:2 cur:4
61979 Out of Range Block::1 bo:352 Volume offset:864 Expected:4 Got:1
61980 Shift 0, v:1 sv:1 min:2 cur:4
61981 Out of Range Block::1 bo:353 Volume offset:865 Expected:4 Got:1
61982 Shift 0, v:1 sv:1 min:2 cur:4
61983 Out of Range Block::1 bo:354 Volume offset:866 Expected:4 Got:1
61984 Shift 0, v:1 sv:1 min:2 cur:4
61985 Out of Range Block::1 bo:355 Volume offset:867 Expected:4 Got:1
61986 Shift 0, v:1 sv:1 min:2 cur:4
61987 Out of Range Block::1 bo:356 Volume offset:868 Expected:4 Got:1
61988 Shift 0, v:1 sv:1 min:2 cur:4
61989 Out of Range Block::1 bo:357 Volume offset:869 Expected:4 Got:1
61990 Shift 0, v:1 sv:1 min:2 cur:4
61991 Out of Range Block::1 bo:358 Volume offset:870 Expected:4 Got:1
61992 Shift 0, v:1 sv:1 min:2 cur:4
61993 Out of Range Block::1 bo:359 Volume offset:871 Expected:4 Got:1
61994 Shift 0, v:1 sv:1 min:2 cur:4
61995 Out of Range Block::1 bo:360 Volume offset:872 Expected:4 Got:1
61996 Shift 0, v:1 sv:1 min:2 cur:4
61997 Out of Range Block::1 bo:361 Volume offset:873 Expected:4 Got:1
61998 Shift 0, v:1 sv:1 min:2 cur:4
61999 Out of Range Block::1 bo:362 Volume offset:874 Expected:4 Got:1
62000 Shift 0, v:1 sv:1 min:2 cur:4
62001 Out of Range Block::1 bo:363 Volume offset:875 Expected:4 Got:1
62002 Shift 0, v:1 sv:1 min:2 cur:4
62003 Out of Range Block::1 bo:364 Volume offset:876 Expected:4 Got:1
62004 Shift 0, v:1 sv:1 min:2 cur:4
62005 Out of Range Block::1 bo:365 Volume offset:877 Expected:4 Got:1
62006 Shift 0, v:1 sv:1 min:2 cur:4
62007 Out of Range Block::1 bo:366 Volume offset:878 Expected:4 Got:1
62008 Shift 0, v:1 sv:1 min:2 cur:4
62009 Out of Range Block::1 bo:367 Volume offset:879 Expected:4 Got:1
62010 Shift 0, v:1 sv:1 min:2 cur:4
62011 Out of Range Block::1 bo:368 Volume offset:880 Expected:4 Got:1
62012 Shift 0, v:1 sv:1 min:2 cur:4
62013 Out of Range Block::1 bo:369 Volume offset:881 Expected:4 Got:1
62014 Shift 0, v:1 sv:1 min:2 cur:4
62015 Out of Range Block::1 bo:370 Volume offset:882 Expected:4 Got:1
62016 Shift 0, v:1 sv:1 min:2 cur:4
62017 Out of Range Block::1 bo:371 Volume offset:883 Expected:4 Got:1
62018 Shift 0, v:1 sv:1 min:2 cur:4
62019 Out of Range Block::1 bo:372 Volume offset:884 Expected:4 Got:1
62020 Shift 0, v:1 sv:1 min:2 cur:4
62021 Out of Range Block::1 bo:373 Volume offset:885 Expected:4 Got:1
62022 Shift 0, v:1 sv:1 min:2 cur:4
62023 Out of Range Block::1 bo:374 Volume offset:886 Expected:4 Got:1
62024 Shift 0, v:1 sv:1 min:2 cur:4
62025 Out of Range Block::1 bo:375 Volume offset:887 Expected:4 Got:1
62026 Shift 0, v:1 sv:1 min:2 cur:4
62027 Out of Range Block::1 bo:376 Volume offset:888 Expected:4 Got:1
62028 Shift 0, v:1 sv:1 min:2 cur:4
62029 Out of Range Block::1 bo:377 Volume offset:889 Expected:4 Got:1
62030 Shift 0, v:1 sv:1 min:2 cur:4
62031 Out of Range Block::1 bo:378 Volume offset:890 Expected:4 Got:1
62032 Shift 0, v:1 sv:1 min:2 cur:4
62033 Out of Range Block::1 bo:379 Volume offset:891 Expected:4 Got:1
62034 
62035 Shift 0, v:1 sv:1 min:2 cur:4
62036 Out of Range Block::1 bo:380 Volume offset:892 Expected:4 Got:1
62037 Shift 0, v:1 sv:1 min:2 cur:4
62038 Out of Range Block::1 bo:381 Volume offset:893 Expected:4 Got:1
62039 Shift 0, v:1 sv:1 min:2 cur:4
62040 Out of Range Block::1 bo:382 Volume offset:894 Expected:4 Got:1
62041 Shift 0, v:1 sv:1 min:2 cur:4
62042 Out of Range Block::1 bo:383 Volume offset:895 Expected:4 Got:1
62043 Shift 0, v:1 sv:1 min:2 cur:4
62044 Out of Range Block::1 bo:384 Volume offset:896 Expected:4 Got:1
62045 Shift 0, v:1 sv:1 min:2 cur:4
62046 Out of Range Block::1 bo:385 Volume offset:897 Expected:4 Got:1
62047 Shift 0, v:1 sv:1 min:2 cur:4
62048 Out of Range Block::1 bo:386 Volume offset:898 Expected:4 Got:1
62049 Shift 0, v:1 sv:1 min:2 cur:4
62050 Out of Range Block::1 bo:387 Volume offset:899 Expected:4 Got:1
62051 Shift 0, v:1 sv:1 min:2 cur:4
62052 Out of Range Block::1 bo:388 Volume offset:900 Expected:4 Got:1
62053 Shift 0, v:1 sv:1 min:2 cur:4
62054 Out of Range Block::1 bo:389 Volume offset:901 Expected:4 Got:1
62055 Shift 0, v:1 sv:1 min:2 cur:4
62056 Out of Range Block::1 bo:390 Volume offset:902 Expected:4 Got:1
62057 Shift 0, v:1 sv:1 min:2 cur:4
62058 Out of Range Block::1 bo:391 Volume offset:903 Expected:4 Got:1
62059 Shift 0, v:1 sv:1 min:2 cur:4
62060 Out of Range Block::1 bo:392 Volume offset:904 Expected:4 Got:1
62061 Shift 0, v:1 sv:1 min:2 cur:4
62062 Out of Range Block::1 bo:393 Volume offset:905 Expected:4 Got:1
62063 Shift 0, v:1 sv:1 min:2 cur:4
62064 Out of Range Block::1 bo:394 Volume offset:906 Expected:4 Got:1
62065 Shift 0, v:1 sv:1 min:2 cur:4
62066 Out of Range Block::1 bo:395 Volume offset:907 Expected:4 Got:1
62067 Shift 0, v:1 sv:1 min:2 cur:4
62068 Out of Range Block::1 bo:396 Volume offset:908 Expected:4 Got:1
62069 Shift 0, v:1 sv:1 min:2 cur:4
62070 Out of Range Block::1 bo:397 Volume offset:909 Expected:4 Got:1
62071 Shift 0, v:1 sv:1 min:2 cur:4
62072 Out of Range Block::1 bo:398 Volume offset:910 Expected:4 Got:1
62073 Shift 0, v:1 sv:1 min:2 cur:4
62074 Out of Range Block::1 bo:399 Volume offset:911 Expected:4 Got:1
62075 Shift 0, v:1 sv:1 min:2 cur:4
62076 Out of Range Block::1 bo:400 Volume offset:912 Expected:4 Got:1
62077 Shift 0, v:1 sv:1 min:2 cur:4
62078 Out of Range Block::1 bo:401 Volume offset:913 Expected:4 Got:1
62079 Shift 0, v:1 sv:1 min:2 cur:4
62080 Out of Range Block::1 bo:402 Volume offset:914 Expected:4 Got:1
62081 Shift 0, v:1 sv:1 min:2 cur:4
62082 Out of Range Block::1 bo:403 Volume offset:915 Expected:4 Got:1
62083 Shift 0, v:1 sv:1 min:2 cur:4
62084 Out of Range Block::1 bo:404 Volume offset:916 Expected:4 Got:1
62085 Shift 0, v:1 sv:1 min:2 cur:4
62086 Out of Range Block::1 bo:405 Volume offset:917 Expected:4 Got:1
62087 Shift 0, v:1 sv:1 min:2 cur:4
62088 Out of Range Block::1 bo:406 Volume offset:918 Expected:4 Got:1
62089 Shift 0, v:1 sv:1 min:2 cur:4
62090 Out of Range Block::1 bo:407 Volume offset:919 Expected:4 Got:1
62091 Shift 0, v:1 sv:1 min:2 cur:4
62092 Out of Range Block::1 bo:408 Volume offset:920 Expected:4 Got:1
62093 Shift 0, v:1 sv:1 min:2 cur:4
62094 Out of Range Block::1 bo:409 Volume offset:921 Expected:4 Got:1
62095 Shift 0, v:1 sv:1 min:2 cur:4
62096 Out of Range Block::1 bo:410 Volume offset:922 Expected:4 Got:1
62097 Shift 0, v:1 sv:1 min:2 cur:4
62098 Out of Range Block::1 bo:411 Volume offset:923 Expected:4 Got:1
62099 Shift 0, v:1 sv:1 min:2 cur:4
62100 Out of Range Block::1 bo:412 Volume offset:924 Expected:4 Got:1
62101 Shift 0, v:1 sv:1 min:2 cur:4
62102 Out of Range Block::1 bo:413 Volume offset:925 Expected:4 Got:1
62103 Shift 0, v:1 sv:1 min:2 cur:4
62104 Out of Range Block::1 bo:414 Volume offset:926 Expected:4 Got:1
62105 Shift 0, v:1 sv:1 min:2 cur:4
62106 Out of Range Block::1 bo:415 Volume offset:927 Expected:4 Got:1
62107 Shift 0, v:1 sv:1 min:2 cur:4
62108 Out of Range Block::1 bo:416 Volume offset:928 Expected:4 Got:1
62109 Shift 0, v:1 sv:1 min:2 cur:4
62110 Out of Range Block::1 bo:417 Volume offset:929 Expected:4 Got:1
62111 Shift 0, v:1 sv:1 min:2 cur:4
62112 Out of Range Block::1 bo:418 Volume offset:930 Expected:4 Got:1
62113 Shift 0, v:1 sv:1 min:2 cur:4
62114 Out of Range Block::1 bo:419 Volume offset:931 Expected:4 Got:1
62115 Shift 0, v:1 sv:1 min:2 cur:4
62116 Out of Range Block::1 bo:420 Volume offset:932 Expected:4 Got:1
62117 Shift 0, v:1 sv:1 min:2 cur:4
62118 Out of Range Block::1 bo:421 Volume offset:933 Expected:4 Got:1
62119 Shift 0, v:1 sv:1 min:2 cur:4
62120 Out of Range Block::1 bo:422 Volume offset:934 Expected:4 Got:1
62121 Shift 0, v:1 sv:1 min:2 cur:4
62122 Out of Range Block::1 bo:423 Volume offset:935 Expected:4 Got:1
62123 Shift 0, v:1 sv:1 min:2 cur:4
62124 Out of Range Block::1 bo:424 Volume offset:936 Expected:4 Got:1
62125 Shift 0, v:1 sv:1 min:2 cur:4
62126 Out of Range Block::1 bo:425 Volume offset:937 Expected:4 Got:1
62127 Shift 0, v:1 sv:1 min:2 cur:4
62128 Out of Range Block::1 bo:426 Volume offset:938 Expected:4 Got:1
62129 Shift 0, v:1 sv:1 min:2 cur:4
62130 Out of Range Block::1 bo:427 Volume offset:939 Expected:4 Got:1
62131 Shift 0, v:1 sv:1 min:2 cur:4
62132 Out of Range Block::1 bo:428 Volume offset:940 Expected:4 Got:1
62133 Shift 0, v:1 sv:1 min:2 cur:4
62134 Out of Range Block::1 bo:429 Volume offset:941 Expected:4 Got:1
62135 Shift 0, v:1 sv:1 min:2 cur:4
62136 Out of Range Block::1 bo:430 Volume offset:942 Expected:4 Got:1
62137 Shift 0, v:1 sv:1 min:2 cur:4
62138 Out of Range Block::1 bo:431 Volume offset:943 Expected:4 Got:1
62139 Shift 0, v:1 sv:1 min:2 cur:4
62140 Out of Range Block::1 bo:432 Volume offset:944 Expected:4 Got:1
62141 Shift 0, v:1 sv:1 min:2 cur:4
62142 Out of Range Block::1 bo:433 Volume offset:945 Expected:4 Got:1
62143 Shift 0, v:1 sv:1 min:2 cur:4
62144 Out of Range Block::1 bo:434 Volume offset:946 Expected:4 Got:1
62145 Shift 0, v:1 sv:1 min:2 cur:4
62146 Out of Range Block::1 bo:435 Volume offset:947 Expected:4 Got:1
62147 Shift 0, v:1 sv:1 min:2 cur:4
62148 Out of Range Block::1 bo:436 Volume offset:948 Expected:4 Got:1
62149 Shift 0, v:1 sv:1 min:2 cur:4
62150 Out of Range Block::1 bo:437 Volume offset:949 Expected:4 Got:1
62151 Shift 0, v:1 sv:1 min:2 cur:4
62152 Out of Range Block::1 bo:438 Volume offset:950 Expected:4 Got:1
62153 Shift 0, v:1 sv:1 min:2 cur:4
62154 Out of Range Block::1 bo:439 Volume offset:951 Expected:4 Got:1
62155 Shift 0, v:1 sv:1 min:2 cur:4
62156 Out of Range Block::1 bo:440 Volume offset:952 Expected:4 Got:1
62157 Shift 0, v:1 sv:1 min:2 cur:4
62158 Out of Range Block::1 bo:441 Volume offset:953 Expected:4 Got:1
62159 Shift 0, v:1 sv:1 min:2 cur:4
62160 Out of Range Block::1 bo:442 Volume offset:954 Expected:4 Got:1
62161 Shift 0, v:1 sv:1 min:2 cur:4
62162 Out of Range Block::1 bo:443 Volume offset:955 Expected:4 Got:1
62163 Shift 0, v:1 sv:1 min:2 cur:4
62164 Out of Range Block::1 bo:444 Volume offset:956 Expected:4 Got:1
62165 Shift 0, v:1 sv:1 min:2 cur:4
62166 Out of Range Block::1 bo:445 Volume offset:957 Expected:4 Got:1
62167 Shift 0, v:1 sv:1 min:2 cur:4
62168 Out of Range Block::1 bo:446 Volume offset:958 Expected:4 Got:1
62169 Shift 0, v:1 sv:1 min:2 cur:4
62170 Out of Range Block::1 bo:447 Volume offset:959 Expected:4 Got:1
62171 Shift 0, v:1 sv:1 min:2 cur:4
62172 Out of Range Block::1 bo:448 Volume offset:960 Expected:4 Got:1
62173 Shift 0, v:1 sv:1 min:2 cur:4
62174 Out of Range Block::1 bo:449 Volume offset:961 Expected:4 Got:1
62175 Shift 0, v:1 sv:1 min:2 cur:4
62176 Out of Range Block::1 bo:450 Volume offset:962 Expected:4 Got:1
62177 Shift 0, v:1 sv:1 min:2 cur:4
62178 Out of Range Block::1 bo:451 Volume offset:963 Expected:4 Got:1
62179 Shift 0, v:1 sv:1 min:2 cur:4
62180 Out of Range Block::1 bo:452 Volume offset:964 Expected:4 Got:1
62181 Shift 0, v:1 sv:1 min:2 cur:4
62182 Out of Range Block::1 bo:453 Volume offset:965 Expected:4 Got:1
62183 Shift 0, v:1 sv:1 min:2 cur:4
62184 Out of Range Block::1 bo:454 Volume offset:966 Expected:4 Got:1
62185 Shift 0, v:1 sv:1 min:2 cur:4
62186 Out of Range Block::1 bo:455 Volume offset:967 Expected:4 Got:1
62187 Shift 0, v:1 sv:1 min:2 cur:4
62188 Out of Range Block::1 bo:456 Volume offset:968 Expected:4 Got:1
62189 Shift 0, v:1 sv:1 min:2 cur:4
62190 Out of Range Block::1 bo:457 Volume offset:969 Expected:4 Got:1
62191 Shift 0, v:1 sv:1 min:2 cur:4
62192 Out of Range Block::1 bo:458 Volume offset:970 Expected:4 Got:1
62193 Shift 0, v:1 sv:1 min:2 cur:4
62194 Out of Range Block::1 bo:459 Volume offset:971 Expected:4 Got:1
62195 Shift 0, v:1 sv:1 min:2 cur:4
62196 Out of Range Block::1 bo:460 Volume offset:972 Expected:4 Got:1
62197 Shift 0, v:1 sv:1 min:2 cur:4
62198 Out of Range Block::1 bo:461 Volume offset:973 Expected:4 Got:1
62199 Shift 0, v:1 sv:1 min:2 cur:4
62200 Out of Range Block::1 bo:462 Volume offset:974 Expected:4 Got:1
62201 Shift 0, v:1 sv:1 min:2 cur:4
62202 Out of Range Block::1 bo:463 Volume offset:975 Expected:4 Got:1
62203 Shift 0, v:1 sv:1 min:2 cur:4
62204 Out of Range Block::1 bo:464 Volume offset:976 Expected:4 Got:1
62205 Shift 0, v:1 sv:1 min:2 cur:4
62206 Out of Range Block::1 bo:465 Volume offset:977 Expected:4 Got:1
62207 Shift 0, v:1 sv:1 min:2 cur:4
62208 Out of Range Block::1 bo:466 Volume offset:978 Expected:4 Got:1
62209 Shift 0, v:1 sv:1 min:2 cur:4
62210 Out of Range Block::1 bo:467 Volume offset:979 Expected:4 Got:1
62211 Shift 0, v:1 sv:1 min:2 cur:4
62212 Out of Range Block::1 bo:468 Volume offset:980 Expected:4 Got:1
62213 Shift 0, v:1 sv:1 min:2 cur:4
62214 Out of Range Block::1 bo:469 Volume offset:981 Expected:4 Got:1
62215 Shift 0, v:1 sv:1 min:2 cur:4
62216 Out of Range Block::1 bo:470 Volume offset:982 Expected:4 Got:1
62217 Shift 0, v:1 sv:1 min:2 cur:4
62218 Out of Range Block::1 bo:471 Volume offset:983 Expected:4 Got:1
62219 Shift 0, v:1 sv:1 min:2 cur:4
62220 Out of Range Block::1 bo:472 Volume offset:984 Expected:4 Got:1
62221 Shift 0, v:1 sv:1 min:2 cur:4
62222 Out of Range Block::1 bo:473 Volume offset:985 Expected:4 Got:1
62223 Shift 0, v:1 sv:1 min:2 cur:4
62224 Out of Range Block::1 bo:474 Volume offset:986 Expected:4 Got:1
62225 Shift 0, v:1 sv:1 min:2 cur:4
62226 Out of Range Block::1 bo:475 Volume offset:987 Expected:4 Got:1
62227 Shift 0, v:1 sv:1 min:2 cur:4
62228 Out of Range Block::1 bo:476 Volume offset:988 Expected:4 Got:1
62229 Shift 0, v:1 sv:1 min:2 cur:4
62230 Out of Range Block::1 bo:477 Volume offset:989 Expected:4 Got:1
62231 Shift 0, v:1 sv:1 min:2 cur:4
62232 Out of Range Block::1 bo:478 Volume offset:990 Expected:4 Got:1
62233 Shift 0, v:1 sv:1 min:2 cur:4
62234 Out of Range Block::1 bo:479 Volume offset:991 Expected:4 Got:1
62235 Shift 0, v:1 sv:1 min:2 cur:4
62236 Out of Range Block::1 bo:480 Volume offset:992 Expected:4 Got:1
62237 Shift 0, v:1 sv:1 min:2 cur:4
62238 Out of Range Block::1 bo:481 Volume offset:993 Expected:4 Got:1
62239 Shift 0, v:1 sv:1 min:2 cur:4
62240 Out of Range Block::1 bo:482 Volume offset:994 Expected:4 Got:1
62241 Shift 0, v:1 sv:1 min:2 cur:4
62242 Out of Range Block::1 bo:483 Volume offset:995 Expected:4 Got:1
62243 Shift 0, v:1 sv:1 min:2 cur:4
62244 Out of Range Block::1 bo:484 Volume offset:996 Expected:4 Got:1
62245 Shift 0, v:1 sv:1 min:2 cur:4
62246 Out of Range Block::1 bo:485 Volume offset:997 Expected:4 Got:1
62247 Shift 0, v:1 sv:1 min:2 cur:4
62248 Out of Range Block::1 bo:486 Volume offset:998 Expected:4 Got:1
62249 Shift 0, v:1 sv:1 min:2 cur:4
62250 Out of Range Block::1 bo:487 Volume offset:999 Expected:4 Got:1
62251 Shift 0, v:1 sv:1 min:2 cur:4
62252 Out of Range Block::1 bo:488 Volume offset:1000 Expected:4 Got:1
62253 Shift 0, v:1 sv:1 min:2 cur:4
62254 Out of Range Block::1 bo:489 Volume offset:1001 Expected:4 Got:1
62255 Shift 0, v:1 sv:1 min:2 cur:4
62256 Out of Range Block::1 bo:490 Volume offset:1002 Expected:4 Got:1
62257 Shift 0, v:1 sv:1 min:2 cur:4
62258 Out of Range Block::1 bo:491 Volume offset:1003 Expected:4 Got:1
62259 Shift 0, v:1 sv:1 min:2 cur:4
62260 Out of Range Block::1 bo:492 Volume offset:1004 Expected:4 Got:1
62261 Shift 0, v:1 sv:1 min:2 cur:4
62262 Out of Range Block::1 bo:493 Volume offset:1005 Expected:4 Got:1
62263 Shift 0, v:1 sv:1 min:2 cur:4
62264 Out of Range Block::1 bo:494 Volume offset:1006 Expected:4 Got:1
62265 Shift 0, v:1 sv:1 min:2 cur:4
62266 Out of Range Block::1 bo:495 Volume offset:1007 Expected:4 Got:1
62267 Shift 0, v:1 sv:1 min:2 cur:4
62268 Out of Range Block::1 bo:496 Volume offset:1008 Expected:4 Got:1
62269 Shift 0, v:1 sv:1 min:2 cur:4
62270 Out of Range Block::1 bo:497 Volume offset:1009 Expected:4 Got:1
62271 Shift 0, v:1 sv:1 min:2 cur:4
62272 Out of Range Block::1 bo:498 Volume offset:1010 Expected:4 Got:1
62273 Shift 0, v:1 sv:1 min:2 cur:4
62274 Out of Range Block::1 bo:499 Volume offset:1011 Expected:4 Got:1
62275 Shift 0, v:1 sv:1 min:2 cur:4
62276 Out of Range Block::1 bo:500 Volume offset:1012 Expected:4 Got:1
62277 Shift 0, v:1 sv:1 min:2 cur:4
62278 Out of Range Block::1 bo:501 Volume offset:1013 Expected:4 Got:1
62279 Shift 0, v:1 sv:1 min:2 cur:4
62280 Out of Range Block::1 bo:502 Volume offset:1014 Expected:4 Got:1
62281 Shift 0, v:1 sv:1 min:2 cur:4
62282 Out of Range Block::1 bo:503 Volume offset:1015 Expected:4 Got:1
62283 Shift 0, v:1 sv:1 min:2 cur:4
62284 Out of Range Block::1 bo:504 Volume offset:1016 Expected:4 Got:1
62285 Shift 0, v:1 sv:1 min:2 cur:4
62286 Out of Range Block::1 bo:505 Volume offset:1017 Expected:4 Got:1
62287 Shift 0, v:1 sv:1 min:2 cur:4
62288 Out of Range Block::1 bo:506 Volume offset:1018 Expected:4 Got:1
62289 Shift 0, v:1 sv:1 min:2 cur:4
62290 Out of Range Block::1 bo:507 Volume offset:1019 Expected:4 Got:1
62291 Shift 0, v:1 sv:1 min:2 cur:4
62292 Out of Range Block::1 bo:508 Volume offset:1020 Expected:4 Got:1
62293 Shift 0, v:1 sv:1 min:2 cur:4
62294 Out of Range Block::1 bo:509 Volume offset:1021 Expected:4 Got:1
62295 Shift 0, v:1 sv:1 min:2 cur:4
62296 Out of Range Block::1 bo:510 Volume offset:1022 Expected:4 Got:1
62297 Shift 0, v:1 sv:1 min:2 cur:4
62298 Out of Range Block::1 bo:511 Volume offset:1023 Expected:4 Got:1
62299 Shift 0, v:2 sv:2 min:2 cur:4
62300 Update block 1 to 2 (min:2 max:4 res:true)
62301 In Range Block::1 bo:1 Volume offset:513 Expected:4 Got:2
62302 Shift 0, v:2 sv:2 min:2 cur:2
62303 In Range Block::1 bo:2 Volume offset:514 Expected:4 Got:2
62304 Shift 0, v:2 sv:2 min:2 cur:2
62305 In Range Block::1 bo:3 Volume offset:515 Expected:4 Got:2
62306 Shift 0, v:2 sv:2 min:2 cur:2
62307 test test::test_wl_commit_range_update_rollover_above ... In Range Block::1 bo:4 Volume offset:516 Expected:4 Got:2
62308 Shift 0, v:2 sv:2 min:2 cur:2
62309 In Range Block::1 bo:5 Volume offset:517 Expected:4 Got:2
62310 Shift 0, v:2 sv:2 min:2 cur:2
62311 In Range Block::1 bo:6 Volume offset:518 Expected:4 Got:2
62312 Shift 0, v:2 sv:2 min:2 cur:2
62313 In Range Block::1 bo:7 Volume offset:519 Expected:4 Got:2
62314 Shift 0, v:2 sv:2 min:2 cur:2
62315 In Range Block::1 bo:8 Volume offset:520 Expected:4 Got:2
62316 Shift 0, v:2 sv:2 min:2 cur:2
62317 In Range Block::1 bo:9 Volume offset:521 Expected:4 Got:2
62318 Shift 0, v:2 sv:2 min:2 cur:2
62319 okIn Range Block::1 bo:10 Volume offset:522 Expected:4 Got:2
62320 Shift 0, v:2 sv:2 min:2 cur:2
62321 In Range Block::1 bo:11 Volume offset:523 Expected:4 Got:2
62322 Shift 0, v:2 sv:2 min:2 cur:2
62323 In Range Block::1 bo:12 Volume offset:524 Expected:4 Got:2
62324 Shift 0, v:2 sv:2 min:2 cur:2
62325 In Range Block::1 bo:13 Volume offset:525 Expected:4 Got:2
62326 Shift 0, v:2 sv:2 min:2 cur:2
62327 In Range Block::1 bo:14 Volume offset:526 Expected:4 Got:2
62328 Shift 0, v:2 sv:2 min:2 cur:2
62329 In Range Block::1 bo:15 Volume offset:527 Expected:4 Got:2
62330 Shift 0, v:2 sv:2 min:2 cur:2
62331 In Range Block::1 bo:16 Volume offset:528 Expected:4 Got:2
62332 Shift 0, v:2 sv:2 min:2 cur:2
62333 In Range Block::1 bo:17 Volume offset:529 Expected:4 Got:2
62334 Shift 0, v:2 sv:2 min:2 cur:2
62335 In Range Block::1 bo:18 Volume offset:530 Expected:4 Got:2
62336 Shift 0, v:2 sv:2 min:2 cur:2
62337 In Range Block::1 bo:19 Volume offset:531 Expected:4 Got:2
62338 Shift 0, v:2 sv:2 min:2 cur:2
62339 In Range Block::1 bo:20 Volume offset:532 Expected:4 Got:2
62340 Shift 0, v:2 sv:2 min:2 cur:2
62341 In Range Block::1 bo:21 Volume offset:533 Expected:4 Got:2
62342 Shift 0, v:2 sv:2 min:2 cur:2
62343 In Range Block::1 bo:22 Volume offset:534 Expected:4 Got:2
62344 Shift 0, v:2 sv:2 min:2 cur:2
62345 In Range Block::1 bo:23 Volume offset:535 Expected:4 Got:2
62346 Shift 0, v:2 sv:2 min:2 cur:2
62347 In Range Block::1 bo:24 Volume offset:536 Expected:4 Got:2
62348 Shift 0, v:2 sv:2 min:2 cur:2
62349 In Range Block::1 bo:25 Volume offset:537 Expected:4 Got:2
62350 Shift 0, v:2 sv:2 min:2 cur:2
62351 In Range Block::1 bo:26 Volume offset:538 Expected:4 Got:2
62352 Shift 0, v:2 sv:2 min:2 cur:2
62353 In Range Block::1 bo:27 Volume offset:539 Expected:4 Got:2
62354 Shift 0, v:2 sv:2 min:2 cur:2
62355 In Range Block::1 bo:28 Volume offset:540 Expected:4 Got:2
62356 Shift 0, v:2 sv:2 min:2 cur:2
62357 In Range Block::1 bo:29 Volume offset:541 Expected:4 Got:2
62358 Shift 0, v:2 sv:2 min:2 cur:2
62359 In Range Block::1 bo:30 Volume offset:542 Expected:4 Got:2
62360 Shift 0, v:2 sv:2 min:2 cur:2
62361 In Range Block::1 bo:31 Volume offset:543 Expected:4 Got:2
62362 Shift 0, v:2 sv:2 min:2 cur:2
62363 In Range Block::1 bo:32 Volume offset:544 Expected:4 Got:2
62364 Shift 0, v:2 sv:2 min:2 cur:2
62365 In Range Block::1 bo:33 Volume offset:545 Expected:4 Got:2
62366 Shift 0, v:2 sv:2 min:2 cur:2
62367 In Range Block::1 bo:34 Volume offset:546 Expected:4 Got:2
62368 Shift 0, v:2 sv:2 min:2 cur:2
62369 In Range Block::1 bo:35 Volume offset:547 Expected:4 Got:2
62370 Shift 0, v:2 sv:2 min:2 cur:2
62371 In Range Block::1 bo:36 Volume offset:548 Expected:4 Got:2
62372 Shift 0, v:2 sv:2 min:2 cur:2
62373 In Range Block::1 bo:37 Volume offset:549 Expected:4 Got:2
62374 Shift 0, v:2 sv:2 min:2 cur:2
62375 In Range Block::1 bo:38 Volume offset:550 Expected:4 Got:2
62376 Shift 0, v:2 sv:2 min:2 cur:2
62377 In Range Block::1 bo:39 Volume offset:551 Expected:4 Got:2
62378 Shift 0, v:2 sv:2 min:2 cur:2
62379 In Range Block::1 bo:40 Volume offset:552 Expected:4 Got:2
62380 Shift 0, v:2 sv:2 min:2 cur:2
62381 In Range Block::1 bo:41 Volume offset:553 Expected:4 Got:2
62382 Shift 0, v:2 sv:2 min:2 cur:2
62383 In Range Block::1 bo:42 Volume offset:554 Expected:4 Got:2
62384 Shift 0, v:2 sv:2 min:2 cur:2
62385 In Range Block::1 bo:43 Volume offset:555 Expected:4 Got:2
62386 Shift 0, v:2 sv:2 min:2 cur:2
62387 In Range Block::1 bo:44 Volume offset:556 Expected:4 Got:2
62388 Shift 0, v:2 sv:2 min:2 cur:2
62389 In Range Block::1 bo:45 Volume offset:557 Expected:4 Got:2
62390 Shift 0, v:2 sv:2 min:2 cur:2
62391 In Range Block::1 bo:46 Volume offset:558 Expected:4 Got:2
62392 Shift 0, v:2 sv:2 min:2 cur:2
62393 In Range Block::1 bo:47 Volume offset:559 Expected:4 Got:2
62394 Shift 0, v:2 sv:2 min:2 cur:2
62395 In Range Block::1 bo:48 Volume offset:560 Expected:4 Got:2
62396 Shift 0, v:2 sv:2 min:2 cur:2
62397 In Range Block::1 bo:49 Volume offset:561 Expected:4 Got:2
62398 Shift 0, v:2 sv:2 min:2 cur:2
62399 In Range Block::1 bo:50 Volume offset:562 Expected:4 Got:2
62400 Shift 0, v:2 sv:2 min:2 cur:2
62401 In Range Block::1 bo:51 Volume offset:563 Expected:4 Got:2
62402 Shift 0, v:2 sv:2 min:2 cur:2
62403 In Range Block::1 bo:52 Volume offset:564 Expected:4 Got:2
62404 Shift 0, v:2 sv:2 min:2 cur:2
62405 In Range Block::1 bo:53 Volume offset:565 Expected:4 Got:2
62406 Shift 0, v:2 sv:2 min:2 cur:2
62407 In Range Block::1 bo:54 Volume offset:566 Expected:4 Got:2
62408 Shift 0, v:2 sv:2 min:2 cur:2
62409 In Range Block::1 bo:55 Volume offset:567 Expected:4 Got:2
62410 Shift 0, v:2 sv:2 min:2 cur:2
62411 In Range Block::1 bo:56 Volume offset:568 Expected:4 Got:2
62412 Shift 0, v:2 sv:2 min:2 cur:2
62413 In Range Block::1 bo:57 Volume offset:569 Expected:4 Got:2
62414 Shift 0, v:2 sv:2 min:2 cur:2
62415 In Range Block::1 bo:58 Volume offset:570 Expected:4 Got:2
62416 Shift 0, v:2 sv:2 min:2 cur:2
62417 In Range Block::1 bo:59 Volume offset:571 Expected:4 Got:2
62418 Shift 0, v:2 sv:2 min:2 cur:2
62419 In Range Block::1 bo:60 Volume offset:572 Expected:4 Got:2
62420 Shift 0, v:2 sv:2 min:2 cur:2
62421 In Range Block::1 bo:61 Volume offset:573 Expected:4 Got:2
62422 Shift 0, v:2 sv:2 min:2 cur:2
62423 In Range Block::1 bo:62 Volume offset:574 Expected:4 Got:2
62424 Shift 0, v:2 sv:2 min:2 cur:2
62425 In Range Block::1 bo:63 Volume offset:575 Expected:4 Got:2
62426 Shift 0, v:2 sv:2 min:2 cur:2
62427 In Range Block::1 bo:64 Volume offset:576 Expected:4 Got:2
62428 Shift 0, v:2 sv:2 min:2 cur:2
62429 In Range Block::1 bo:65 Volume offset:577 Expected:4 Got:2
62430 Shift 0, v:2 sv:2 min:2 cur:2
62431 In Range Block::1 bo:66 Volume offset:578 Expected:4 Got:2
62432 Shift 0, v:2 sv:2 min:2 cur:2
62433 In Range Block::1 bo:67 Volume offset:579 Expected:4 Got:2
62434 Shift 0, v:2 sv:2 min:2 cur:2
62435 In Range Block::1 bo:68 Volume offset:580 Expected:4 Got:2
62436 Shift 0, v:2 sv:2 min:2 cur:2
62437 In Range Block::1 bo:69 Volume offset:581 Expected:4 Got:2
62438 Shift 0, v:2 sv:2 min:2 cur:2
62439 In Range Block::1 bo:70 Volume offset:582 Expected:4 Got:2
62440 Shift 0, v:2 sv:2 min:2 cur:2
62441 In Range Block::1 bo:71 Volume offset:583 Expected:4 Got:2
62442 Shift 0, v:2 sv:2 min:2 cur:2
62443 In Range Block::1 bo:72 Volume offset:584 Expected:4 Got:2
62444 Shift 0, v:2 sv:2 min:2 cur:2
62445 In Range Block::1 bo:73 Volume offset:585 Expected:4 Got:2
62446 Shift 0, v:2 sv:2 min:2 cur:2
62447 In Range Block::1 bo:74 Volume offset:586 Expected:4 Got:2
62448 Shift 0, v:2 sv:2 min:2 cur:2
62449 In Range Block::1 bo:75 Volume offset:587 Expected:4 Got:2
62450 Shift 0, v:2 sv:2 min:2 cur:2
62451 In Range Block::1 bo:76 Volume offset:588 Expected:4 Got:2
62452 Shift 0, v:2 sv:2 min:2 cur:2
62453 In Range Block::1 bo:77 Volume offset:589 Expected:4 Got:2
62454 Shift 0, v:2 sv:2 min:2 cur:2
62455 In Range Block::1 bo:78 Volume offset:590 Expected:4 Got:2
62456 Shift 0, v:2 sv:2 min:2 cur:2
62457 In Range Block::1 bo:79 Volume offset:591 Expected:4 Got:2
62458 Shift 0, v:2 sv:2 min:2 cur:2
62459 In Range Block::1 bo:80 Volume offset:592 Expected:4 Got:2
62460 Shift 0, v:2 sv:2 min:2 cur:2
62461 In Range Block::1 bo:81 Volume offset:593 Expected:4 Got:2
62462 Shift 0, v:2 sv:2 min:2 cur:2
62463 In Range Block::1 bo:82 Volume offset:594 Expected:4 Got:2
62464 
62465 Shift 0, v:2 sv:2 min:2 cur:2
62466 In Range Block::1 bo:83 Volume offset:595 Expected:4 Got:2
62467 Shift 0, v:2 sv:2 min:2 cur:2
62468 In Range Block::1 bo:84 Volume offset:596 Expected:4 Got:2
62469 Shift 0, v:2 sv:2 min:2 cur:2
62470 In Range Block::1 bo:85 Volume offset:597 Expected:4 Got:2
62471 Shift 0, v:2 sv:2 min:2 cur:2
62472 In Range Block::1 bo:86 Volume offset:598 Expected:4 Got:2
62473 Shift 0, v:2 sv:2 min:2 cur:2
62474 In Range Block::1 bo:87 Volume offset:599 Expected:4 Got:2
62475 Shift 0, v:2 sv:2 min:2 cur:2
62476 In Range Block::1 bo:88 Volume offset:600 Expected:4 Got:2
62477 Shift 0, v:2 sv:2 min:2 cur:2
62478 In Range Block::1 bo:89 Volume offset:601 Expected:4 Got:2
62479 Shift 0, v:2 sv:2 min:2 cur:2
62480 In Range Block::1 bo:90 Volume offset:602 Expected:4 Got:2
62481 Shift 0, v:2 sv:2 min:2 cur:2
62482 In Range Block::1 bo:91 Volume offset:603 Expected:4 Got:2
62483 Shift 0, v:2 sv:2 min:2 cur:2
62484 In Range Block::1 bo:92 Volume offset:604 Expected:4 Got:2
62485 Shift 0, v:2 sv:2 min:2 cur:2
62486 In Range Block::1 bo:93 Volume offset:605 Expected:4 Got:2
62487 Shift 0, v:2 sv:2 min:2 cur:2
62488 In Range Block::1 bo:94 Volume offset:606 Expected:4 Got:2
62489 Shift 0, v:2 sv:2 min:2 cur:2
62490 In Range Block::1 bo:95 Volume offset:607 Expected:4 Got:2
62491 Shift 0, v:2 sv:2 min:2 cur:2
62492 In Range Block::1 bo:96 Volume offset:608 Expected:4 Got:2
62493 Shift 0, v:2 sv:2 min:2 cur:2
62494 In Range Block::1 bo:97 Volume offset:609 Expected:4 Got:2
62495 Shift 0, v:2 sv:2 min:2 cur:2
62496 In Range Block::1 bo:98 Volume offset:610 Expected:4 Got:2
62497 Shift 0, v:2 sv:2 min:2 cur:2
62498 In Range Block::1 bo:99 Volume offset:611 Expected:4 Got:2
62499 Shift 0, v:2 sv:2 min:2 cur:2
62500 In Range Block::1 bo:100 Volume offset:612 Expected:4 Got:2
62501 Shift 0, v:2 sv:2 min:2 cur:2
62502 In Range Block::1 bo:101 Volume offset:613 Expected:4 Got:2
62503 Shift 0, v:2 sv:2 min:2 cur:2
62504 In Range Block::1 bo:102 Volume offset:614 Expected:4 Got:2
62505 Shift 0, v:2 sv:2 min:2 cur:2
62506 In Range Block::1 bo:103 Volume offset:615 Expected:4 Got:2
62507 Shift 0, v:2 sv:2 min:2 cur:2
62508 In Range Block::1 bo:104 Volume offset:616 Expected:4 Got:2
62509 Shift 0, v:2 sv:2 min:2 cur:2
62510 In Range Block::1 bo:105 Volume offset:617 Expected:4 Got:2
62511 Shift 0, v:2 sv:2 min:2 cur:2
62512 In Range Block::1 bo:106 Volume offset:618 Expected:4 Got:2
62513 Shift 0, v:2 sv:2 min:2 cur:2
62514 In Range Block::1 bo:107 Volume offset:619 Expected:4 Got:2
62515 Shift 0, v:2 sv:2 min:2 cur:2
62516 In Range Block::1 bo:108 Volume offset:620 Expected:4 Got:2
62517 Shift 0, v:2 sv:2 min:2 cur:2
62518 In Range Block::1 bo:109 Volume offset:621 Expected:4 Got:2
62519 Shift 0, v:2 sv:2 min:2 cur:2
62520 In Range Block::1 bo:110 Volume offset:622 Expected:4 Got:2
62521 Shift 0, v:2 sv:2 min:2 cur:2
62522 In Range Block::1 bo:111 Volume offset:623 Expected:4 Got:2
62523 Shift 0, v:2 sv:2 min:2 cur:2
62524 In Range Block::1 bo:112 Volume offset:624 Expected:4 Got:2
62525 Shift 0, v:2 sv:2 min:2 cur:2
62526 In Range Block::1 bo:113 Volume offset:625 Expected:4 Got:2
62527 Shift 0, v:2 sv:2 min:2 cur:2
62528 In Range Block::1 bo:114 Volume offset:626 Expected:4 Got:2
62529 Shift 0, v:2 sv:2 min:2 cur:2
62530 In Range Block::1 bo:115 Volume offset:627 Expected:4 Got:2
62531 Shift 0, v:2 sv:2 min:2 cur:2
62532 In Range Block::1 bo:116 Volume offset:628 Expected:4 Got:2
62533 Shift 0, v:2 sv:2 min:2 cur:2
62534 In Range Block::1 bo:117 Volume offset:629 Expected:4 Got:2
62535 Shift 0, v:2 sv:2 min:2 cur:2
62536 In Range Block::1 bo:118 Volume offset:630 Expected:4 Got:2
62537 test test::test_wl_commit_range_update_rollover_below ... Shift 0, v:2 sv:2 min:2 cur:2
62538 In Range Block::1 bo:119 Volume offset:631 Expected:4 Got:2
62539 Shift 0, v:2 sv:2 min:2 cur:2
62540 In Range Block::1 bo:120 Volume offset:632 Expected:4 Got:2
62541 Shift 0, v:2 sv:2 min:2 cur:2
62542 In Range Block::1 bo:121 Volume offset:633 Expected:4 Got:2
62543 Shift 0, v:2 sv:2 min:2 cur:2
62544 In Range Block::1 bo:122 Volume offset:634 Expected:4 Got:2
62545 Shift 0, v:2 sv:2 min:2 cur:2
62546 In Range Block::1 bo:123 Volume offset:635 Expected:4 Got:2
62547 Shift 0, v:2 sv:2 min:2 cur:2
62548 In Range Block::1 bo:124 Volume offset:636 Expected:4 Got:2
62549 Shift 0, v:2 sv:2 min:2 cur:2
62550 In Range Block::1 bo:125 Volume offset:637 Expected:4 Got:2
62551 Shift 0, v:2 sv:2 min:2 cur:2
62552 In Range Block::1 bo:126 Volume offset:638 Expected:4 Got:2
62553 Shift 0, v:2 sv:2 min:2 cur:2
62554 In Range Block::1 bo:127 Volume offset:639 Expected:4 Got:2
62555 Shift 0, v:2 sv:2 min:2 cur:2
62556 In Range Block::1 bo:128 Volume offset:640 Expected:4 Got:2
62557 Shift 0, v:2 sv:2 min:2 cur:2
62558 In Range Block::1 bo:129 Volume offset:641 Expected:4 Got:2
62559 Shift 0, v:2 sv:2 min:2 cur:2
62560 okIn Range Block::1 bo:130 Volume offset:642 Expected:4 Got:2
62561 Shift 0, v:2 sv:2 min:2 cur:2
62562 In Range Block::1 bo:131 Volume offset:643 Expected:4 Got:2
62563 Shift 0, v:2 sv:2 min:2 cur:2
62564 In Range Block::1 bo:132 Volume offset:644 Expected:4 Got:2
62565 Shift 0, v:2 sv:2 min:2 cur:2
62566 In Range Block::1 bo:133 Volume offset:645 Expected:4 Got:2
62567 Shift 0, v:2 sv:2 min:2 cur:2
62568 In Range Block::1 bo:134 Volume offset:646 Expected:4 Got:2
62569 Shift 0, v:2 sv:2 min:2 cur:2
62570 In Range Block::1 bo:135 Volume offset:647 Expected:4 Got:2
62571 Shift 0, v:2 sv:2 min:2 cur:2
62572 In Range Block::1 bo:136 Volume offset:648 Expected:4 Got:2
62573 Shift 0, v:2 sv:2 min:2 cur:2
62574 In Range Block::1 bo:137 Volume offset:649 Expected:4 Got:2
62575 Shift 0, v:2 sv:2 min:2 cur:2
62576 In Range Block::1 bo:138 Volume offset:650 Expected:4 Got:2
62577 Shift 0, v:2 sv:2 min:2 cur:2
62578 In Range Block::1 bo:139 Volume offset:651 Expected:4 Got:2
62579 Shift 0, v:2 sv:2 min:2 cur:2
62580 In Range Block::1 bo:140 Volume offset:652 Expected:4 Got:2
62581 Shift 0, v:2 sv:2 min:2 cur:2
62582 In Range Block::1 bo:141 Volume offset:653 Expected:4 Got:2
62583 Shift 0, v:2 sv:2 min:2 cur:2
62584 In Range Block::1 bo:142 Volume offset:654 Expected:4 Got:2
62585 
62586 Shift 0, v:2 sv:2 min:2 cur:2
62587 In Range Block::1 bo:143 Volume offset:655 Expected:4 Got:2
62588 Shift 0, v:2 sv:2 min:2 cur:2
62589 In Range Block::1 bo:144 Volume offset:656 Expected:4 Got:2
62590 Shift 0, v:2 sv:2 min:2 cur:2
62591 In Range Block::1 bo:145 Volume offset:657 Expected:4 Got:2
62592 Shift 0, v:2 sv:2 min:2 cur:2
62593 In Range Block::1 bo:146 Volume offset:658 Expected:4 Got:2
62594 Shift 0, v:2 sv:2 min:2 cur:2
62595 In Range Block::1 bo:147 Volume offset:659 Expected:4 Got:2
62596 Shift 0, v:2 sv:2 min:2 cur:2
62597 In Range Block::1 bo:148 Volume offset:660 Expected:4 Got:2
62598 Shift 0, v:2 sv:2 min:2 cur:2
62599 In Range Block::1 bo:149 Volume offset:661 Expected:4 Got:2
62600 Shift 0, v:2 sv:2 min:2 cur:2
62601 In Range Block::1 bo:150 Volume offset:662 Expected:4 Got:2
62602 Shift 0, v:2 sv:2 min:2 cur:2
62603 In Range Block::1 bo:151 Volume offset:663 Expected:4 Got:2
62604 Shift 0, v:2 sv:2 min:2 cur:2
62605 In Range Block::1 bo:152 Volume offset:664 Expected:4 Got:2
62606 Shift 0, v:2 sv:2 min:2 cur:2
62607 In Range Block::1 bo:153 Volume offset:665 Expected:4 Got:2
62608 Shift 0, v:2 sv:2 min:2 cur:2
62609 In Range Block::1 bo:154 Volume offset:666 Expected:4 Got:2
62610 Shift 0, v:2 sv:2 min:2 cur:2
62611 In Range Block::1 bo:155 Volume offset:667 Expected:4 Got:2
62612 Shift 0, v:2 sv:2 min:2 cur:2
62613 In Range Block::1 bo:156 Volume offset:668 Expected:4 Got:2
62614 Shift 0, v:2 sv:2 min:2 cur:2
62615 In Range Block::1 bo:157 Volume offset:669 Expected:4 Got:2
62616 Shift 0, v:2 sv:2 min:2 cur:2
62617 In Range Block::1 bo:158 Volume offset:670 Expected:4 Got:2
62618 Shift 0, v:2 sv:2 min:2 cur:2
62619 In Range Block::1 bo:159 Volume offset:671 Expected:4 Got:2
62620 Shift 0, v:2 sv:2 min:2 cur:2
62621 test test::test_wl_empty ... In Range Block::1 bo:160 Volume offset:672 Expected:4 Got:2
62622 Shift 0, v:2 sv:2 min:2 cur:2
62623 In Range Block::1 bo:161 Volume offset:673 Expected:4 Got:2
62624 Shift 0, v:2 sv:2 min:2 cur:2
62625 In Range Block::1 bo:162 Volume offset:674 Expected:4 Got:2
62626 Shift 0, v:2 sv:2 min:2 cur:2
62627 In Range Block::1 bo:163 Volume offset:675 Expected:4 Got:2
62628 Shift 0, v:2 sv:2 min:2 cur:2
62629 In Range Block::1 bo:164 Volume offset:676 Expected:4 Got:2
62630 Shift 0, v:2 sv:2 min:2 cur:2
62631 In Range Block::1 bo:165 Volume offset:677 Expected:4 Got:2
62632 Shift 0, v:2 sv:2 min:2 cur:2
62633 In Range Block::1 bo:166 Volume offset:678 Expected:4 Got:2
62634 Shift 0, v:2 sv:2 min:2 cur:2
62635 In Range Block::1 bo:167 Volume offset:679 Expected:4 Got:2
62636 Shift 0, v:2 sv:2 min:2 cur:2
62637 In Range Block::1 bo:168 Volume offset:680 Expected:4 Got:2
62638 Shift 0, v:2 sv:2 min:2 cur:2
62639 In Range Block::1 bo:169 Volume offset:681 Expected:4 Got:2
62640 Shift 0, v:2 sv:2 min:2 cur:2
62641 In Range Block::1 bo:170 Volume offset:682 Expected:4 Got:2
62642 Shift 0, v:2 sv:2 min:2 cur:2
62643 In Range Block::1 bo:171 Volume offset:683 Expected:4 Got:2
62644 Shift 0, v:2 sv:2 min:2 cur:2
62645 In Range Block::1 bo:172 Volume offset:684 Expected:4 Got:2
62646 Shift 0, v:2 sv:2 min:2 cur:2
62647 In Range Block::1 bo:173 Volume offset:685 Expected:4 Got:2
62648 Shift 0, v:2 sv:2 min:2 cur:2
62649 In Range Block::1 bo:174 Volume offset:686 Expected:4 Got:2
62650 Shift 0, v:2 sv:2 min:2 cur:2
62651 In Range Block::1 bo:175 Volume offset:687 Expected:4 Got:2
62652 Shift 0, v:2 sv:2 min:2 cur:2
62653 In Range Block::1 bo:176 Volume offset:688 Expected:4 Got:2
62654 Shift 0, v:2 sv:2 min:2 cur:2
62655 In Range Block::1 bo:177 Volume offset:689 Expected:4 Got:2
62656 Shift 0, v:2 sv:2 min:2 cur:2
62657 In Range Block::1 bo:178 Volume offset:690 Expected:4 Got:2
62658 Shift 0, v:2 sv:2 min:2 cur:2
62659 In Range Block::1 bo:179 Volume offset:691 Expected:4 Got:2
62660 Shift 0, v:2 sv:2 min:2 cur:2
62661 In Range Block::1 bo:180 Volume offset:692 Expected:4 Got:2
62662 Shift 0, v:2 sv:2 min:2 cur:2
62663 In Range Block::1 bo:181 Volume offset:693 Expected:4 Got:2
62664 Shift 0, v:2 sv:2 min:2 cur:2
62665 In Range Block::1 bo:182 Volume offset:694 Expected:4 Got:2
62666 Shift 0, v:2 sv:2 min:2 cur:2
62667 In Range Block::1 bo:183 Volume offset:695 Expected:4 Got:2
62668 Shift 0, v:2 sv:2 min:2 cur:2
62669 In Range Block::1 bo:184 Volume offset:696 Expected:4 Got:2
62670 Shift 0, v:2 sv:2 min:2 cur:2
62671 In Range Block::1 bo:185 Volume offset:697 Expected:4 Got:2
62672 Shift 0, v:2 sv:2 min:2 cur:2
62673 In Range Block::1 bo:186 Volume offset:698 Expected:4 Got:2
62674 Shift 0, v:2 sv:2 min:2 cur:2
62675 In Range Block::1 bo:187 Volume offset:699 Expected:4 Got:2
62676 Shift 0, v:2 sv:2 min:2 cur:2
62677 In Range Block::1 bo:188 Volume offset:700 Expected:4 Got:2
62678 Shift 0, v:2 sv:2 min:2 cur:2
62679 In Range Block::1 bo:189 Volume offset:701 Expected:4 Got:2
62680 Shift 0, v:2 sv:2 min:2 cur:2
62681 In Range Block::1 bo:190 Volume offset:702 Expected:4 Got:2
62682 Shift 0, v:2 sv:2 min:2 cur:2
62683 In Range Block::1 bo:191 Volume offset:703 Expected:4 Got:2
62684 Shift 0, v:2 sv:2 min:2 cur:2
62685 In Range Block::1 bo:192 Volume offset:704 Expected:4 Got:2
62686 Shift 0, v:2 sv:2 min:2 cur:2
62687 In Range Block::1 bo:193 Volume offset:705 Expected:4 Got:2
62688 Shift 0, v:2 sv:2 min:2 cur:2
62689 In Range Block::1 bo:194 Volume offset:706 Expected:4 Got:2
62690 Shift 0, v:2 sv:2 min:2 cur:2
62691 In Range Block::1 bo:195 Volume offset:707 Expected:4 Got:2
62692 Shift 0, v:2 sv:2 min:2 cur:2
62693 In Range Block::1 bo:196 Volume offset:708 Expected:4 Got:2
62694 Shift 0, v:2 sv:2 min:2 cur:2
62695 In Range Block::1 bo:197 Volume offset:709 Expected:4 Got:2
62696 Shift 0, v:2 sv:2 min:2 cur:2
62697 In Range Block::1 bo:198 Volume offset:710 Expected:4 Got:2
62698 Shift 0, v:2 sv:2 min:2 cur:2
62699 In Range Block::1 bo:199 Volume offset:711 Expected:4 Got:2
62700 Shift 0, v:2 sv:2 min:2 cur:2
62701 In Range Block::1 bo:200 Volume offset:712 Expected:4 Got:2
62702 Shift 0, v:2 sv:2 min:2 cur:2
62703 In Range Block::1 bo:201 Volume offset:713 Expected:4 Got:2
62704 Shift 0, v:2 sv:2 min:2 cur:2
62705 In Range Block::1 bo:202 Volume offset:714 Expected:4 Got:2
62706 Shift 0, v:2 sv:2 min:2 cur:2
62707 In Range Block::1 bo:203 Volume offset:715 Expected:4 Got:2
62708 Shift 0, v:2 sv:2 min:2 cur:2
62709 In Range Block::1 bo:204 Volume offset:716 Expected:4 Got:2
62710 Shift 0, v:2 sv:2 min:2 cur:2
62711 In Range Block::1 bo:205 Volume offset:717 Expected:4 Got:2
62712 Shift 0, v:2 sv:2 min:2 cur:2
62713 In Range Block::1 bo:206 Volume offset:718 Expected:4 Got:2
62714 Shift 0, v:2 sv:2 min:2 cur:2
62715 In Range Block::1 bo:207 Volume offset:719 Expected:4 Got:2
62716 Shift 0, v:2 sv:2 min:2 cur:2
62717 In Range Block::1 bo:208 Volume offset:720 Expected:4 Got:2
62718 Shift 0, v:2 sv:2 min:2 cur:2
62719 In Range Block::1 bo:209 Volume offset:721 Expected:4 Got:2
62720 Shift 0, v:2 sv:2 min:2 cur:2
62721 In Range Block::1 bo:210 Volume offset:722 Expected:4 Got:2
62722 Shift 0, v:2 sv:2 min:2 cur:2
62723 In Range Block::1 bo:211 Volume offset:723 Expected:4 Got:2
62724 Shift 0, v:2 sv:2 min:2 cur:2
62725 In Range Block::1 bo:212 Volume offset:724 Expected:4 Got:2
62726 Shift 0, v:2 sv:2 min:2 cur:2
62727 In Range Block::1 bo:213 Volume offset:725 Expected:4 Got:2
62728 Shift 0, v:2 sv:2 min:2 cur:2
62729 In Range Block::1 bo:214 Volume offset:726 Expected:4 Got:2
62730 Shift 0, v:2 sv:2 min:2 cur:2
62731 In Range Block::1 bo:215 Volume offset:727 Expected:4 Got:2
62732 Shift 0, v:2 sv:2 min:2 cur:2
62733 In Range Block::1 bo:216 Volume offset:728 Expected:4 Got:2
62734 Shift 0, v:2 sv:2 min:2 cur:2
62735 In Range Block::1 bo:217 Volume offset:729 Expected:4 Got:2
62736 Shift 0, v:2 sv:2 min:2 cur:2
62737 In Range Block::1 bo:218 Volume offset:730 Expected:4 Got:2
62738 Shift 0, v:2 sv:2 min:2 cur:2
62739 In Range Block::1 bo:219 Volume offset:731 Expected:4 Got:2
62740 Shift 0, v:2 sv:2 min:2 cur:2
62741 In Range Block::1 bo:220 Volume offset:732 Expected:4 Got:2
62742 Shift 0, v:2 sv:2 min:2 cur:2
62743 In Range Block::1 bo:221 Volume offset:733 Expected:4 Got:2
62744 Shift 0, v:2 sv:2 min:2 cur:2
62745 In Range Block::1 bo:222 Volume offset:734 Expected:4 Got:2
62746 Shift 0, v:2 sv:2 min:2 cur:2
62747 In Range Block::1 bo:223 Volume offset:735 Expected:4 Got:2
62748 Shift 0, v:2 sv:2 min:2 cur:2
62749 In Range Block::1 bo:224 Volume offset:736 Expected:4 Got:2
62750 Shift 0, v:2 sv:2 min:2 cur:2
62751 In Range Block::1 bo:225 Volume offset:737 Expected:4 Got:2
62752 Shift 0, v:2 sv:2 min:2 cur:2
62753 In Range Block::1 bo:226 Volume offset:738 Expected:4 Got:2
62754 Shift 0, v:2 sv:2 min:2 cur:2
62755 In Range Block::1 bo:227 Volume offset:739 Expected:4 Got:2
62756 Shift 0, v:2 sv:2 min:2 cur:2
62757 In Range Block::1 bo:228 Volume offset:740 Expected:4 Got:2
62758 Shift 0, v:2 sv:2 min:2 cur:2
62759 In Range Block::1 bo:229 Volume offset:741 Expected:4 Got:2
62760 Shift 0, v:2 sv:2 min:2 cur:2
62761 In Range Block::1 bo:230 Volume offset:742 Expected:4 Got:2
62762 Shift 0, v:2 sv:2 min:2 cur:2
62763 In Range Block::1 bo:231 Volume offset:743 Expected:4 Got:2
62764 Shift 0, v:2 sv:2 min:2 cur:2
62765 In Range Block::1 bo:232 Volume offset:744 Expected:4 Got:2
62766 Shift 0, v:2 sv:2 min:2 cur:2
62767 In Range Block::1 bo:233 Volume offset:745 Expected:4 Got:2
62768 Shift 0, v:2 sv:2 min:2 cur:2
62769 In Range Block::1 bo:234 Volume offset:746 Expected:4 Got:2
62770 Shift 0, v:2 sv:2 min:2 cur:2
62771 In Range Block::1 bo:235 Volume offset:747 Expected:4 Got:2
62772 Shift 0, v:2 sv:2 min:2 cur:2
62773 In Range Block::1 bo:236 Volume offset:748 Expected:4 Got:2
62774 Shift 0, v:2 sv:2 min:2 cur:2
62775 In Range Block::1 bo:237 Volume offset:749 Expected:4 Got:2
62776 Shift 0, v:2 sv:2 min:2 cur:2
62777 In Range Block::1 bo:238 Volume offset:750 Expected:4 Got:2
62778 Shift 0, v:2 sv:2 min:2 cur:2
62779 In Range Block::1 bo:239 Volume offset:751 Expected:4 Got:2
62780 Shift 0, v:2 sv:2 min:2 cur:2
62781 In Range Block::1 bo:240 Volume offset:752 Expected:4 Got:2
62782 Shift 0, v:2 sv:2 min:2 cur:2
62783 In Range Block::1 bo:241 Volume offset:753 Expected:4 Got:2
62784 Shift 0, v:2 sv:2 min:2 cur:2
62785 In Range Block::1 bo:242 Volume offset:754 Expected:4 Got:2
62786 Shift 0, v:2 sv:2 min:2 cur:2
62787 In Range Block::1 bo:243 Volume offset:755 Expected:4 Got:2
62788 Shift 0, v:2 sv:2 min:2 cur:2
62789 In Range Block::1 bo:244 Volume offset:756 Expected:4 Got:2
62790 Shift 0, v:2 sv:2 min:2 cur:2
62791 In Range Block::1 bo:245 Volume offset:757 Expected:4 Got:2
62792 Shift 0, v:2 sv:2 min:2 cur:2
62793 In Range Block::1 bo:246 Volume offset:758 Expected:4 Got:2
62794 Shift 0, v:2 sv:2 min:2 cur:2
62795 In Range Block::1 bo:247 Volume offset:759 Expected:4 Got:2
62796 Shift 0, v:2 sv:2 min:2 cur:2
62797 In Range Block::1 bo:248 Volume offset:760 Expected:4 Got:2
62798 Shift 0, v:2 sv:2 min:2 cur:2
62799 In Range Block::1 bo:249 Volume offset:761 Expected:4 Got:2
62800 Shift 0, v:2 sv:2 min:2 cur:2
62801 In Range Block::1 bo:250 Volume offset:762 Expected:4 Got:2
62802 Shift 0, v:2 sv:2 min:2 cur:2
62803 In Range Block::1 bo:251 Volume offset:763 Expected:4 Got:2
62804 Shift 0, v:2 sv:2 min:2 cur:2
62805 In Range Block::1 bo:252 Volume offset:764 Expected:4 Got:2
62806 Shift 0, v:2 sv:2 min:2 cur:2
62807 In Range Block::1 bo:253 Volume offset:765 Expected:4 Got:2
62808 Shift 0, v:2 sv:2 min:2 cur:2
62809 In Range Block::1 bo:254 Volume offset:766 Expected:4 Got:2
62810 Shift 0, v:2 sv:2 min:2 cur:2
62811 In Range Block::1 bo:255 Volume offset:767 Expected:4 Got:2
62812 Shift 0, v:2 sv:2 min:2 cur:2
62813 In Range Block::1 bo:256 Volume offset:768 Expected:4 Got:2
62814 okShift 0, v:2 sv:2 min:2 cur:2
62815 In Range Block::1 bo:257 Volume offset:769 Expected:4 Got:2
62816 Shift 0, v:2 sv:2 min:2 cur:2
62817 In Range Block::1 bo:258 Volume offset:770 Expected:4 Got:2
62818 Shift 0, v:2 sv:2 min:2 cur:2
62819 In Range Block::1 bo:259 Volume offset:771 Expected:4 Got:2
62820 Shift 0, v:2 sv:2 min:2 cur:2
62821 In Range Block::1 bo:260 Volume offset:772 Expected:4 Got:2
62822 Shift 0, v:2 sv:2 min:2 cur:2
62823 In Range Block::1 bo:261 Volume offset:773 Expected:4 Got:2
62824 Shift 0, v:2 sv:2 min:2 cur:2
62825 In Range Block::1 bo:262 Volume offset:774 Expected:4 Got:2
62826 Shift 0, v:2 sv:2 min:2 cur:2
62827 In Range Block::1 bo:263 Volume offset:775 Expected:4 Got:2
62828 Shift 0, v:2 sv:2 min:2 cur:2
62829 In Range Block::1 bo:264 Volume offset:776 Expected:4 Got:2
62830 Shift 0, v:2 sv:2 min:2 cur:2
62831 In Range Block::1 bo:265 Volume offset:777 Expected:4 Got:2
62832 Shift 0, v:2 sv:2 min:2 cur:2
62833 In Range Block::1 bo:266 Volume offset:778 Expected:4 Got:2
62834 Shift 0, v:2 sv:2 min:2 cur:2
62835 In Range Block::1 bo:267 Volume offset:779 Expected:4 Got:2
62836 Shift 0, v:2 sv:2 min:2 cur:2
62837 In Range Block::1 bo:268 Volume offset:780 Expected:4 Got:2
62838 Shift 0, v:2 sv:2 min:2 cur:2
62839 In Range Block::1 bo:269 Volume offset:781 Expected:4 Got:2
62840 Shift 0, v:2 sv:2 min:2 cur:2
62841 In Range Block::1 bo:270 Volume offset:782 Expected:4 Got:2
62842 Shift 0, v:2 sv:2 min:2 cur:2
62843 In Range Block::1 bo:271 Volume offset:783 Expected:4 Got:2
62844 Shift 0, v:2 sv:2 min:2 cur:2
62845 In Range Block::1 bo:272 Volume offset:784 Expected:4 Got:2
62846 Shift 0, v:2 sv:2 min:2 cur:2
62847 In Range Block::1 bo:273 Volume offset:785 Expected:4 Got:2
62848 Shift 0, v:2 sv:2 min:2 cur:2
62849 In Range Block::1 bo:274 Volume offset:786 Expected:4 Got:2
62850 Shift 0, v:2 sv:2 min:2 cur:2
62851 In Range Block::1 bo:275 Volume offset:787 Expected:4 Got:2
62852 Shift 0, v:2 sv:2 min:2 cur:2
62853 In Range Block::1 bo:276 Volume offset:788 Expected:4 Got:2
62854 Shift 0, v:2 sv:2 min:2 cur:2
62855 In Range Block::1 bo:277 Volume offset:789 Expected:4 Got:2
62856 Shift 0, v:2 sv:2 min:2 cur:2
62857 In Range Block::1 bo:278 Volume offset:790 Expected:4 Got:2
62858 Shift 0, v:2 sv:2 min:2 cur:2
62859 In Range Block::1 bo:279 Volume offset:791 Expected:4 Got:2
62860 Shift 0, v:2 sv:2 min:2 cur:2
62861 In Range Block::1 bo:280 Volume offset:792 Expected:4 Got:2
62862 Shift 0, v:2 sv:2 min:2 cur:2
62863 In Range Block::1 bo:281 Volume offset:793 Expected:4 Got:2
62864 Shift 0, v:2 sv:2 min:2 cur:2
62865 In Range Block::1 bo:282 Volume offset:794 Expected:4 Got:2
62866 Shift 0, v:2 sv:2 min:2 cur:2
62867 In Range Block::1 bo:283 Volume offset:795 Expected:4 Got:2
62868 Shift 0, v:2 sv:2 min:2 cur:2
62869 
62870 In Range Block::1 bo:284 Volume offset:796 Expected:4 Got:2
62871 Shift 0, v:2 sv:2 min:2 cur:2
62872 In Range Block::1 bo:285 Volume offset:797 Expected:4 Got:2
62873 Shift 0, v:2 sv:2 min:2 cur:2
62874 In Range Block::1 bo:286 Volume offset:798 Expected:4 Got:2
62875 Shift 0, v:2 sv:2 min:2 cur:2
62876 In Range Block::1 bo:287 Volume offset:799 Expected:4 Got:2
62877 Shift 0, v:2 sv:2 min:2 cur:2
62878 In Range Block::1 bo:288 Volume offset:800 Expected:4 Got:2
62879 Shift 0, v:2 sv:2 min:2 cur:2
62880 In Range Block::1 bo:289 Volume offset:801 Expected:4 Got:2
62881 Shift 0, v:2 sv:2 min:2 cur:2
62882 In Range Block::1 bo:290 Volume offset:802 Expected:4 Got:2
62883 Shift 0, v:2 sv:2 min:2 cur:2
62884 In Range Block::1 bo:291 Volume offset:803 Expected:4 Got:2
62885 Shift 0, v:2 sv:2 min:2 cur:2
62886 In Range Block::1 bo:292 Volume offset:804 Expected:4 Got:2
62887 Shift 0, v:2 sv:2 min:2 cur:2
62888 In Range Block::1 bo:293 Volume offset:805 Expected:4 Got:2
62889 Shift 0, v:2 sv:2 min:2 cur:2
62890 In Range Block::1 bo:294 Volume offset:806 Expected:4 Got:2
62891 Shift 0, v:2 sv:2 min:2 cur:2
62892 In Range Block::1 bo:295 Volume offset:807 Expected:4 Got:2
62893 Shift 0, v:2 sv:2 min:2 cur:2
62894 In Range Block::1 bo:296 Volume offset:808 Expected:4 Got:2
62895 Shift 0, v:2 sv:2 min:2 cur:2
62896 In Range Block::1 bo:297 Volume offset:809 Expected:4 Got:2
62897 Shift 0, v:2 sv:2 min:2 cur:2
62898 In Range Block::1 bo:298 Volume offset:810 Expected:4 Got:2
62899 Shift 0, v:2 sv:2 min:2 cur:2
62900 In Range Block::1 bo:299 Volume offset:811 Expected:4 Got:2
62901 Shift 0, v:2 sv:2 min:2 cur:2
62902 In Range Block::1 bo:300 Volume offset:812 Expected:4 Got:2
62903 Shift 0, v:2 sv:2 min:2 cur:2
62904 In Range Block::1 bo:301 Volume offset:813 Expected:4 Got:2
62905 Shift 0, v:2 sv:2 min:2 cur:2
62906 In Range Block::1 bo:302 Volume offset:814 Expected:4 Got:2
62907 Shift 0, v:2 sv:2 min:2 cur:2
62908 In Range Block::1 bo:303 Volume offset:815 Expected:4 Got:2
62909 Shift 0, v:2 sv:2 min:2 cur:2
62910 test test::test_wl_is_zero ... In Range Block::1 bo:304 Volume offset:816 Expected:4 Got:2
62911 Shift 0, v:2 sv:2 min:2 cur:2
62912 In Range Block::1 bo:305 Volume offset:817 Expected:4 Got:2
62913 Shift 0, v:2 sv:2 min:2 cur:2
62914 In Range Block::1 bo:306 Volume offset:818 Expected:4 Got:2
62915 Shift 0, v:2 sv:2 min:2 cur:2
62916 In Range Block::1 bo:307 Volume offset:819 Expected:4 Got:2
62917 Shift 0, v:2 sv:2 min:2 cur:2
62918 In Range Block::1 bo:308 Volume offset:820 Expected:4 Got:2
62919 Shift 0, v:2 sv:2 min:2 cur:2
62920 In Range Block::1 bo:309 Volume offset:821 Expected:4 Got:2
62921 Shift 0, v:2 sv:2 min:2 cur:2
62922 In Range Block::1 bo:310 Volume offset:822 Expected:4 Got:2
62923 Shift 0, v:2 sv:2 min:2 cur:2
62924 In Range Block::1 bo:311 Volume offset:823 Expected:4 Got:2
62925 Shift 0, v:2 sv:2 min:2 cur:2
62926 In Range Block::1 bo:312 Volume offset:824 Expected:4 Got:2
62927 Shift 0, v:2 sv:2 min:2 cur:2
62928 In Range Block::1 bo:313 Volume offset:825 Expected:4 Got:2
62929 Shift 0, v:2 sv:2 min:2 cur:2
62930 In Range Block::1 bo:314 Volume offset:826 Expected:4 Got:2
62931 Shift 0, v:2 sv:2 min:2 cur:2
62932 In Range Block::1 bo:315 Volume offset:827 Expected:4 Got:2
62933 Shift 0, v:2 sv:2 min:2 cur:2
62934 In Range Block::1 bo:316 Volume offset:828 Expected:4 Got:2
62935 Shift 0, v:2 sv:2 min:2 cur:2
62936 In Range Block::1 bo:317 Volume offset:829 Expected:4 Got:2
62937 Shift 0, v:2 sv:2 min:2 cur:2
62938 okIn Range Block::1 bo:318 Volume offset:830 Expected:4 Got:2
62939 Shift 0, v:2 sv:2 min:2 cur:2
62940 In Range Block::1 bo:319 Volume offset:831 Expected:4 Got:2
62941 Shift 0, v:2 sv:2 min:2 cur:2
62942 In Range Block::1 bo:320 Volume offset:832 Expected:4 Got:2
62943 Shift 0, v:2 sv:2 min:2 cur:2
62944 In Range Block::1 bo:321 Volume offset:833 Expected:4 Got:2
62945 Shift 0, v:2 sv:2 min:2 cur:2
62946 In Range Block::1 bo:322 Volume offset:834 Expected:4 Got:2
62947 Shift 0, v:2 sv:2 min:2 cur:2
62948 In Range Block::1 bo:323 Volume offset:835 Expected:4 Got:2
62949 Shift 0, v:2 sv:2 min:2 cur:2
62950 In Range Block::1 bo:324 Volume offset:836 Expected:4 Got:2
62951 Shift 0, v:2 sv:2 min:2 cur:2
62952 In Range Block::1 bo:325 Volume offset:837 Expected:4 Got:2
62953 Shift 0, v:2 sv:2 min:2 cur:2
62954 In Range Block::1 bo:326 Volume offset:838 Expected:4 Got:2
62955 Shift 0, v:2 sv:2 min:2 cur:2
62956 In Range Block::1 bo:327 Volume offset:839 Expected:4 Got:2
62957 Shift 0, v:2 sv:2 min:2 cur:2
62958 In Range Block::1 bo:328 Volume offset:840 Expected:4 Got:2
62959 Shift 0, v:2 sv:2 min:2 cur:2
62960 In Range Block::1 bo:329 Volume offset:841 Expected:4 Got:2
62961 Shift 0, v:2 sv:2 min:2 cur:2
62962 In Range Block::1 bo:330 Volume offset:842 Expected:4 Got:2
62963 Shift 0, v:2 sv:2 min:2 cur:2
62964 In Range Block::1 bo:331 Volume offset:843 Expected:4 Got:2
62965 Shift 0, v:2 sv:2 min:2 cur:2
62966 In Range Block::1 bo:332 Volume offset:844 Expected:4 Got:2
62967 Shift 0, v:2 sv:2 min:2 cur:2
62968 In Range Block::1 bo:333 Volume offset:845 Expected:4 Got:2
62969 Shift 0, v:2 sv:2 min:2 cur:2
62970 In Range Block::1 bo:334 Volume offset:846 Expected:4 Got:2
62971 Shift 0, v:2 sv:2 min:2 cur:2
62972 In Range Block::1 bo:335 Volume offset:847 Expected:4 Got:2
62973 Shift 0, v:2 sv:2 min:2 cur:2
62974 In Range Block::1 bo:336 Volume offset:848 Expected:4 Got:2
62975 Shift 0, v:2 sv:2 min:2 cur:2
62976 In Range Block::1 bo:337 Volume offset:849 Expected:4 Got:2
62977 Shift 0, v:2 sv:2 min:2 cur:2
62978 In Range Block::1 bo:338 Volume offset:850 Expected:4 Got:2
62979 Shift 0, v:2 sv:2 min:2 cur:2
62980 In Range Block::1 bo:339 Volume offset:851 Expected:4 Got:2
62981 Shift 0, v:2 sv:2 min:2 cur:2
62982 In Range Block::1 bo:340 Volume offset:852 Expected:4 Got:2
62983 Shift 0, v:2 sv:2 min:2 cur:2
62984 In Range Block::1 bo:341 Volume offset:853 Expected:4 Got:2
62985 Shift 0, v:2 sv:2 min:2 cur:2
62986 In Range Block::1 bo:342 Volume offset:854 Expected:4 Got:2
62987 Shift 0, v:2 sv:2 min:2 cur:2
62988 In Range Block::1 bo:343 Volume offset:855 Expected:4 Got:2
62989 Shift 0, v:2 sv:2 min:2 cur:2
62990 In Range Block::1 bo:344 Volume offset:856 Expected:4 Got:2
62991 Shift 0, v:2 sv:2 min:2 cur:2
62992 In Range Block::1 bo:345 Volume offset:857 Expected:4 Got:2
62993 Shift 0, v:2 sv:2 min:2 cur:2
62994 In Range Block::1 bo:346 Volume offset:858 Expected:4 Got:2
62995 Shift 0, v:2 sv:2 min:2 cur:2
62996 In Range Block::1 bo:347 Volume offset:859 Expected:4 Got:2
62997 Shift 0, v:2 sv:2 min:2 cur:2
62998 In Range Block::1 bo:348 Volume offset:860 Expected:4 Got:2
62999 Shift 0, v:2 sv:2 min:2 cur:2
63000 In Range Block::1 bo:349 Volume offset:861 Expected:4 Got:2
63001 Shift 0, v:2 sv:2 min:2 cur:2
63002 In Range Block::1 bo:350 Volume offset:862 Expected:4 Got:2
63003 Shift 0, v:2 sv:2 min:2 cur:2
63004 In Range Block::1 bo:351 Volume offset:863 Expected:4 Got:2
63005 Shift 0, v:2 sv:2 min:2 cur:2
63006 In Range Block::1 bo:352 Volume offset:864 Expected:4 Got:2
63007 Shift 0, v:2 sv:2 min:2 cur:2
63008 In Range Block::1 bo:353 Volume offset:865 Expected:4 Got:2
63009 Shift 0, v:2 sv:2 min:2 cur:2
63010 In Range Block::1 bo:354 Volume offset:866 Expected:4 Got:2
63011 Shift 0, v:2 sv:2 min:2 cur:2
63012 In Range Block::1 bo:355 Volume offset:867 Expected:4 Got:2
63013 Shift 0, v:2 sv:2 min:2 cur:2
63014 In Range Block::1 bo:356 Volume offset:868 Expected:4 Got:2
63015 Shift 0, v:2 sv:2 min:2 cur:2
63016 In Range Block::1 bo:357 Volume offset:869 Expected:4 Got:2
63017 Shift 0, v:2 sv:2 min:2 cur:2
63018 In Range Block::1 bo:358 Volume offset:870 Expected:4 Got:2
63019 Shift 0, v:2 sv:2 min:2 cur:2
63020 In Range Block::1 bo:359 Volume offset:871 Expected:4 Got:2
63021 Shift 0, v:2 sv:2 min:2 cur:2
63022 In Range Block::1 bo:360 Volume offset:872 Expected:4 Got:2
63023 Shift 0, v:2 sv:2 min:2 cur:2
63024 In Range Block::1 bo:361 Volume offset:873 Expected:4 Got:2
63025 Shift 0, v:2 sv:2 min:2 cur:2
63026 In Range Block::1 bo:362 Volume offset:874 Expected:4 Got:2
63027 Shift 0, v:2 sv:2 min:2 cur:2
63028 In Range Block::1 bo:363 Volume offset:875 Expected:4 Got:2
63029 Shift 0, v:2 sv:2 min:2 cur:2
63030 In Range Block::1 bo:364 Volume offset:876 Expected:4 Got:2
63031 Shift 0, v:2 sv:2 min:2 cur:2
63032 In Range Block::1 bo:365 Volume offset:877 Expected:4 Got:2
63033 Shift 0, v:2 sv:2 min:2 cur:2
63034 In Range Block::1 bo:366 Volume offset:878 Expected:4 Got:2
63035 Shift 0, v:2 sv:2 min:2 cur:2
63036 In Range Block::1 bo:367 Volume offset:879 Expected:4 Got:2
63037 Shift 0, v:2 sv:2 min:2 cur:2
63038 In Range Block::1 bo:368 Volume offset:880 Expected:4 Got:2
63039 Shift 0, v:2 sv:2 min:2 cur:2
63040 In Range Block::1 bo:369 Volume offset:881 Expected:4 Got:2
63041 Shift 0, v:2 sv:2 min:2 cur:2
63042 In Range Block::1 bo:370 Volume offset:882 Expected:4 Got:2
63043 Shift 0, v:2 sv:2 min:2 cur:2
63044 In Range Block::1 bo:371 Volume offset:883 Expected:4 Got:2
63045 Shift 0, v:2 sv:2 min:2 cur:2
63046 In Range Block::1 bo:372 Volume offset:884 Expected:4 Got:2
63047 Shift 0, v:2 sv:2 min:2 cur:2
63048 In Range Block::1 bo:373 Volume offset:885 Expected:4 Got:2
63049 Shift 0, v:2 sv:2 min:2 cur:2
63050 In Range Block::1 bo:374 Volume offset:886 Expected:4 Got:2
63051 Shift 0, v:2 sv:2 min:2 cur:2
63052 In Range Block::1 bo:375 Volume offset:887 Expected:4 Got:2
63053 Shift 0, v:2 sv:2 min:2 cur:2
63054 In Range Block::1 bo:376 Volume offset:888 Expected:4 Got:2
63055 Shift 0, v:2 sv:2 min:2 cur:2
63056 In Range Block::1 bo:377 Volume offset:889 Expected:4 Got:2
63057 Shift 0, v:2 sv:2 min:2 cur:2
63058 In Range Block::1 bo:378 Volume offset:890 Expected:4 Got:2
63059 Shift 0, v:2 sv:2 min:2 cur:2
63060 In Range Block::1 bo:379 Volume offset:891 Expected:4 Got:2
63061 Shift 0, v:2 sv:2 min:2 cur:2
63062 In Range Block::1 bo:380 Volume offset:892 Expected:4 Got:2
63063 Shift 0, v:2 sv:2 min:2 cur:2
63064 In Range Block::1 bo:381 Volume offset:893 Expected:4 Got:2
63065 Shift 0, v:2 sv:2 min:2 cur:2
63066 In Range Block::1 bo:382 Volume offset:894 Expected:4 Got:2
63067 Shift 0, v:2 sv:2 min:2 cur:2
63068 In Range Block::1 bo:383 Volume offset:895 Expected:4 Got:2
63069 Shift 0, v:2 sv:2 min:2 cur:2
63070 
63071 In Range Block::1 bo:384 Volume offset:896 Expected:4 Got:2
63072 Shift 0, v:2 sv:2 min:2 cur:2
63073 In Range Block::1 bo:385 Volume offset:897 Expected:4 Got:2
63074 Shift 0, v:2 sv:2 min:2 cur:2
63075 In Range Block::1 bo:386 Volume offset:898 Expected:4 Got:2
63076 Shift 0, v:2 sv:2 min:2 cur:2
63077 In Range Block::1 bo:387 Volume offset:899 Expected:4 Got:2
63078 Shift 0, v:2 sv:2 min:2 cur:2
63079 In Range Block::1 bo:388 Volume offset:900 Expected:4 Got:2
63080 Shift 0, v:2 sv:2 min:2 cur:2
63081 In Range Block::1 bo:389 Volume offset:901 Expected:4 Got:2
63082 Shift 0, v:2 sv:2 min:2 cur:2
63083 In Range Block::1 bo:390 Volume offset:902 Expected:4 Got:2
63084 Shift 0, v:2 sv:2 min:2 cur:2
63085 In Range Block::1 bo:391 Volume offset:903 Expected:4 Got:2
63086 Shift 0, v:2 sv:2 min:2 cur:2
63087 In Range Block::1 bo:392 Volume offset:904 Expected:4 Got:2
63088 Shift 0, v:2 sv:2 min:2 cur:2
63089 In Range Block::1 bo:393 Volume offset:905 Expected:4 Got:2
63090 Shift 0, v:2 sv:2 min:2 cur:2
63091 In Range Block::1 bo:394 Volume offset:906 Expected:4 Got:2
63092 Shift 0, v:2 sv:2 min:2 cur:2
63093 In Range Block::1 bo:395 Volume offset:907 Expected:4 Got:2
63094 Shift 0, v:2 sv:2 min:2 cur:2
63095 In Range Block::1 bo:396 Volume offset:908 Expected:4 Got:2
63096 Shift 0, v:2 sv:2 min:2 cur:2
63097 In Range Block::1 bo:397 Volume offset:909 Expected:4 Got:2
63098 Shift 0, v:2 sv:2 min:2 cur:2
63099 In Range Block::1 bo:398 Volume offset:910 Expected:4 Got:2
63100 Shift 0, v:2 sv:2 min:2 cur:2
63101 In Range Block::1 bo:399 Volume offset:911 Expected:4 Got:2
63102 Shift 0, v:2 sv:2 min:2 cur:2
63103 In Range Block::1 bo:400 Volume offset:912 Expected:4 Got:2
63104 Shift 0, v:2 sv:2 min:2 cur:2
63105 In Range Block::1 bo:401 Volume offset:913 Expected:4 Got:2
63106 Shift 0, v:2 sv:2 min:2 cur:2
63107 In Range Block::1 bo:402 Volume offset:914 Expected:4 Got:2
63108 Shift 0, v:2 sv:2 min:2 cur:2
63109 In Range Block::1 bo:403 Volume offset:915 Expected:4 Got:2
63110 Shift 0, v:2 sv:2 min:2 cur:2
63111 In Range Block::1 bo:404 Volume offset:916 Expected:4 Got:2
63112 Shift 0, v:2 sv:2 min:2 cur:2
63113 In Range Block::1 bo:405 Volume offset:917 Expected:4 Got:2
63114 Shift 0, v:2 sv:2 min:2 cur:2
63115 In Range Block::1 bo:406 Volume offset:918 Expected:4 Got:2
63116 Shift 0, v:2 sv:2 min:2 cur:2
63117 In Range Block::1 bo:407 Volume offset:919 Expected:4 Got:2
63118 Shift 0, v:2 sv:2 min:2 cur:2
63119 In Range Block::1 bo:408 Volume offset:920 Expected:4 Got:2
63120 Shift 0, v:2 sv:2 min:2 cur:2
63121 In Range Block::1 bo:409 Volume offset:921 Expected:4 Got:2
63122 Shift 0, v:2 sv:2 min:2 cur:2
63123 In Range Block::1 bo:410 Volume offset:922 Expected:4 Got:2
63124 Shift 0, v:2 sv:2 min:2 cur:2
63125 In Range Block::1 bo:411 Volume offset:923 Expected:4 Got:2
63126 Shift 0, v:2 sv:2 min:2 cur:2
63127 In Range Block::1 bo:412 Volume offset:924 Expected:4 Got:2
63128 Shift 0, v:2 sv:2 min:2 cur:2
63129 In Range Block::1 bo:413 Volume offset:925 Expected:4 Got:2
63130 Shift 0, v:2 sv:2 min:2 cur:2
63131 In Range Block::1 bo:414 Volume offset:926 Expected:4 Got:2
63132 Shift 0, v:2 sv:2 min:2 cur:2
63133 In Range Block::1 bo:415 Volume offset:927 Expected:4 Got:2
63134 Shift 0, v:2 sv:2 min:2 cur:2
63135 In Range Block::1 bo:416 Volume offset:928 Expected:4 Got:2
63136 Shift 0, v:2 sv:2 min:2 cur:2
63137 In Range Block::1 bo:417 Volume offset:929 Expected:4 Got:2
63138 Shift 0, v:2 sv:2 min:2 cur:2
63139 In Range Block::1 bo:418 Volume offset:930 Expected:4 Got:2
63140 Shift 0, v:2 sv:2 min:2 cur:2
63141 In Range Block::1 bo:419 Volume offset:931 Expected:4 Got:2
63142 Shift 0, v:2 sv:2 min:2 cur:2
63143 In Range Block::1 bo:420 Volume offset:932 Expected:4 Got:2
63144 Shift 0, v:2 sv:2 min:2 cur:2
63145 In Range Block::1 bo:421 Volume offset:933 Expected:4 Got:2
63146 Shift 0, v:2 sv:2 min:2 cur:2
63147 In Range Block::1 bo:422 Volume offset:934 Expected:4 Got:2
63148 Shift 0, v:2 sv:2 min:2 cur:2
63149 In Range Block::1 bo:423 Volume offset:935 Expected:4 Got:2
63150 Shift 0, v:2 sv:2 min:2 cur:2
63151 In Range Block::1 bo:424 Volume offset:936 Expected:4 Got:2
63152 Shift 0, v:2 sv:2 min:2 cur:2
63153 In Range Block::1 bo:425 Volume offset:937 Expected:4 Got:2
63154 Shift 0, v:2 sv:2 min:2 cur:2
63155 In Range Block::1 bo:426 Volume offset:938 Expected:4 Got:2
63156 Shift 0, v:2 sv:2 min:2 cur:2
63157 In Range Block::1 bo:427 Volume offset:939 Expected:4 Got:2
63158 Shift 0, v:2 sv:2 min:2 cur:2
63159 In Range Block::1 bo:428 Volume offset:940 Expected:4 Got:2
63160 Shift 0, v:2 sv:2 min:2 cur:2
63161 In Range Block::1 bo:429 Volume offset:941 Expected:4 Got:2
63162 Shift 0, v:2 sv:2 min:2 cur:2
63163 In Range Block::1 bo:430 Volume offset:942 Expected:4 Got:2
63164 Shift 0, v:2 sv:2 min:2 cur:2
63165 In Range Block::1 bo:431 Volume offset:943 Expected:4 Got:2
63166 Shift 0, v:2 sv:2 min:2 cur:2
63167 In Range Block::1 bo:432 Volume offset:944 Expected:4 Got:2
63168 Shift 0, v:2 sv:2 min:2 cur:2
63169 In Range Block::1 bo:433 Volume offset:945 Expected:4 Got:2
63170 Shift 0, v:2 sv:2 min:2 cur:2
63171 In Range Block::1 bo:434 Volume offset:946 Expected:4 Got:2
63172 Shift 0, v:2 sv:2 min:2 cur:2
63173 In Range Block::1 bo:435 Volume offset:947 Expected:4 Got:2
63174 Shift 0, v:2 sv:2 min:2 cur:2
63175 In Range Block::1 bo:436 Volume offset:948 Expected:4 Got:2
63176 Shift 0, v:2 sv:2 min:2 cur:2
63177 In Range Block::1 bo:437 Volume offset:949 Expected:4 Got:2
63178 Shift 0, v:2 sv:2 min:2 cur:2
63179 In Range Block::1 bo:438 Volume offset:950 Expected:4 Got:2
63180 Shift 0, v:2 sv:2 min:2 cur:2
63181 In Range Block::1 bo:439 Volume offset:951 Expected:4 Got:2
63182 Shift 0, v:2 sv:2 min:2 cur:2
63183 In Range Block::1 bo:440 Volume offset:952 Expected:4 Got:2
63184 Shift 0, v:2 sv:2 min:2 cur:2
63185 In Range Block::1 bo:441 Volume offset:953 Expected:4 Got:2
63186 Shift 0, v:2 sv:2 min:2 cur:2
63187 In Range Block::1 bo:442 Volume offset:954 Expected:4 Got:2
63188 Shift 0, v:2 sv:2 min:2 cur:2
63189 In Range Block::1 bo:443 Volume offset:955 Expected:4 Got:2
63190 Shift 0, v:2 sv:2 min:2 cur:2
63191 In Range Block::1 bo:444 Volume offset:956 Expected:4 Got:2
63192 Shift 0, v:2 sv:2 min:2 cur:2
63193 In Range Block::1 bo:445 Volume offset:957 Expected:4 Got:2
63194 Shift 0, v:2 sv:2 min:2 cur:2
63195 In Range Block::1 bo:446 Volume offset:958 Expected:4 Got:2
63196 Shift 0, v:2 sv:2 min:2 cur:2
63197 In Range Block::1 bo:447 Volume offset:959 Expected:4 Got:2
63198 Shift 0, v:2 sv:2 min:2 cur:2
63199 In Range Block::1 bo:448 Volume offset:960 Expected:4 Got:2
63200 Shift 0, v:2 sv:2 min:2 cur:2
63201 In Range Block::1 bo:449 Volume offset:961 Expected:4 Got:2
63202 Shift 0, v:2 sv:2 min:2 cur:2
63203 In Range Block::1 bo:450 Volume offset:962 Expected:4 Got:2
63204 Shift 0, v:2 sv:2 min:2 cur:2
63205 In Range Block::1 bo:451 Volume offset:963 Expected:4 Got:2
63206 Shift 0, v:2 sv:2 min:2 cur:2
63207 In Range Block::1 bo:452 Volume offset:964 Expected:4 Got:2
63208 Shift 0, v:2 sv:2 min:2 cur:2
63209 In Range Block::1 bo:453 Volume offset:965 Expected:4 Got:2
63210 Shift 0, v:2 sv:2 min:2 cur:2
63211 In Range Block::1 bo:454 Volume offset:966 Expected:4 Got:2
63212 Shift 0, v:2 sv:2 min:2 cur:2
63213 In Range Block::1 bo:455 Volume offset:967 Expected:4 Got:2
63214 Shift 0, v:2 sv:2 min:2 cur:2
63215 In Range Block::1 bo:456 Volume offset:968 Expected:4 Got:2
63216 Shift 0, v:2 sv:2 min:2 cur:2
63217 In Range Block::1 bo:457 Volume offset:969 Expected:4 Got:2
63218 test test::test_wl_set ... Shift 0, v:2 sv:2 min:2 cur:2
63219 In Range Block::1 bo:458 Volume offset:970 Expected:4 Got:2
63220 Shift 0, v:2 sv:2 min:2 cur:2
63221 In Range Block::1 bo:459 Volume offset:971 Expected:4 Got:2
63222 Shift 0, v:2 sv:2 min:2 cur:2
63223 In Range Block::1 bo:460 Volume offset:972 Expected:4 Got:2
63224 Shift 0, v:2 sv:2 min:2 cur:2
63225 In Range Block::1 bo:461 Volume offset:973 Expected:4 Got:2
63226 Shift 0, v:2 sv:2 min:2 cur:2
63227 In Range Block::1 bo:462 Volume offset:974 Expected:4 Got:2
63228 Shift 0, v:2 sv:2 min:2 cur:2
63229 In Range Block::1 bo:463 Volume offset:975 Expected:4 Got:2
63230 Shift 0, v:2 sv:2 min:2 cur:2
63231 In Range Block::1 bo:464 Volume offset:976 Expected:4 Got:2
63232 Shift 0, v:2 sv:2 min:2 cur:2
63233 In Range Block::1 bo:465 Volume offset:977 Expected:4 Got:2
63234 Shift 0, v:2 sv:2 min:2 cur:2
63235 In Range Block::1 bo:466 Volume offset:978 Expected:4 Got:2
63236 Shift 0, v:2 sv:2 min:2 cur:2
63237 In Range Block::1 bo:467 Volume offset:979 Expected:4 Got:2
63238 Shift 0, v:2 sv:2 min:2 cur:2
63239 In Range Block::1 bo:468 Volume offset:980 Expected:4 Got:2
63240 Shift 0, v:2 sv:2 min:2 cur:2
63241 In Range Block::1 bo:469 Volume offset:981 Expected:4 Got:2
63242 Shift 0, v:2 sv:2 min:2 cur:2
63243 In Range Block::1 bo:470 Volume offset:982 Expected:4 Got:2
63244 Shift 0, v:2 sv:2 min:2 cur:2
63245 In Range Block::1 bo:471 Volume offset:983 Expected:4 Got:2
63246 Shift 0, v:2 sv:2 min:2 cur:2
63247 In Range Block::1 bo:472 Volume offset:984 Expected:4 Got:2
63248 Shift 0, v:2 sv:2 min:2 cur:2
63249 In Range Block::1 bo:473 Volume offset:985 Expected:4 Got:2
63250 Shift 0, v:2 sv:2 min:2 cur:2
63251 In Range Block::1 bo:474 Volume offset:986 Expected:4 Got:2
63252 Shift 0, v:2 sv:2 min:2 cur:2
63253 In Range Block::1 bo:475 Volume offset:987 Expected:4 Got:2
63254 Shift 0, v:2 sv:2 min:2 cur:2
63255 In Range Block::1 bo:476 Volume offset:988 Expected:4 Got:2
63256 Shift 0, v:2 sv:2 min:2 cur:2
63257 In Range Block::1 bo:477 Volume offset:989 Expected:4 Got:2
63258 Shift 0, v:2 sv:2 min:2 cur:2
63259 In Range Block::1 bo:478 Volume offset:990 Expected:4 Got:2
63260 Shift 0, v:2 sv:2 min:2 cur:2
63261 In Range Block::1 bo:479 Volume offset:991 Expected:4 Got:2
63262 Shift 0, v:2 sv:2 min:2 cur:2
63263 In Range Block::1 bo:480 Volume offset:992 Expected:4 Got:2
63264 Shift 0, v:2 sv:2 min:2 cur:2
63265 In Range Block::1 bo:481 Volume offset:993 Expected:4 Got:2
63266 Shift 0, v:2 sv:2 min:2 cur:2
63267 In Range Block::1 bo:482 Volume offset:994 Expected:4 Got:2
63268 Shift 0, v:2 sv:2 min:2 cur:2
63269 In Range Block::1 bo:483 Volume offset:995 Expected:4 Got:2
63270 Shift 0, v:2 sv:2 min:2 cur:2
63271 In Range Block::1 bo:484 Volume offset:996 Expected:4 Got:2
63272 Shift 0, v:2 sv:2 min:2 cur:2
63273 In Range Block::1 bo:485 Volume offset:997 Expected:4 Got:2
63274 Shift 0, v:2 sv:2 min:2 cur:2
63275 In Range Block::1 bo:486 Volume offset:998 Expected:4 Got:2
63276 Shift 0, v:2 sv:2 min:2 cur:2
63277 In Range Block::1 bo:487 Volume offset:999 Expected:4 Got:2
63278 Shift 0, v:2 sv:2 min:2 cur:2
63279 In Range Block::1 bo:488 Volume offset:1000 Expected:4 Got:2
63280 Shift 0, v:2 sv:2 min:2 cur:2
63281 In Range Block::1 bo:489 Volume offset:1001 Expected:4 Got:2
63282 Shift 0, v:2 sv:2 min:2 cur:2
63283 In Range Block::1 bo:490 Volume offset:1002 Expected:4 Got:2
63284 Shift 0, v:2 sv:2 min:2 cur:2
63285 In Range Block::1 bo:491 Volume offset:1003 Expected:4 Got:2
63286 Shift 0, v:2 sv:2 min:2 cur:2
63287 In Range Block::1 bo:492 Volume offset:1004 Expected:4 Got:2
63288 Shift 0, v:2 sv:2 min:2 cur:2
63289 In Range Block::1 bo:493 Volume offset:1005 Expected:4 Got:2
63290 Shift 0, v:2 sv:2 min:2 cur:2
63291 In Range Block::1 bo:494 Volume offset:1006 Expected:4 Got:2
63292 Shift 0, v:2 sv:2 min:2 cur:2
63293 In Range Block::1 bo:495 Volume offset:1007 Expected:4 Got:2
63294 Shift 0, v:2 sv:2 min:2 cur:2
63295 In Range Block::1 bo:496 Volume offset:1008 Expected:4 Got:2
63296 Shift 0, v:2 sv:2 min:2 cur:2
63297 In Range Block::1 bo:497 Volume offset:1009 Expected:4 Got:2
63298 Shift 0, v:2 sv:2 min:2 cur:2
63299 In Range Block::1 bo:498 Volume offset:1010 Expected:4 Got:2
63300 Shift 0, v:2 sv:2 min:2 cur:2
63301 In Range Block::1 bo:499 Volume offset:1011 Expected:4 Got:2
63302 Shift 0, v:2 sv:2 min:2 cur:2
63303 In Range Block::1 bo:500 Volume offset:1012 Expected:4 Got:2
63304 Shift 0, v:2 sv:2 min:2 cur:2
63305 In Range Block::1 bo:501 Volume offset:1013 Expected:4 Got:2
63306 Shift 0, v:2 sv:2 min:2 cur:2
63307 In Range Block::1 bo:502 Volume offset:1014 Expected:4 Got:2
63308 Shift 0, v:2 sv:2 min:2 cur:2
63309 In Range Block::1 bo:503 Volume offset:1015 Expected:4 Got:2
63310 Shift 0, v:2 sv:2 min:2 cur:2
63311 In Range Block::1 bo:504 Volume offset:1016 Expected:4 Got:2
63312 Shift 0, v:2 sv:2 min:2 cur:2
63313 In Range Block::1 bo:505 Volume offset:1017 Expected:4 Got:2
63314 Shift 0, v:2 sv:2 min:2 cur:2
63315 In Range Block::1 bo:506 Volume offset:1018 Expected:4 Got:2
63316 Shift 0, v:2 sv:2 min:2 cur:2
63317 In Range Block::1 bo:507 Volume offset:1019 Expected:4 Got:2
63318 Shift 0, v:2 sv:2 min:2 cur:2
63319 In Range Block::1 bo:508 Volume offset:1020 Expected:4 Got:2
63320 Shift 0, v:2 sv:2 min:2 cur:2
63321 In Range Block::1 bo:509 Volume offset:1021 Expected:4 Got:2
63322 Shift 0, v:2 sv:2 min:2 cur:2
63323 In Range Block::1 bo:510 Volume offset:1022 Expected:4 Got:2
63324 okShift 0, v:2 sv:2 min:2 cur:2
63325 In Range Block::1 bo:511 Volume offset:1023 Expected:4 Got:2
63326 
63327 Shift 0, v:4 sv:4 min:2 cur:2
63328 Out of Range Block::1 bo:1 Volume offset:513 Expected:2 Got:4
63329 Shift 0, v:4 sv:4 min:2 cur:2
63330 Out of Range Block::1 bo:2 Volume offset:514 Expected:2 Got:4
63331 Shift 0, v:4 sv:4 min:2 cur:2
63332 Out of Range Block::1 bo:3 Volume offset:515 Expected:2 Got:4
63333 Shift 0, v:4 sv:4 min:2 cur:2
63334 Out of Range Block::1 bo:4 Volume offset:516 Expected:2 Got:4
63335 Shift 0, v:4 sv:4 min:2 cur:2
63336 Out of Range Block::1 bo:5 Volume offset:517 Expected:2 Got:4
63337 Shift 0, v:4 sv:4 min:2 cur:2
63338 Out of Range Block::1 bo:6 Volume offset:518 Expected:2 Got:4
63339 Shift 0, v:4 sv:4 min:2 cur:2
63340 Out of Range Block::1 bo:7 Volume offset:519 Expected:2 Got:4
63341 Shift 0, v:4 sv:4 min:2 cur:2
63342 Out of Range Block::1 bo:8 Volume offset:520 Expected:2 Got:4
63343 Shift 0, v:4 sv:4 min:2 cur:2
63344 Out of Range Block::1 bo:9 Volume offset:521 Expected:2 Got:4
63345 Shift 0, v:4 sv:4 min:2 cur:2
63346 Out of Range Block::1 bo:10 Volume offset:522 Expected:2 Got:4
63347 test test::test_wl_update ... Shift 0, v:4 sv:4 min:2 cur:2
63348 Out of Range Block::1 bo:11 Volume offset:523 Expected:2 Got:4
63349 Shift 0, v:4 sv:4 min:2 cur:2
63350 Out of Range Block::1 bo:12 Volume offset:524 Expected:2 Got:4
63351 Shift 0, v:4 sv:4 min:2 cur:2
63352 Out of Range Block::1 bo:13 Volume offset:525 Expected:2 Got:4
63353 Shift 0, v:4 sv:4 min:2 cur:2
63354 Out of Range Block::1 bo:14 Volume offset:526 Expected:2 Got:4
63355 Shift 0, v:4 sv:4 min:2 cur:2
63356 Out of Range Block::1 bo:15 Volume offset:527 Expected:2 Got:4
63357 Shift 0, v:4 sv:4 min:2 cur:2
63358 Out of Range Block::1 bo:16 Volume offset:528 Expected:2 Got:4
63359 Shift 0, v:4 sv:4 min:2 cur:2
63360 Out of Range Block::1 bo:17 Volume offset:529 Expected:2 Got:4
63361 Shift 0, v:4 sv:4 min:2 cur:2
63362 Out of Range Block::1 bo:18 Volume offset:530 Expected:2 Got:4
63363 Shift 0, v:4 sv:4 min:2 cur:2
63364 Out of Range Block::1 bo:19 Volume offset:531 Expected:2 Got:4
63365 Shift 0, v:4 sv:4 min:2 cur:2
63366 Out of Range Block::1 bo:20 Volume offset:532 Expected:2 Got:4
63367 Shift 0, v:4 sv:4 min:2 cur:2
63368 Out of Range Block::1 bo:21 Volume offset:533 Expected:2 Got:4
63369 Shift 0, v:4 sv:4 min:2 cur:2
63370 Out of Range Block::1 bo:22 Volume offset:534 Expected:2 Got:4
63371 Shift 0, v:4 sv:4 min:2 cur:2
63372 Out of Range Block::1 bo:23 Volume offset:535 Expected:2 Got:4
63373 Shift 0, v:4 sv:4 min:2 cur:2
63374 Out of Range Block::1 bo:24 Volume offset:536 Expected:2 Got:4
63375 Shift 0, v:4 sv:4 min:2 cur:2
63376 Out of Range Block::1 bo:25 Volume offset:537 Expected:2 Got:4
63377 Shift 0, v:4 sv:4 min:2 cur:2
63378 Out of Range Block::1 bo:26 Volume offset:538 Expected:2 Got:4
63379 Shift 0, v:4 sv:4 min:2 cur:2
63380 Out of Range Block::1 bo:27 Volume offset:539 Expected:2 Got:4
63381 Shift 0, v:4 sv:4 min:2 cur:2
63382 Out of Range Block::1 bo:28 Volume offset:540 Expected:2 Got:4
63383 Shift 0, v:4 sv:4 min:2 cur:2
63384 Out of Range Block::1 bo:29 Volume offset:541 Expected:2 Got:4
63385 Shift 0, v:4 sv:4 min:2 cur:2
63386 Out of Range Block::1 bo:30 Volume offset:542 Expected:2 Got:4
63387 Shift 0, v:4 sv:4 min:2 cur:2
63388 Out of Range Block::1 bo:31 Volume offset:543 Expected:2 Got:4
63389 okShift 0, v:4 sv:4 min:2 cur:2
63390 Out of Range Block::1 bo:32 Volume offset:544 Expected:2 Got:4
63391 Shift 0, v:4 sv:4 min:2 cur:2
63392 Out of Range Block::1 bo:33 Volume offset:545 Expected:2 Got:4
63393 Shift 0, v:4 sv:4 min:2 cur:2
63394 Out of Range Block::1 bo:34 Volume offset:546 Expected:2 Got:4
63395 Shift 0, v:4 sv:4 min:2 cur:2
63396 Out of Range Block::1 bo:35 Volume offset:547 Expected:2 Got:4
63397 Shift 0, v:4 sv:4 min:2 cur:2
63398 Out of Range Block::1 bo:36 Volume offset:548 Expected:2 Got:4
63399 Shift 0, v:4 sv:4 min:2 cur:2
63400 Out of Range Block::1 bo:37 Volume offset:549 Expected:2 Got:4
63401 Shift 0, v:4 sv:4 min:2 cur:2
63402 Out of Range Block::1 bo:38 Volume offset:550 Expected:2 Got:4
63403 Shift 0, v:4 sv:4 min:2 cur:2
63404 Out of Range Block::1 bo:39 Volume offset:551 Expected:2 Got:4
63405 Shift 0, v:4 sv:4 min:2 cur:2
63406 Out of Range Block::1 bo:40 Volume offset:552 Expected:2 Got:4
63407 Shift 0, v:4 sv:4 min:2 cur:2
63408 Out of Range Block::1 bo:41 Volume offset:553 Expected:2 Got:4
63409 Shift 0, v:4 sv:4 min:2 cur:2
63410 Out of Range Block::1 bo:42 Volume offset:554 Expected:2 Got:4
63411 Shift 0, v:4 sv:4 min:2 cur:2
63412 Out of Range Block::1 bo:43 Volume offset:555 Expected:2 Got:4
63413 Shift 0, v:4 sv:4 min:2 cur:2
63414 Out of Range Block::1 bo:44 Volume offset:556 Expected:2 Got:4
63415 Shift 0, v:4 sv:4 min:2 cur:2
63416 Out of Range Block::1 bo:45 Volume offset:557 Expected:2 Got:4
63417 Shift 0, v:4 sv:4 min:2 cur:2
63418 Out of Range Block::1 bo:46 Volume offset:558 Expected:2 Got:4
63419 Shift 0, v:4 sv:4 min:2 cur:2
63420 Out of Range Block::1 bo:47 Volume offset:559 Expected:2 Got:4
63421 Shift 0, v:4 sv:4 min:2 cur:2
63422 Out of Range Block::1 bo:48 Volume offset:560 Expected:2 Got:4
63423 Shift 0, v:4 sv:4 min:2 cur:2
63424 Out of Range Block::1 bo:49 Volume offset:561 Expected:2 Got:4
63425 Shift 0, v:4 sv:4 min:2 cur:2
63426 Out of Range Block::1 bo:50 Volume offset:562 Expected:2 Got:4
63427 Shift 0, v:4 sv:4 min:2 cur:2
63428 Out of Range Block::1 bo:51 Volume offset:563 Expected:2 Got:4
63429 Shift 0, v:4 sv:4 min:2 cur:2
63430 Out of Range Block::1 bo:52 Volume offset:564 Expected:2 Got:4
63431 Shift 0, v:4 sv:4 min:2 cur:2
63432 Out of Range Block::1 bo:53 Volume offset:565 Expected:2 Got:4
63433 Shift 0, v:4 sv:4 min:2 cur:2
63434 Out of Range Block::1 bo:54 Volume offset:566 Expected:2 Got:4
63435 Shift 0, v:4 sv:4 min:2 cur:2
63436 Out of Range Block::1 bo:55 Volume offset:567 Expected:2 Got:4
63437 Shift 0, v:4 sv:4 min:2 cur:2
63438 Out of Range Block::1 bo:56 Volume offset:568 Expected:2 Got:4
63439 Shift 0, v:4 sv:4 min:2 cur:2
63440 Out of Range Block::1 bo:57 Volume offset:569 Expected:2 Got:4
63441 Shift 0, v:4 sv:4 min:2 cur:2
63442 Out of Range Block::1 bo:58 Volume offset:570 Expected:2 Got:4
63443 Shift 0, v:4 sv:4 min:2 cur:2
63444 Out of Range Block::1 bo:59 Volume offset:571 Expected:2 Got:4
63445 Shift 0, v:4 sv:4 min:2 cur:2
63446 Out of Range Block::1 bo:60 Volume offset:572 Expected:2 Got:4
63447 Shift 0, v:4 sv:4 min:2 cur:2
63448 Out of Range Block::1 bo:61 Volume offset:573 Expected:2 Got:4
63449 Shift 0, v:4 sv:4 min:2 cur:2
63450 Out of Range Block::1 bo:62 Volume offset:574 Expected:2 Got:4
63451 Shift 0, v:4 sv:4 min:2 cur:2
63452 Out of Range Block::1 bo:63 Volume offset:575 Expected:2 Got:4
63453 Shift 0, v:4 sv:4 min:2 cur:2
63454 Out of Range Block::1 bo:64 Volume offset:576 Expected:2 Got:4
63455 Shift 0, v:4 sv:4 min:2 cur:2
63456 Out of Range Block::1 bo:65 Volume offset:577 Expected:2 Got:4
63457 Shift 0, v:4 sv:4 min:2 cur:2
63458 Out of Range Block::1 bo:66 Volume offset:578 Expected:2 Got:4
63459 Shift 0, v:4 sv:4 min:2 cur:2
63460 Out of Range Block::1 bo:67 Volume offset:579 Expected:2 Got:4
63461 Shift 0, v:4 sv:4 min:2 cur:2
63462 Out of Range Block::1 bo:68 Volume offset:580 Expected:2 Got:4
63463 Shift 0, v:4 sv:4 min:2 cur:2
63464 Out of Range Block::1 bo:69 Volume offset:581 Expected:2 Got:4
63465 Shift 0, v:4 sv:4 min:2 cur:2
63466 Out of Range Block::1 bo:70 Volume offset:582 Expected:2 Got:4
63467 Shift 0, v:4 sv:4 min:2 cur:2
63468 Out of Range Block::1 bo:71 Volume offset:583 Expected:2 Got:4
63469 Shift 0, v:4 sv:4 min:2 cur:2
63470 Out of Range Block::1 bo:72 Volume offset:584 Expected:2 Got:4
63471 Shift 0, v:4 sv:4 min:2 cur:2
63472 Out of Range Block::1 bo:73 Volume offset:585 Expected:2 Got:4
63473 Shift 0, v:4 sv:4 min:2 cur:2
63474 Out of Range Block::1 bo:74 Volume offset:586 Expected:2 Got:4
63475 Shift 0, v:4 sv:4 min:2 cur:2
63476 Out of Range Block::1 bo:75 Volume offset:587 Expected:2 Got:4
63477 Shift 0, v:4 sv:4 min:2 cur:2
63478 Out of Range Block::1 bo:76 Volume offset:588 Expected:2 Got:4
63479 Shift 0, v:4 sv:4 min:2 cur:2
63480 Out of Range Block::1 bo:77 Volume offset:589 Expected:2 Got:4
63481 Shift 0, v:4 sv:4 min:2 cur:2
63482 Out of Range Block::1 bo:78 Volume offset:590 Expected:2 Got:4
63483 Shift 0, v:4 sv:4 min:2 cur:2
63484 Out of Range Block::1 bo:79 Volume offset:591 Expected:2 Got:4
63485 Shift 0, v:4 sv:4 min:2 cur:2
63486 Out of Range Block::1 bo:80 Volume offset:592 Expected:2 Got:4
63487 Shift 0, v:4 sv:4 min:2 cur:2
63488 Out of Range Block::1 bo:81 Volume offset:593 Expected:2 Got:4
63489 Shift 0, v:4 sv:4 min:2 cur:2
63490 Out of Range Block::1 bo:82 Volume offset:594 Expected:2 Got:4
63491 Shift 0, v:4 sv:4 min:2 cur:2
63492 Out of Range Block::1 bo:83 Volume offset:595 Expected:2 Got:4
63493 Shift 0, v:4 sv:4 min:2 cur:2
63494 Out of Range Block::1 bo:84 Volume offset:596 Expected:2 Got:4
63495 Shift 0, v:4 sv:4 min:2 cur:2
63496 Out of Range Block::1 bo:85 Volume offset:597 Expected:2 Got:4
63497 Shift 0, v:4 sv:4 min:2 cur:2
63498 Out of Range Block::1 bo:86 Volume offset:598 Expected:2 Got:4
63499 Shift 0, v:4 sv:4 min:2 cur:2
63500 Out of Range Block::1 bo:87 Volume offset:599 Expected:2 Got:4
63501 Shift 0, v:4 sv:4 min:2 cur:2
63502 Out of Range Block::1 bo:88 Volume offset:600 Expected:2 Got:4
63503 Shift 0, v:4 sv:4 min:2 cur:2
63504 Out of Range Block::1 bo:89 Volume offset:601 Expected:2 Got:4
63505 Shift 0, v:4 sv:4 min:2 cur:2
63506 Out of Range Block::1 bo:90 Volume offset:602 Expected:2 Got:4
63507 Shift 0, v:4 sv:4 min:2 cur:2
63508 Out of Range Block::1 bo:91 Volume offset:603 Expected:2 Got:4
63509 Shift 0, v:4 sv:4 min:2 cur:2
63510 Out of Range Block::1 bo:92 Volume offset:604 Expected:2 Got:4
63511 Shift 0, v:4 sv:4 min:2 cur:2
63512 Out of Range Block::1 bo:93 Volume offset:605 Expected:2 Got:4
63513 Shift 0, v:4 sv:4 min:2 cur:2
63514 Out of Range Block::1 bo:94 Volume offset:606 Expected:2 Got:4
63515 Shift 0, v:4 sv:4 min:2 cur:2
63516 Out of Range Block::1 bo:95 Volume offset:607 Expected:2 Got:4
63517 Shift 0, v:4 sv:4 min:2 cur:2
63518 Out of Range Block::1 bo:96 Volume offset:608 Expected:2 Got:4
63519 Shift 0, v:4 sv:4 min:2 cur:2
63520 Out of Range Block::1 bo:97 Volume offset:609 Expected:2 Got:4
63521 Shift 0, v:4 sv:4 min:2 cur:2
63522 Out of Range Block::1 bo:98 Volume offset:610 Expected:2 Got:4
63523 Shift 0, v:4 sv:4 min:2 cur:2
63524 Out of Range Block::1 bo:99 Volume offset:611 Expected:2 Got:4
63525 Shift 0, v:4 sv:4 min:2 cur:2
63526 Out of Range Block::1 bo:100 Volume offset:612 Expected:2 Got:4
63527 Shift 0, v:4 sv:4 min:2 cur:2
63528 Out of Range Block::1 bo:101 Volume offset:613 Expected:2 Got:4
63529 Shift 0, v:4 sv:4 min:2 cur:2
63530 Out of Range Block::1 bo:102 Volume offset:614 Expected:2 Got:4
63531 Shift 0, v:4 sv:4 min:2 cur:2
63532 Out of Range Block::1 bo:103 Volume offset:615 Expected:2 Got:4
63533 Shift 0, v:4 sv:4 min:2 cur:2
63534 Out of Range Block::1 bo:104 Volume offset:616 Expected:2 Got:4
63535 Shift 0, v:4 sv:4 min:2 cur:2
63536 Out of Range Block::1 bo:105 Volume offset:617 Expected:2 Got:4
63537 Shift 0, v:4 sv:4 min:2 cur:2
63538 Out of Range Block::1 bo:106 Volume offset:618 Expected:2 Got:4
63539 Shift 0, v:4 sv:4 min:2 cur:2
63540 Out of Range Block::1 bo:107 Volume offset:619 Expected:2 Got:4
63541 Shift 0, v:4 sv:4 min:2 cur:2
63542 Out of Range Block::1 bo:108 Volume offset:620 Expected:2 Got:4
63543 Shift 0, v:4 sv:4 min:2 cur:2
63544 Out of Range Block::1 bo:109 Volume offset:621 Expected:2 Got:4
63545 Shift 0, v:4 sv:4 min:2 cur:2
63546 Out of Range Block::1 bo:110 Volume offset:622 Expected:2 Got:4
63547 Shift 0, v:4 sv:4 min:2 cur:2
63548 Out of Range Block::1 bo:111 Volume offset:623 Expected:2 Got:4
63549 Shift 0, v:4 sv:4 min:2 cur:2
63550 Out of Range Block::1 bo:112 Volume offset:624 Expected:2 Got:4
63551 Shift 0, v:4 sv:4 min:2 cur:2
63552 Out of Range Block::1 bo:113 Volume offset:625 Expected:2 Got:4
63553 Shift 0, v:4 sv:4 min:2 cur:2
63554 Out of Range Block::1 bo:114 Volume offset:626 Expected:2 Got:4
63555 Shift 0, v:4 sv:4 min:2 cur:2
63556 Out of Range Block::1 bo:115 Volume offset:627 Expected:2 Got:4
63557 Shift 0, v:4 sv:4 min:2 cur:2
63558 Out of Range Block::1 bo:116 Volume offset:628 Expected:2 Got:4
63559 Shift 0, v:4 sv:4 min:2 cur:2
63560 Out of Range Block::1 bo:117 Volume offset:629 Expected:2 Got:4
63561 Shift 0, v:4 sv:4 min:2 cur:2
63562 Out of Range Block::1 bo:118 Volume offset:630 Expected:2 Got:4
63563 Shift 0, v:4 sv:4 min:2 cur:2
63564 Out of Range Block::1 bo:119 Volume offset:631 Expected:2 Got:4
63565 Shift 0, v:4 sv:4 min:2 cur:2
63566 Out of Range Block::1 bo:120 Volume offset:632 Expected:2 Got:4
63567 Shift 0, v:4 sv:4 min:2 cur:2
63568 Out of Range Block::1 bo:121 Volume offset:633 Expected:2 Got:4
63569 Shift 0, v:4 sv:4 min:2 cur:2
63570 Out of Range Block::1 bo:122 Volume offset:634 Expected:2 Got:4
63571 Shift 0, v:4 sv:4 min:2 cur:2
63572 Out of Range Block::1 bo:123 Volume offset:635 Expected:2 Got:4
63573 Shift 0, v:4 sv:4 min:2 cur:2
63574 Out of Range Block::1 bo:124 Volume offset:636 Expected:2 Got:4
63575 Shift 0, v:4 sv:4 min:2 cur:2
63576 Out of Range Block::1 bo:125 Volume offset:637 Expected:2 Got:4
63577 Shift 0, v:4 sv:4 min:2 cur:2
63578 Out of Range Block::1 bo:126 Volume offset:638 Expected:2 Got:4
63579 Shift 0, v:4 sv:4 min:2 cur:2
63580 Out of Range Block::1 bo:127 Volume offset:639 Expected:2 Got:4
63581 Shift 0, v:4 sv:4 min:2 cur:2
63582 Out of Range Block::1 bo:128 Volume offset:640 Expected:2 Got:4
63583 Shift 0, v:4 sv:4 min:2 cur:2
63584 Out of Range Block::1 bo:129 Volume offset:641 Expected:2 Got:4
63585 Shift 0, v:4 sv:4 min:2 cur:2
63586 Out of Range Block::1 bo:130 Volume offset:642 Expected:2 Got:4
63587 
63588 Shift 0, v:4 sv:4 min:2 cur:2
63589 Out of Range Block::1 bo:131 Volume offset:643 Expected:2 Got:4
63590 Shift 0, v:4 sv:4 min:2 cur:2
63591 Out of Range Block::1 bo:132 Volume offset:644 Expected:2 Got:4
63592 Shift 0, v:4 sv:4 min:2 cur:2
63593 Out of Range Block::1 bo:133 Volume offset:645 Expected:2 Got:4
63594 Shift 0, v:4 sv:4 min:2 cur:2
63595 Out of Range Block::1 bo:134 Volume offset:646 Expected:2 Got:4
63596 Shift 0, v:4 sv:4 min:2 cur:2
63597 Out of Range Block::1 bo:135 Volume offset:647 Expected:2 Got:4
63598 Shift 0, v:4 sv:4 min:2 cur:2
63599 Out of Range Block::1 bo:136 Volume offset:648 Expected:2 Got:4
63600 Shift 0, v:4 sv:4 min:2 cur:2
63601 Out of Range Block::1 bo:137 Volume offset:649 Expected:2 Got:4
63602 Shift 0, v:4 sv:4 min:2 cur:2
63603 Out of Range Block::1 bo:138 Volume offset:650 Expected:2 Got:4
63604 Shift 0, v:4 sv:4 min:2 cur:2
63605 Out of Range Block::1 bo:139 Volume offset:651 Expected:2 Got:4
63606 Shift 0, v:4 sv:4 min:2 cur:2
63607 Out of Range Block::1 bo:140 Volume offset:652 Expected:2 Got:4
63608 Shift 0, v:4 sv:4 min:2 cur:2
63609 Out of Range Block::1 bo:141 Volume offset:653 Expected:2 Got:4
63610 Shift 0, v:4 sv:4 min:2 cur:2
63611 Out of Range Block::1 bo:142 Volume offset:654 Expected:2 Got:4
63612 Shift 0, v:4 sv:4 min:2 cur:2
63613 Out of Range Block::1 bo:143 Volume offset:655 Expected:2 Got:4
63614 Shift 0, v:4 sv:4 min:2 cur:2
63615 Out of Range Block::1 bo:144 Volume offset:656 Expected:2 Got:4
63616 Shift 0, v:4 sv:4 min:2 cur:2
63617 Out of Range Block::1 bo:145 Volume offset:657 Expected:2 Got:4
63618 Shift 0, v:4 sv:4 min:2 cur:2
63619 Out of Range Block::1 bo:146 Volume offset:658 Expected:2 Got:4
63620 Shift 0, v:4 sv:4 min:2 cur:2
63621 Out of Range Block::1 bo:147 Volume offset:659 Expected:2 Got:4
63622 Shift 0, v:4 sv:4 min:2 cur:2
63623 Out of Range Block::1 bo:148 Volume offset:660 Expected:2 Got:4
63624 Shift 0, v:4 sv:4 min:2 cur:2
63625 Out of Range Block::1 bo:149 Volume offset:661 Expected:2 Got:4
63626 Shift 0, v:4 sv:4 min:2 cur:2
63627 Out of Range Block::1 bo:150 Volume offset:662 Expected:2 Got:4
63628 Shift 0, v:4 sv:4 min:2 cur:2
63629 Out of Range Block::1 bo:151 Volume offset:663 Expected:2 Got:4
63630 Shift 0, v:4 sv:4 min:2 cur:2
63631 Out of Range Block::1 bo:152 Volume offset:664 Expected:2 Got:4
63632 Shift 0, v:4 sv:4 min:2 cur:2
63633 Out of Range Block::1 bo:153 Volume offset:665 Expected:2 Got:4
63634 Shift 0, v:4 sv:4 min:2 cur:2
63635 Out of Range Block::1 bo:154 Volume offset:666 Expected:2 Got:4
63636 Shift 0, v:4 sv:4 min:2 cur:2
63637 Out of Range Block::1 bo:155 Volume offset:667 Expected:2 Got:4
63638 Shift 0, v:4 sv:4 min:2 cur:2
63639 Out of Range Block::1 bo:156 Volume offset:668 Expected:2 Got:4
63640 Shift 0, v:4 sv:4 min:2 cur:2
63641 Out of Range Block::1 bo:157 Volume offset:669 Expected:2 Got:4
63642 Shift 0, v:4 sv:4 min:2 cur:2
63643 Out of Range Block::1 bo:158 Volume offset:670 Expected:2 Got:4
63644 Shift 0, v:4 sv:4 min:2 cur:2
63645 Out of Range Block::1 bo:159 Volume offset:671 Expected:2 Got:4
63646 Shift 0, v:4 sv:4 min:2 cur:2
63647 Out of Range Block::1 bo:160 Volume offset:672 Expected:2 Got:4
63648 Shift 0, v:4 sv:4 min:2 cur:2
63649 Out of Range Block::1 bo:161 Volume offset:673 Expected:2 Got:4
63650 Shift 0, v:4 sv:4 min:2 cur:2
63651 Out of Range Block::1 bo:162 Volume offset:674 Expected:2 Got:4
63652 Shift 0, v:4 sv:4 min:2 cur:2
63653 Out of Range Block::1 bo:163 Volume offset:675 Expected:2 Got:4
63654 Shift 0, v:4 sv:4 min:2 cur:2
63655 Out of Range Block::1 bo:164 Volume offset:676 Expected:2 Got:4
63656 Shift 0, v:4 sv:4 min:2 cur:2
63657 Out of Range Block::1 bo:165 Volume offset:677 Expected:2 Got:4
63658 Shift 0, v:4 sv:4 min:2 cur:2
63659 Out of Range Block::1 bo:166 Volume offset:678 Expected:2 Got:4
63660 Shift 0, v:4 sv:4 min:2 cur:2
63661 Out of Range Block::1 bo:167 Volume offset:679 Expected:2 Got:4
63662 Shift 0, v:4 sv:4 min:2 cur:2
63663 Out of Range Block::1 bo:168 Volume offset:680 Expected:2 Got:4
63664 Shift 0, v:4 sv:4 min:2 cur:2
63665 Out of Range Block::1 bo:169 Volume offset:681 Expected:2 Got:4
63666 Shift 0, v:4 sv:4 min:2 cur:2
63667 Out of Range Block::1 bo:170 Volume offset:682 Expected:2 Got:4
63668 Shift 0, v:4 sv:4 min:2 cur:2
63669 Out of Range Block::1 bo:171 Volume offset:683 Expected:2 Got:4
63670 Shift 0, v:4 sv:4 min:2 cur:2
63671 Out of Range Block::1 bo:172 Volume offset:684 Expected:2 Got:4
63672 Shift 0, v:4 sv:4 min:2 cur:2
63673 Out of Range Block::1 bo:173 Volume offset:685 Expected:2 Got:4
63674 Shift 0, v:4 sv:4 min:2 cur:2
63675 Out of Range Block::1 bo:174 Volume offset:686 Expected:2 Got:4
63676 Shift 0, v:4 sv:4 min:2 cur:2
63677 Out of Range Block::1 bo:175 Volume offset:687 Expected:2 Got:4
63678 Shift 0, v:4 sv:4 min:2 cur:2
63679 Out of Range Block::1 bo:176 Volume offset:688 Expected:2 Got:4
63680 Shift 0, v:4 sv:4 min:2 cur:2
63681 Out of Range Block::1 bo:177 Volume offset:689 Expected:2 Got:4
63682 Shift 0, v:4 sv:4 min:2 cur:2
63683 Out of Range Block::1 bo:178 Volume offset:690 Expected:2 Got:4
63684 Shift 0, v:4 sv:4 min:2 cur:2
63685 Out of Range Block::1 bo:179 Volume offset:691 Expected:2 Got:4
63686 Shift 0, v:4 sv:4 min:2 cur:2
63687 Out of Range Block::1 bo:180 Volume offset:692 Expected:2 Got:4
63688 Shift 0, v:4 sv:4 min:2 cur:2
63689 Out of Range Block::1 bo:181 Volume offset:693 Expected:2 Got:4
63690 test test::test_wl_update_commit ... Shift 0, v:4 sv:4 min:2 cur:2
63691 Out of Range Block::1 bo:182 Volume offset:694 Expected:2 Got:4
63692 Shift 0, v:4 sv:4 min:2 cur:2
63693 Out of Range Block::1 bo:183 Volume offset:695 Expected:2 Got:4
63694 Shift 0, v:4 sv:4 min:2 cur:2
63695 Out of Range Block::1 bo:184 Volume offset:696 Expected:2 Got:4
63696 Shift 0, v:4 sv:4 min:2 cur:2
63697 Out of Range Block::1 bo:185 Volume offset:697 Expected:2 Got:4
63698 Shift 0, v:4 sv:4 min:2 cur:2
63699 Out of Range Block::1 bo:186 Volume offset:698 Expected:2 Got:4
63700 Shift 0, v:4 sv:4 min:2 cur:2
63701 Out of Range Block::1 bo:187 Volume offset:699 Expected:2 Got:4
63702 Shift 0, v:4 sv:4 min:2 cur:2
63703 Out of Range Block::1 bo:188 Volume offset:700 Expected:2 Got:4
63704 Shift 0, v:4 sv:4 min:2 cur:2
63705 Out of Range Block::1 bo:189 Volume offset:701 Expected:2 Got:4
63706 Shift 0, v:4 sv:4 min:2 cur:2
63707 Out of Range Block::1 bo:190 Volume offset:702 Expected:2 Got:4
63708 Shift 0, v:4 sv:4 min:2 cur:2
63709 Out of Range Block::1 bo:191 Volume offset:703 Expected:2 Got:4
63710 Shift 0, v:4 sv:4 min:2 cur:2
63711 Out of Range Block::1 bo:192 Volume offset:704 Expected:2 Got:4
63712 Shift 0, v:4 sv:4 min:2 cur:2
63713 Out of Range Block::1 bo:193 Volume offset:705 Expected:2 Got:4
63714 Shift 0, v:4 sv:4 min:2 cur:2
63715 Out of Range Block::1 bo:194 Volume offset:706 Expected:2 Got:4
63716 Shift 0, v:4 sv:4 min:2 cur:2
63717 Out of Range Block::1 bo:195 Volume offset:707 Expected:2 Got:4
63718 Shift 0, v:4 sv:4 min:2 cur:2
63719 Out of Range Block::1 bo:196 Volume offset:708 Expected:2 Got:4
63720 Shift 0, v:4 sv:4 min:2 cur:2
63721 Out of Range Block::1 bo:197 Volume offset:709 Expected:2 Got:4
63722 Shift 0, v:4 sv:4 min:2 cur:2
63723 Out of Range Block::1 bo:198 Volume offset:710 Expected:2 Got:4
63724 Shift 0, v:4 sv:4 min:2 cur:2
63725 Out of Range Block::1 bo:199 Volume offset:711 Expected:2 Got:4
63726 Shift 0, v:4 sv:4 min:2 cur:2
63727 Out of Range Block::1 bo:200 Volume offset:712 Expected:2 Got:4
63728 Shift 0, v:4 sv:4 min:2 cur:2
63729 Out of Range Block::1 bo:201 Volume offset:713 Expected:2 Got:4
63730 Shift 0, v:4 sv:4 min:2 cur:2
63731 Out of Range Block::1 bo:202 Volume offset:714 Expected:2 Got:4
63732 Shift 0, v:4 sv:4 min:2 cur:2
63733 Out of Range Block::1 bo:203 Volume offset:715 Expected:2 Got:4
63734 Shift 0, v:4 sv:4 min:2 cur:2
63735 Out of Range Block::1 bo:204 Volume offset:716 Expected:2 Got:4
63736 Shift 0, v:4 sv:4 min:2 cur:2
63737 Out of Range Block::1 bo:205 Volume offset:717 Expected:2 Got:4
63738 Shift 0, v:4 sv:4 min:2 cur:2
63739 Out of Range Block::1 bo:206 Volume offset:718 Expected:2 Got:4
63740 Shift 0, v:4 sv:4 min:2 cur:2
63741 Out of Range Block::1 bo:207 Volume offset:719 Expected:2 Got:4
63742 Shift 0, v:4 sv:4 min:2 cur:2
63743 Out of Range Block::1 bo:208 Volume offset:720 Expected:2 Got:4
63744 Shift 0, v:4 sv:4 min:2 cur:2
63745 Out of Range Block::1 bo:209 Volume offset:721 Expected:2 Got:4
63746 Shift 0, v:4 sv:4 min:2 cur:2
63747 Out of Range Block::1 bo:210 Volume offset:722 Expected:2 Got:4
63748 Shift 0, v:4 sv:4 min:2 cur:2
63749 Out of Range Block::1 bo:211 Volume offset:723 Expected:2 Got:4
63750 Shift 0, v:4 sv:4 min:2 cur:2
63751 Out of Range Block::1 bo:212 Volume offset:724 Expected:2 Got:4
63752 Shift 0, v:4 sv:4 min:2 cur:2
63753 Out of Range Block::1 bo:213 Volume offset:725 Expected:2 Got:4
63754 Shift 0, v:4 sv:4 min:2 cur:2
63755 Out of Range Block::1 bo:214 Volume offset:726 Expected:2 Got:4
63756 Shift 0, v:4 sv:4 min:2 cur:2
63757 Out of Range Block::1 bo:215 Volume offset:727 Expected:2 Got:4
63758 Shift 0, v:4 sv:4 min:2 cur:2
63759 Out of Range Block::1 bo:216 Volume offset:728 Expected:2 Got:4
63760 Shift 0, v:4 sv:4 min:2 cur:2
63761 Out of Range Block::1 bo:217 Volume offset:729 Expected:2 Got:4
63762 Shift 0, v:4 sv:4 min:2 cur:2
63763 Out of Range Block::1 bo:218 Volume offset:730 Expected:2 Got:4
63764 Shift 0, v:4 sv:4 min:2 cur:2
63765 Out of Range Block::1 bo:219 Volume offset:731 Expected:2 Got:4
63766 Shift 0, v:4 sv:4 min:2 cur:2
63767 Out of Range Block::1 bo:220 Volume offset:732 Expected:2 Got:4
63768 Shift 0, v:4 sv:4 min:2 cur:2
63769 Out of Range Block::1 bo:221 Volume offset:733 Expected:2 Got:4
63770 Shift 0, v:4 sv:4 min:2 cur:2
63771 Out of Range Block::1 bo:222 Volume offset:734 Expected:2 Got:4
63772 Shift 0, v:4 sv:4 min:2 cur:2
63773 Out of Range Block::1 bo:223 Volume offset:735 Expected:2 Got:4
63774 Shift 0, v:4 sv:4 min:2 cur:2
63775 Out of Range Block::1 bo:224 Volume offset:736 Expected:2 Got:4
63776 Shift 0, v:4 sv:4 min:2 cur:2
63777 Out of Range Block::1 bo:225 Volume offset:737 Expected:2 Got:4
63778 Shift 0, v:4 sv:4 min:2 cur:2
63779 Out of Range Block::1 bo:226 Volume offset:738 Expected:2 Got:4
63780 Shift 0, v:4 sv:4 min:2 cur:2
63781 Out of Range Block::1 bo:227 Volume offset:739 Expected:2 Got:4
63782 Shift 0, v:4 sv:4 min:2 cur:2
63783 Out of Range Block::1 bo:228 Volume offset:740 Expected:2 Got:4
63784 Shift 0, v:4 sv:4 min:2 cur:2
63785 Out of Range Block::1 bo:229 Volume offset:741 Expected:2 Got:4
63786 Shift 0, v:4 sv:4 min:2 cur:2
63787 Out of Range Block::1 bo:230 Volume offset:742 Expected:2 Got:4
63788 Shift 0, v:4 sv:4 min:2 cur:2
63789 Out of Range Block::1 bo:231 Volume offset:743 Expected:2 Got:4
63790 Shift 0, v:4 sv:4 min:2 cur:2
63791 Out of Range Block::1 bo:232 Volume offset:744 Expected:2 Got:4
63792 Shift 0, v:4 sv:4 min:2 cur:2
63793 Out of Range Block::1 bo:233 Volume offset:745 Expected:2 Got:4
63794 Shift 0, v:4 sv:4 min:2 cur:2
63795 Out of Range Block::1 bo:234 Volume offset:746 Expected:2 Got:4
63796 Shift 0, v:4 sv:4 min:2 cur:2
63797 Out of Range Block::1 bo:235 Volume offset:747 Expected:2 Got:4
63798 Shift 0, v:4 sv:4 min:2 cur:2
63799 Out of Range Block::1 bo:236 Volume offset:748 Expected:2 Got:4
63800 Shift 0, v:4 sv:4 min:2 cur:2
63801 Out of Range Block::1 bo:237 Volume offset:749 Expected:2 Got:4
63802 Shift 0, v:4 sv:4 min:2 cur:2
63803 Out of Range Block::1 bo:238 Volume offset:750 Expected:2 Got:4
63804 Shift 0, v:4 sv:4 min:2 cur:2
63805 Out of Range Block::1 bo:239 Volume offset:751 Expected:2 Got:4
63806 Shift 0, v:4 sv:4 min:2 cur:2
63807 Out of Range Block::1 bo:240 Volume offset:752 Expected:2 Got:4
63808 Shift 0, v:4 sv:4 min:2 cur:2
63809 Out of Range Block::1 bo:241 Volume offset:753 Expected:2 Got:4
63810 Shift 0, v:4 sv:4 min:2 cur:2
63811 Out of Range Block::1 bo:242 Volume offset:754 Expected:2 Got:4
63812 Shift 0, v:4 sv:4 min:2 cur:2
63813 Out of Range Block::1 bo:243 Volume offset:755 Expected:2 Got:4
63814 Shift 0, v:4 sv:4 min:2 cur:2
63815 Out of Range Block::1 bo:244 Volume offset:756 Expected:2 Got:4
63816 Shift 0, v:4 sv:4 min:2 cur:2
63817 Out of Range Block::1 bo:245 Volume offset:757 Expected:2 Got:4
63818 Shift 0, v:4 sv:4 min:2 cur:2
63819 Out of Range Block::1 bo:246 Volume offset:758 Expected:2 Got:4
63820 Shift 0, v:4 sv:4 min:2 cur:2
63821 Out of Range Block::1 bo:247 Volume offset:759 Expected:2 Got:4
63822 Shift 0, v:4 sv:4 min:2 cur:2
63823 Out of Range Block::1 bo:248 Volume offset:760 Expected:2 Got:4
63824 Shift 0, v:4 sv:4 min:2 cur:2
63825 Out of Range Block::1 bo:249 Volume offset:761 Expected:2 Got:4
63826 Shift 0, v:4 sv:4 min:2 cur:2
63827 Out of Range Block::1 bo:250 Volume offset:762 Expected:2 Got:4
63828 Shift 0, v:4 sv:4 min:2 cur:2
63829 Out of Range Block::1 bo:251 Volume offset:763 Expected:2 Got:4
63830 Shift 0, v:4 sv:4 min:2 cur:2
63831 Out of Range Block::1 bo:252 Volume offset:764 Expected:2 Got:4
63832 Shift 0, v:4 sv:4 min:2 cur:2
63833 Out of Range Block::1 bo:253 Volume offset:765 Expected:2 Got:4
63834 Shift 0, v:4 sv:4 min:2 cur:2
63835 Out of Range Block::1 bo:254 Volume offset:766 Expected:2 Got:4
63836 Shift 0, v:4 sv:4 min:2 cur:2
63837 Out of Range Block::1 bo:255 Volume offset:767 Expected:2 Got:4
63838 Shift 0, v:4 sv:4 min:2 cur:2
63839 Out of Range Block::1 bo:256 Volume offset:768 Expected:2 Got:4
63840 Shift 0, v:4 sv:4 min:2 cur:2
63841 Out of Range Block::1 bo:257 Volume offset:769 Expected:2 Got:4
63842 Shift 0, v:4 sv:4 min:2 cur:2
63843 Out of Range Block::1 bo:258 Volume offset:770 Expected:2 Got:4
63844 Shift 0, v:4 sv:4 min:2 cur:2
63845 Out of Range Block::1 bo:259 Volume offset:771 Expected:2 Got:4
63846 Shift 0, v:4 sv:4 min:2 cur:2
63847 Out of Range Block::1 bo:260 Volume offset:772 Expected:2 Got:4
63848 Shift 0, v:4 sv:4 min:2 cur:2
63849 Out of Range Block::1 bo:261 Volume offset:773 Expected:2 Got:4
63850 Shift 0, v:4 sv:4 min:2 cur:2
63851 Out of Range Block::1 bo:262 Volume offset:774 Expected:2 Got:4
63852 Shift 0, v:4 sv:4 min:2 cur:2
63853 Out of Range Block::1 bo:263 Volume offset:775 Expected:2 Got:4
63854 Shift 0, v:4 sv:4 min:2 cur:2
63855 Out of Range Block::1 bo:264 Volume offset:776 Expected:2 Got:4
63856 okShift 0, v:4 sv:4 min:2 cur:2
63857 Out of Range Block::1 bo:265 Volume offset:777 Expected:2 Got:4
63858 Shift 0, v:4 sv:4 min:2 cur:2
63859 Out of Range Block::1 bo:266 Volume offset:778 Expected:2 Got:4
63860 Shift 0, v:4 sv:4 min:2 cur:2
63861 Out of Range Block::1 bo:267 Volume offset:779 Expected:2 Got:4
63862 Shift 0, v:4 sv:4 min:2 cur:2
63863 Out of Range Block::1 bo:268 Volume offset:780 Expected:2 Got:4
63864 Shift 0, v:4 sv:4 min:2 cur:2
63865 Out of Range Block::1 bo:269 Volume offset:781 Expected:2 Got:4
63866 Shift 0, v:4 sv:4 min:2 cur:2
63867 Out of Range Block::1 bo:270 Volume offset:782 Expected:2 Got:4
63868 Shift 0, v:4 sv:4 min:2 cur:2
63869 Out of Range Block::1 bo:271 Volume offset:783 Expected:2 Got:4
63870 Shift 0, v:4 sv:4 min:2 cur:2
63871 Out of Range Block::1 bo:272 Volume offset:784 Expected:2 Got:4
63872 Shift 0, v:4 sv:4 min:2 cur:2
63873 Out of Range Block::1 bo:273 Volume offset:785 Expected:2 Got:4
63874 Shift 0, v:4 sv:4 min:2 cur:2
63875 Out of Range Block::1 bo:274 Volume offset:786 Expected:2 Got:4
63876 Shift 0, v:4 sv:4 min:2 cur:2
63877 Out of Range Block::1 bo:275 Volume offset:787 Expected:2 Got:4
63878 Shift 0, v:4 sv:4 min:2 cur:2
63879 Out of Range Block::1 bo:276 Volume offset:788 Expected:2 Got:4
63880 Shift 0, v:4 sv:4 min:2 cur:2
63881 Out of Range Block::1 bo:277 Volume offset:789 Expected:2 Got:4
63882 Shift 0, v:4 sv:4 min:2 cur:2
63883 Out of Range Block::1 bo:278 Volume offset:790 Expected:2 Got:4
63884 Shift 0, v:4 sv:4 min:2 cur:2
63885 Out of Range Block::1 bo:279 Volume offset:791 Expected:2 Got:4
63886 Shift 0, v:4 sv:4 min:2 cur:2
63887 Out of Range Block::1 bo:280 Volume offset:792 Expected:2 Got:4
63888 Shift 0, v:4 sv:4 min:2 cur:2
63889 Out of Range Block::1 bo:281 Volume offset:793 Expected:2 Got:4
63890 Shift 0, v:4 sv:4 min:2 cur:2
63891 Out of Range Block::1 bo:282 Volume offset:794 Expected:2 Got:4
63892 Shift 0, v:4 sv:4 min:2 cur:2
63893 Out of Range Block::1 bo:283 Volume offset:795 Expected:2 Got:4
63894 Shift 0, v:4 sv:4 min:2 cur:2
63895 Out of Range Block::1 bo:284 Volume offset:796 Expected:2 Got:4
63896 Shift 0, v:4 sv:4 min:2 cur:2
63897 Out of Range Block::1 bo:285 Volume offset:797 Expected:2 Got:4
63898 Shift 0, v:4 sv:4 min:2 cur:2
63899 Out of Range Block::1 bo:286 Volume offset:798 Expected:2 Got:4
63900 Shift 0, v:4 sv:4 min:2 cur:2
63901 Out of Range Block::1 bo:287 Volume offset:799 Expected:2 Got:4
63902 Shift 0, v:4 sv:4 min:2 cur:2
63903 Out of Range Block::1 bo:288 Volume offset:800 Expected:2 Got:4
63904 Shift 0, v:4 sv:4 min:2 cur:2
63905 Out of Range Block::1 bo:289 Volume offset:801 Expected:2 Got:4
63906 Shift 0, v:4 sv:4 min:2 cur:2
63907 Out of Range Block::1 bo:290 Volume offset:802 Expected:2 Got:4
63908 Shift 0, v:4 sv:4 min:2 cur:2
63909 Out of Range Block::1 bo:291 Volume offset:803 Expected:2 Got:4
63910 Shift 0, v:4 sv:4 min:2 cur:2
63911 Out of Range Block::1 bo:292 Volume offset:804 Expected:2 Got:4
63912 Shift 0, v:4 sv:4 min:2 cur:2
63913 Out of Range Block::1 bo:293 Volume offset:805 Expected:2 Got:4
63914 Shift 0, v:4 sv:4 min:2 cur:2
63915 Out of Range Block::1 bo:294 Volume offset:806 Expected:2 Got:4
63916 Shift 0, v:4 sv:4 min:2 cur:2
63917 Out of Range Block::1 bo:295 Volume offset:807 Expected:2 Got:4
63918 
63919 Shift 0, v:4 sv:4 min:2 cur:2
63920 Out of Range Block::1 bo:296 Volume offset:808 Expected:2 Got:4
63921 Shift 0, v:4 sv:4 min:2 cur:2
63922 Out of Range Block::1 bo:297 Volume offset:809 Expected:2 Got:4
63923 Shift 0, v:4 sv:4 min:2 cur:2
63924 Out of Range Block::1 bo:298 Volume offset:810 Expected:2 Got:4
63925 Shift 0, v:4 sv:4 min:2 cur:2
63926 Out of Range Block::1 bo:299 Volume offset:811 Expected:2 Got:4
63927 Shift 0, v:4 sv:4 min:2 cur:2
63928 Out of Range Block::1 bo:300 Volume offset:812 Expected:2 Got:4
63929 Shift 0, v:4 sv:4 min:2 cur:2
63930 Out of Range Block::1 bo:301 Volume offset:813 Expected:2 Got:4
63931 Shift 0, v:4 sv:4 min:2 cur:2
63932 Out of Range Block::1 bo:302 Volume offset:814 Expected:2 Got:4
63933 Shift 0, v:4 sv:4 min:2 cur:2
63934 Out of Range Block::1 bo:303 Volume offset:815 Expected:2 Got:4
63935 Shift 0, v:4 sv:4 min:2 cur:2
63936 Out of Range Block::1 bo:304 Volume offset:816 Expected:2 Got:4
63937 Shift 0, v:4 sv:4 min:2 cur:2
63938 Out of Range Block::1 bo:305 Volume offset:817 Expected:2 Got:4
63939 Shift 0, v:4 sv:4 min:2 cur:2
63940 Out of Range Block::1 bo:306 Volume offset:818 Expected:2 Got:4
63941 Shift 0, v:4 sv:4 min:2 cur:2
63942 Out of Range Block::1 bo:307 Volume offset:819 Expected:2 Got:4
63943 Shift 0, v:4 sv:4 min:2 cur:2
63944 Out of Range Block::1 bo:308 Volume offset:820 Expected:2 Got:4
63945 Shift 0, v:4 sv:4 min:2 cur:2
63946 Out of Range Block::1 bo:309 Volume offset:821 Expected:2 Got:4
63947 Shift 0, v:4 sv:4 min:2 cur:2
63948 Out of Range Block::1 bo:310 Volume offset:822 Expected:2 Got:4
63949 Shift 0, v:4 sv:4 min:2 cur:2
63950 Out of Range Block::1 bo:311 Volume offset:823 Expected:2 Got:4
63951 Shift 0, v:4 sv:4 min:2 cur:2
63952 Out of Range Block::1 bo:312 Volume offset:824 Expected:2 Got:4
63953 Shift 0, v:4 sv:4 min:2 cur:2
63954 Out of Range Block::1 bo:313 Volume offset:825 Expected:2 Got:4
63955 Shift 0, v:4 sv:4 min:2 cur:2
63956 Out of Range Block::1 bo:314 Volume offset:826 Expected:2 Got:4
63957 Shift 0, v:4 sv:4 min:2 cur:2
63958 Out of Range Block::1 bo:315 Volume offset:827 Expected:2 Got:4
63959 Shift 0, v:4 sv:4 min:2 cur:2
63960 Out of Range Block::1 bo:316 Volume offset:828 Expected:2 Got:4
63961 Shift 0, v:4 sv:4 min:2 cur:2
63962 Out of Range Block::1 bo:317 Volume offset:829 Expected:2 Got:4
63963 Shift 0, v:4 sv:4 min:2 cur:2
63964 Out of Range Block::1 bo:318 Volume offset:830 Expected:2 Got:4
63965 Shift 0, v:4 sv:4 min:2 cur:2
63966 Out of Range Block::1 bo:319 Volume offset:831 Expected:2 Got:4
63967 Shift 0, v:4 sv:4 min:2 cur:2
63968 Out of Range Block::1 bo:320 Volume offset:832 Expected:2 Got:4
63969 Shift 0, v:4 sv:4 min:2 cur:2
63970 Out of Range Block::1 bo:321 Volume offset:833 Expected:2 Got:4
63971 Shift 0, v:4 sv:4 min:2 cur:2
63972 Out of Range Block::1 bo:322 Volume offset:834 Expected:2 Got:4
63973 Shift 0, v:4 sv:4 min:2 cur:2
63974 Out of Range Block::1 bo:323 Volume offset:835 Expected:2 Got:4
63975 Shift 0, v:4 sv:4 min:2 cur:2
63976 Out of Range Block::1 bo:324 Volume offset:836 Expected:2 Got:4
63977 Shift 0, v:4 sv:4 min:2 cur:2
63978 Out of Range Block::1 bo:325 Volume offset:837 Expected:2 Got:4
63979 Shift 0, v:4 sv:4 min:2 cur:2
63980 Out of Range Block::1 bo:326 Volume offset:838 Expected:2 Got:4
63981 Shift 0, v:4 sv:4 min:2 cur:2
63982 Out of Range Block::1 bo:327 Volume offset:839 Expected:2 Got:4
63983 Shift 0, v:4 sv:4 min:2 cur:2
63984 Out of Range Block::1 bo:328 Volume offset:840 Expected:2 Got:4
63985 Shift 0, v:4 sv:4 min:2 cur:2
63986 Out of Range Block::1 bo:329 Volume offset:841 Expected:2 Got:4
63987 Shift 0, v:4 sv:4 min:2 cur:2
63988 Out of Range Block::1 bo:330 Volume offset:842 Expected:2 Got:4
63989 Shift 0, v:4 sv:4 min:2 cur:2
63990 Out of Range Block::1 bo:331 Volume offset:843 Expected:2 Got:4
63991 Shift 0, v:4 sv:4 min:2 cur:2
63992 Out of Range Block::1 bo:332 Volume offset:844 Expected:2 Got:4
63993 Shift 0, v:4 sv:4 min:2 cur:2
63994 Out of Range Block::1 bo:333 Volume offset:845 Expected:2 Got:4
63995 Shift 0, v:4 sv:4 min:2 cur:2
63996 Out of Range Block::1 bo:334 Volume offset:846 Expected:2 Got:4
63997 Shift 0, v:4 sv:4 min:2 cur:2
63998 Out of Range Block::1 bo:335 Volume offset:847 Expected:2 Got:4
63999 Shift 0, v:4 sv:4 min:2 cur:2
64000 Out of Range Block::1 bo:336 Volume offset:848 Expected:2 Got:4
64001 Shift 0, v:4 sv:4 min:2 cur:2
64002 Out of Range Block::1 bo:337 Volume offset:849 Expected:2 Got:4
64003 Shift 0, v:4 sv:4 min:2 cur:2
64004 Out of Range Block::1 bo:338 Volume offset:850 Expected:2 Got:4
64005 Shift 0, v:4 sv:4 min:2 cur:2
64006 Out of Range Block::1 bo:339 Volume offset:851 Expected:2 Got:4
64007 Shift 0, v:4 sv:4 min:2 cur:2
64008 Out of Range Block::1 bo:340 Volume offset:852 Expected:2 Got:4
64009 Shift 0, v:4 sv:4 min:2 cur:2
64010 Out of Range Block::1 bo:341 Volume offset:853 Expected:2 Got:4
64011 Shift 0, v:4 sv:4 min:2 cur:2
64012 Out of Range Block::1 bo:342 Volume offset:854 Expected:2 Got:4
64013 Shift 0, v:4 sv:4 min:2 cur:2
64014 Out of Range Block::1 bo:343 Volume offset:855 Expected:2 Got:4
64015 Shift 0, v:4 sv:4 min:2 cur:2
64016 Out of Range Block::1 bo:344 Volume offset:856 Expected:2 Got:4
64017 Shift 0, v:4 sv:4 min:2 cur:2
64018 Out of Range Block::1 bo:345 Volume offset:857 Expected:2 Got:4
64019 Shift 0, v:4 sv:4 min:2 cur:2
64020 Out of Range Block::1 bo:346 Volume offset:858 Expected:2 Got:4
64021 Shift 0, v:4 sv:4 min:2 cur:2
64022 Out of Range Block::1 bo:347 Volume offset:859 Expected:2 Got:4
64023 Shift 0, v:4 sv:4 min:2 cur:2
64024 Out of Range Block::1 bo:348 Volume offset:860 Expected:2 Got:4
64025 Shift 0, v:4 sv:4 min:2 cur:2
64026 Out of Range Block::1 bo:349 Volume offset:861 Expected:2 Got:4
64027 Shift 0, v:4 sv:4 min:2 cur:2
64028 Out of Range Block::1 bo:350 Volume offset:862 Expected:2 Got:4
64029 Shift 0, v:4 sv:4 min:2 cur:2
64030 Out of Range Block::1 bo:351 Volume offset:863 Expected:2 Got:4
64031 Shift 0, v:4 sv:4 min:2 cur:2
64032 Out of Range Block::1 bo:352 Volume offset:864 Expected:2 Got:4
64033 Shift 0, v:4 sv:4 min:2 cur:2
64034 Out of Range Block::1 bo:353 Volume offset:865 Expected:2 Got:4
64035 Shift 0, v:4 sv:4 min:2 cur:2
64036 Out of Range Block::1 bo:354 Volume offset:866 Expected:2 Got:4
64037 Shift 0, v:4 sv:4 min:2 cur:2
64038 Out of Range Block::1 bo:355 Volume offset:867 Expected:2 Got:4
64039 Shift 0, v:4 sv:4 min:2 cur:2
64040 Out of Range Block::1 bo:356 Volume offset:868 Expected:2 Got:4
64041 Shift 0, v:4 sv:4 min:2 cur:2
64042 Out of Range Block::1 bo:357 Volume offset:869 Expected:2 Got:4
64043 Shift 0, v:4 sv:4 min:2 cur:2
64044 Out of Range Block::1 bo:358 Volume offset:870 Expected:2 Got:4
64045 Shift 0, v:4 sv:4 min:2 cur:2
64046 Out of Range Block::1 bo:359 Volume offset:871 Expected:2 Got:4
64047 Shift 0, v:4 sv:4 min:2 cur:2
64048 Out of Range Block::1 bo:360 Volume offset:872 Expected:2 Got:4
64049 Shift 0, v:4 sv:4 min:2 cur:2
64050 Out of Range Block::1 bo:361 Volume offset:873 Expected:2 Got:4
64051 Shift 0, v:4 sv:4 min:2 cur:2
64052 Out of Range Block::1 bo:362 Volume offset:874 Expected:2 Got:4
64053 Shift 0, v:4 sv:4 min:2 cur:2
64054 Out of Range Block::1 bo:363 Volume offset:875 Expected:2 Got:4
64055 Shift 0, v:4 sv:4 min:2 cur:2
64056 Out of Range Block::1 bo:364 Volume offset:876 Expected:2 Got:4
64057 Shift 0, v:4 sv:4 min:2 cur:2
64058 Out of Range Block::1 bo:365 Volume offset:877 Expected:2 Got:4
64059 Shift 0, v:4 sv:4 min:2 cur:2
64060 Out of Range Block::1 bo:366 Volume offset:878 Expected:2 Got:4
64061 Shift 0, v:4 sv:4 min:2 cur:2
64062 Out of Range Block::1 bo:367 Volume offset:879 Expected:2 Got:4
64063 Shift 0, v:4 sv:4 min:2 cur:2
64064 Out of Range Block::1 bo:368 Volume offset:880 Expected:2 Got:4
64065 Shift 0, v:4 sv:4 min:2 cur:2
64066 Out of Range Block::1 bo:369 Volume offset:881 Expected:2 Got:4
64067 Shift 0, v:4 sv:4 min:2 cur:2
64068 Out of Range Block::1 bo:370 Volume offset:882 Expected:2 Got:4
64069 Shift 0, v:4 sv:4 min:2 cur:2
64070 Out of Range Block::1 bo:371 Volume offset:883 Expected:2 Got:4
64071 Shift 0, v:4 sv:4 min:2 cur:2
64072 Out of Range Block::1 bo:372 Volume offset:884 Expected:2 Got:4
64073 Shift 0, v:4 sv:4 min:2 cur:2
64074 Out of Range Block::1 bo:373 Volume offset:885 Expected:2 Got:4
64075 Shift 0, v:4 sv:4 min:2 cur:2
64076 Out of Range Block::1 bo:374 Volume offset:886 Expected:2 Got:4
64077 Shift 0, v:4 sv:4 min:2 cur:2
64078 Out of Range Block::1 bo:375 Volume offset:887 Expected:2 Got:4
64079 Shift 0, v:4 sv:4 min:2 cur:2
64080 Out of Range Block::1 bo:376 Volume offset:888 Expected:2 Got:4
64081 Shift 0, v:4 sv:4 min:2 cur:2
64082 Out of Range Block::1 bo:377 Volume offset:889 Expected:2 Got:4
64083 Shift 0, v:4 sv:4 min:2 cur:2
64084 Out of Range Block::1 bo:378 Volume offset:890 Expected:2 Got:4
64085 Shift 0, v:4 sv:4 min:2 cur:2
64086 Out of Range Block::1 bo:379 Volume offset:891 Expected:2 Got:4
64087 Shift 0, v:4 sv:4 min:2 cur:2
64088 Out of Range Block::1 bo:380 Volume offset:892 Expected:2 Got:4
64089 Shift 0, v:4 sv:4 min:2 cur:2
64090 Out of Range Block::1 bo:381 Volume offset:893 Expected:2 Got:4
64091 Shift 0, v:4 sv:4 min:2 cur:2
64092 Out of Range Block::1 bo:382 Volume offset:894 Expected:2 Got:4
64093 Shift 0, v:4 sv:4 min:2 cur:2
64094 Out of Range Block::1 bo:383 Volume offset:895 Expected:2 Got:4
64095 Shift 0, v:4 sv:4 min:2 cur:2
64096 Out of Range Block::1 bo:384 Volume offset:896 Expected:2 Got:4
64097 Shift 0, v:4 sv:4 min:2 cur:2
64098 Out of Range Block::1 bo:385 Volume offset:897 Expected:2 Got:4
64099 Shift 0, v:4 sv:4 min:2 cur:2
64100 Out of Range Block::1 bo:386 Volume offset:898 Expected:2 Got:4
64101 Shift 0, v:4 sv:4 min:2 cur:2
64102 Out of Range Block::1 bo:387 Volume offset:899 Expected:2 Got:4
64103 Shift 0, v:4 sv:4 min:2 cur:2
64104 Out of Range Block::1 bo:388 Volume offset:900 Expected:2 Got:4
64105 Shift 0, v:4 sv:4 min:2 cur:2
64106 Out of Range Block::1 bo:389 Volume offset:901 Expected:2 Got:4
64107 Shift 0, v:4 sv:4 min:2 cur:2
64108 Out of Range Block::1 bo:390 Volume offset:902 Expected:2 Got:4
64109 Shift 0, v:4 sv:4 min:2 cur:2
64110 Out of Range Block::1 bo:391 Volume offset:903 Expected:2 Got:4
64111 Shift 0, v:4 sv:4 min:2 cur:2
64112 test test::test_wl_update_commit_2 ... Out of Range Block::1 bo:392 Volume offset:904 Expected:2 Got:4
64113 Shift 0, v:4 sv:4 min:2 cur:2
64114 Out of Range Block::1 bo:393 Volume offset:905 Expected:2 Got:4
64115 Shift 0, v:4 sv:4 min:2 cur:2
64116 Out of Range Block::1 bo:394 Volume offset:906 Expected:2 Got:4
64117 Shift 0, v:4 sv:4 min:2 cur:2
64118 Out of Range Block::1 bo:395 Volume offset:907 Expected:2 Got:4
64119 Shift 0, v:4 sv:4 min:2 cur:2
64120 Out of Range Block::1 bo:396 Volume offset:908 Expected:2 Got:4
64121 Shift 0, v:4 sv:4 min:2 cur:2
64122 Out of Range Block::1 bo:397 Volume offset:909 Expected:2 Got:4
64123 Shift 0, v:4 sv:4 min:2 cur:2
64124 Out of Range Block::1 bo:398 Volume offset:910 Expected:2 Got:4
64125 Shift 0, v:4 sv:4 min:2 cur:2
64126 Out of Range Block::1 bo:399 Volume offset:911 Expected:2 Got:4
64127 Shift 0, v:4 sv:4 min:2 cur:2
64128 Out of Range Block::1 bo:400 Volume offset:912 Expected:2 Got:4
64129 Shift 0, v:4 sv:4 min:2 cur:2
64130 Out of Range Block::1 bo:401 Volume offset:913 Expected:2 Got:4
64131 Shift 0, v:4 sv:4 min:2 cur:2
64132 Out of Range Block::1 bo:402 Volume offset:914 Expected:2 Got:4
64133 Shift 0, v:4 sv:4 min:2 cur:2
64134 Out of Range Block::1 bo:403 Volume offset:915 Expected:2 Got:4
64135 Shift 0, v:4 sv:4 min:2 cur:2
64136 Out of Range Block::1 bo:404 Volume offset:916 Expected:2 Got:4
64137 Shift 0, v:4 sv:4 min:2 cur:2
64138 Out of Range Block::1 bo:405 Volume offset:917 Expected:2 Got:4
64139 Shift 0, v:4 sv:4 min:2 cur:2
64140 Out of Range Block::1 bo:406 Volume offset:918 Expected:2 Got:4
64141 Shift 0, v:4 sv:4 min:2 cur:2
64142 Out of Range Block::1 bo:407 Volume offset:919 Expected:2 Got:4
64143 Shift 0, v:4 sv:4 min:2 cur:2
64144 Out of Range Block::1 bo:408 Volume offset:920 Expected:2 Got:4
64145 Shift 0, v:4 sv:4 min:2 cur:2
64146 Out of Range Block::1 bo:409 Volume offset:921 Expected:2 Got:4
64147 okShift 0, v:4 sv:4 min:2 cur:2
64148 Out of Range Block::1 bo:410 Volume offset:922 Expected:2 Got:4
64149 Shift 0, v:4 sv:4 min:2 cur:2
64150 Out of Range Block::1 bo:411 Volume offset:923 Expected:2 Got:4
64151 Shift 0, v:4 sv:4 min:2 cur:2
64152 Out of Range Block::1 bo:412 Volume offset:924 Expected:2 Got:4
64153 Shift 0, v:4 sv:4 min:2 cur:2
64154 Out of Range Block::1 bo:413 Volume offset:925 Expected:2 Got:4
64155 Shift 0, v:4 sv:4 min:2 cur:2
64156 Out of Range Block::1 bo:414 Volume offset:926 Expected:2 Got:4
64157 Shift 0, v:4 sv:4 min:2 cur:2
64158 Out of Range Block::1 bo:415 Volume offset:927 Expected:2 Got:4
64159 Shift 0, v:4 sv:4 min:2 cur:2
64160 Out of Range Block::1 bo:416 Volume offset:928 Expected:2 Got:4
64161 Shift 0, v:4 sv:4 min:2 cur:2
64162 Out of Range Block::1 bo:417 Volume offset:929 Expected:2 Got:4
64163 Shift 0, v:4 sv:4 min:2 cur:2
64164 Out of Range Block::1 bo:418 Volume offset:930 Expected:2 Got:4
64165 Shift 0, v:4 sv:4 min:2 cur:2
64166 Out of Range Block::1 bo:419 Volume offset:931 Expected:2 Got:4
64167 Shift 0, v:4 sv:4 min:2 cur:2
64168 Out of Range Block::1 bo:420 Volume offset:932 Expected:2 Got:4
64169 Shift 0, v:4 sv:4 min:2 cur:2
64170 Out of Range Block::1 bo:421 Volume offset:933 Expected:2 Got:4
64171 Shift 0, v:4 sv:4 min:2 cur:2
64172 Out of Range Block::1 bo:422 Volume offset:934 Expected:2 Got:4
64173 Shift 0, v:4 sv:4 min:2 cur:2
64174 Out of Range Block::1 bo:423 Volume offset:935 Expected:2 Got:4
64175 Shift 0, v:4 sv:4 min:2 cur:2
64176 Out of Range Block::1 bo:424 Volume offset:936 Expected:2 Got:4
64177 Shift 0, v:4 sv:4 min:2 cur:2
64178 Out of Range Block::1 bo:425 Volume offset:937 Expected:2 Got:4
64179 
64180 Shift 0, v:4 sv:4 min:2 cur:2
64181 Out of Range Block::1 bo:426 Volume offset:938 Expected:2 Got:4
64182 Shift 0, v:4 sv:4 min:2 cur:2
64183 Out of Range Block::1 bo:427 Volume offset:939 Expected:2 Got:4
64184 Shift 0, v:4 sv:4 min:2 cur:2
64185 Out of Range Block::1 bo:428 Volume offset:940 Expected:2 Got:4
64186 Shift 0, v:4 sv:4 min:2 cur:2
64187 Out of Range Block::1 bo:429 Volume offset:941 Expected:2 Got:4
64188 Shift 0, v:4 sv:4 min:2 cur:2
64189 Out of Range Block::1 bo:430 Volume offset:942 Expected:2 Got:4
64190 Shift 0, v:4 sv:4 min:2 cur:2
64191 Out of Range Block::1 bo:431 Volume offset:943 Expected:2 Got:4
64192 Shift 0, v:4 sv:4 min:2 cur:2
64193 Out of Range Block::1 bo:432 Volume offset:944 Expected:2 Got:4
64194 Shift 0, v:4 sv:4 min:2 cur:2
64195 Out of Range Block::1 bo:433 Volume offset:945 Expected:2 Got:4
64196 Shift 0, v:4 sv:4 min:2 cur:2
64197 Out of Range Block::1 bo:434 Volume offset:946 Expected:2 Got:4
64198 Shift 0, v:4 sv:4 min:2 cur:2
64199 Out of Range Block::1 bo:435 Volume offset:947 Expected:2 Got:4
64200 Shift 0, v:4 sv:4 min:2 cur:2
64201 Out of Range Block::1 bo:436 Volume offset:948 Expected:2 Got:4
64202 Shift 0, v:4 sv:4 min:2 cur:2
64203 Out of Range Block::1 bo:437 Volume offset:949 Expected:2 Got:4
64204 Shift 0, v:4 sv:4 min:2 cur:2
64205 Out of Range Block::1 bo:438 Volume offset:950 Expected:2 Got:4
64206 Shift 0, v:4 sv:4 min:2 cur:2
64207 Out of Range Block::1 bo:439 Volume offset:951 Expected:2 Got:4
64208 Shift 0, v:4 sv:4 min:2 cur:2
64209 Out of Range Block::1 bo:440 Volume offset:952 Expected:2 Got:4
64210 Shift 0, v:4 sv:4 min:2 cur:2
64211 Out of Range Block::1 bo:441 Volume offset:953 Expected:2 Got:4
64212 Shift 0, v:4 sv:4 min:2 cur:2
64213 Out of Range Block::1 bo:442 Volume offset:954 Expected:2 Got:4
64214 test test::test_wl_update_commit_rollover ... Shift 0, v:4 sv:4 min:2 cur:2
64215 Out of Range Block::1 bo:443 Volume offset:955 Expected:2 Got:4
64216 Shift 0, v:4 sv:4 min:2 cur:2
64217 Out of Range Block::1 bo:444 Volume offset:956 Expected:2 Got:4
64218 Shift 0, v:4 sv:4 min:2 cur:2
64219 Out of Range Block::1 bo:445 Volume offset:957 Expected:2 Got:4
64220 Shift 0, v:4 sv:4 min:2 cur:2
64221 Out of Range Block::1 bo:446 Volume offset:958 Expected:2 Got:4
64222 Shift 0, v:4 sv:4 min:2 cur:2
64223 Out of Range Block::1 bo:447 Volume offset:959 Expected:2 Got:4
64224 Shift 0, v:4 sv:4 min:2 cur:2
64225 Out of Range Block::1 bo:448 Volume offset:960 Expected:2 Got:4
64226 Shift 0, v:4 sv:4 min:2 cur:2
64227 Out of Range Block::1 bo:449 Volume offset:961 Expected:2 Got:4
64228 Shift 0, v:4 sv:4 min:2 cur:2
64229 Out of Range Block::1 bo:450 Volume offset:962 Expected:2 Got:4
64230 Shift 0, v:4 sv:4 min:2 cur:2
64231 Out of Range Block::1 bo:451 Volume offset:963 Expected:2 Got:4
64232 Shift 0, v:4 sv:4 min:2 cur:2
64233 Out of Range Block::1 bo:452 Volume offset:964 Expected:2 Got:4
64234 Shift 0, v:4 sv:4 min:2 cur:2
64235 Out of Range Block::1 bo:453 Volume offset:965 Expected:2 Got:4
64236 Shift 0, v:4 sv:4 min:2 cur:2
64237 Out of Range Block::1 bo:454 Volume offset:966 Expected:2 Got:4
64238 okShift 0, v:4 sv:4 min:2 cur:2
64239 Out of Range Block::1 bo:455 Volume offset:967 Expected:2 Got:4
64240 Shift 0, v:4 sv:4 min:2 cur:2
64241 Out of Range Block::1 bo:456 Volume offset:968 Expected:2 Got:4
64242 Shift 0, v:4 sv:4 min:2 cur:2
64243 Out of Range Block::1 bo:457 Volume offset:969 Expected:2 Got:4
64244 Shift 0, v:4 sv:4 min:2 cur:2
64245 Out of Range Block::1 bo:458 Volume offset:970 Expected:2 Got:4
64246 Shift 0, v:4 sv:4 min:2 cur:2
64247 Out of Range Block::1 bo:459 Volume offset:971 Expected:2 Got:4
64248 Shift 0, v:4 sv:4 min:2 cur:2
64249 Out of Range Block::1 bo:460 Volume offset:972 Expected:2 Got:4
64250 Shift 0, v:4 sv:4 min:2 cur:2
64251 Out of Range Block::1 bo:461 Volume offset:973 Expected:2 Got:4
64252 Shift 0, v:4 sv:4 min:2 cur:2
64253 Out of Range Block::1 bo:462 Volume offset:974 Expected:2 Got:4
64254 Shift 0, v:4 sv:4 min:2 cur:2
64255 Out of Range Block::1 bo:463 Volume offset:975 Expected:2 Got:4
64256 Shift 0, v:4 sv:4 min:2 cur:2
64257 Out of Range Block::1 bo:464 Volume offset:976 Expected:2 Got:4
64258 Shift 0, v:4 sv:4 min:2 cur:2
64259 Out of Range Block::1 bo:465 Volume offset:977 Expected:2 Got:4
64260 Shift 0, v:4 sv:4 min:2 cur:2
64261 Out of Range Block::1 bo:466 Volume offset:978 Expected:2 Got:4
64262 Shift 0, v:4 sv:4 min:2 cur:2
64263 Out of Range Block::1 bo:467 Volume offset:979 Expected:2 Got:4
64264 Shift 0, v:4 sv:4 min:2 cur:2
64265 Out of Range Block::1 bo:468 Volume offset:980 Expected:2 Got:4
64266 Shift 0, v:4 sv:4 min:2 cur:2
64267 Out of Range Block::1 bo:469 Volume offset:981 Expected:2 Got:4
64268 Shift 0, v:4 sv:4 min:2 cur:2
64269 Out of Range Block::1 bo:470 Volume offset:982 Expected:2 Got:4
64270 Shift 0, v:4 sv:4 min:2 cur:2
64271 Out of Range Block::1 bo:471 Volume offset:983 Expected:2 Got:4
64272 Shift 0, v:4 sv:4 min:2 cur:2
64273 Out of Range Block::1 bo:472 Volume offset:984 Expected:2 Got:4
64274 Shift 0, v:4 sv:4 min:2 cur:2
64275 Out of Range Block::1 bo:473 Volume offset:985 Expected:2 Got:4
64276 Shift 0, v:4 sv:4 min:2 cur:2
64277 Out of Range Block::1 bo:474 Volume offset:986 Expected:2 Got:4
64278 Shift 0, v:4 sv:4 min:2 cur:2
64279 Out of Range Block::1 bo:475 Volume offset:987 Expected:2 Got:4
64280 Shift 0, v:4 sv:4 min:2 cur:2
64281 Out of Range Block::1 bo:476 Volume offset:988 Expected:2 Got:4
64282 Shift 0, v:4 sv:4 min:2 cur:2
64283 Out of Range Block::1 bo:477 Volume offset:989 Expected:2 Got:4
64284 Shift 0, v:4 sv:4 min:2 cur:2
64285 Out of Range Block::1 bo:478 Volume offset:990 Expected:2 Got:4
64286 Shift 0, v:4 sv:4 min:2 cur:2
64287 Out of Range Block::1 bo:479 Volume offset:991 Expected:2 Got:4
64288 Shift 0, v:4 sv:4 min:2 cur:2
64289 Out of Range Block::1 bo:480 Volume offset:992 Expected:2 Got:4
64290 Shift 0, v:4 sv:4 min:2 cur:2
64291 Out of Range Block::1 bo:481 Volume offset:993 Expected:2 Got:4
64292 Shift 0, v:4 sv:4 min:2 cur:2
64293 Out of Range Block::1 bo:482 Volume offset:994 Expected:2 Got:4
64294 Shift 0, v:4 sv:4 min:2 cur:2
64295 Out of Range Block::1 bo:483 Volume offset:995 Expected:2 Got:4
64296 Shift 0, v:4 sv:4 min:2 cur:2
64297 Out of Range Block::1 bo:484 Volume offset:996 Expected:2 Got:4
64298 Shift 0, v:4 sv:4 min:2 cur:2
64299 Out of Range Block::1 bo:485 Volume offset:997 Expected:2 Got:4
64300 Shift 0, v:4 sv:4 min:2 cur:2
64301 Out of Range Block::1 bo:486 Volume offset:998 Expected:2 Got:4
64302 Shift 0, v:4 sv:4 min:2 cur:2
64303 Out of Range Block::1 bo:487 Volume offset:999 Expected:2 Got:4
64304 Shift 0, v:4 sv:4 min:2 cur:2
64305 Out of Range Block::1 bo:488 Volume offset:1000 Expected:2 Got:4
64306 Shift 0, v:4 sv:4 min:2 cur:2
64307 Out of Range Block::1 bo:489 Volume offset:1001 Expected:2 Got:4
64308 Shift 0, v:4 sv:4 min:2 cur:2
64309 Out of Range Block::1 bo:490 Volume offset:1002 Expected:2 Got:4
64310 Shift 0, v:4 sv:4 min:2 cur:2
64311 Out of Range Block::1 bo:491 Volume offset:1003 Expected:2 Got:4
64312 Shift 0, v:4 sv:4 min:2 cur:2
64313 Out of Range Block::1 bo:492 Volume offset:1004 Expected:2 Got:4
64314 Shift 0, v:4 sv:4 min:2 cur:2
64315 Out of Range Block::1 bo:493 Volume offset:1005 Expected:2 Got:4
64316 Shift 0, v:4 sv:4 min:2 cur:2
64317 Out of Range Block::1 bo:494 Volume offset:1006 Expected:2 Got:4
64318 Shift 0, v:4 sv:4 min:2 cur:2
64319 Out of Range Block::1 bo:495 Volume offset:1007 Expected:2 Got:4
64320 Shift 0, v:4 sv:4 min:2 cur:2
64321 Out of Range Block::1 bo:496 Volume offset:1008 Expected:2 Got:4
64322 Shift 0, v:4 sv:4 min:2 cur:2
64323 Out of Range Block::1 bo:497 Volume offset:1009 Expected:2 Got:4
64324 Shift 0, v:4 sv:4 min:2 cur:2
64325 Out of Range Block::1 bo:498 Volume offset:1010 Expected:2 Got:4
64326 Shift 0, v:4 sv:4 min:2 cur:2
64327 Out of Range Block::1 bo:499 Volume offset:1011 Expected:2 Got:4
64328 Shift 0, v:4 sv:4 min:2 cur:2
64329 Out of Range Block::1 bo:500 Volume offset:1012 Expected:2 Got:4
64330 Shift 0, v:4 sv:4 min:2 cur:2
64331 Out of Range Block::1 bo:501 Volume offset:1013 Expected:2 Got:4
64332 Shift 0, v:4 sv:4 min:2 cur:2
64333 Out of Range Block::1 bo:502 Volume offset:1014 Expected:2 Got:4
64334 Shift 0, v:4 sv:4 min:2 cur:2
64335 Out of Range Block::1 bo:503 Volume offset:1015 Expected:2 Got:4
64336 Shift 0, v:4 sv:4 min:2 cur:2
64337 Out of Range Block::1 bo:504 Volume offset:1016 Expected:2 Got:4
64338 Shift 0, v:4 sv:4 min:2 cur:2
64339 Out of Range Block::1 bo:505 Volume offset:1017 Expected:2 Got:4
64340 Shift 0, v:4 sv:4 min:2 cur:2
64341 Out of Range Block::1 bo:506 Volume offset:1018 Expected:2 Got:4
64342 Shift 0, v:4 sv:4 min:2 cur:2
64343 Out of Range Block::1 bo:507 Volume offset:1019 Expected:2 Got:4
64344 Shift 0, v:4 sv:4 min:2 cur:2
64345 Out of Range Block::1 bo:508 Volume offset:1020 Expected:2 Got:4
64346 Shift 0, v:4 sv:4 min:2 cur:2
64347 Out of Range Block::1 bo:509 Volume offset:1021 Expected:2 Got:4
64348 Shift 0, v:4 sv:4 min:2 cur:2
64349 Out of Range Block::1 bo:510 Volume offset:1022 Expected:2 Got:4
64350 Shift 0, v:4 sv:4 min:2 cur:2
64351 Out of Range Block::1 bo:511 Volume offset:1023 Expected:2 Got:4
64352 
64353 test test::test_wl_update_rollover ... ok
64354 test test::test_wl_commit_range_vv ... ok
64355 
64356 test result: ok. 64 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.02s
64357 
64358 Running `/work/oxidecomputer/crucible/target/debug/deps/dsc-2b80980cbc3bac2c --nocapture`
64359 
64360 running 16 tests
64361 Creating region directory at: /tmp/.tmpifwJvi
64362 Creating region directory at: /tmp/.tmpTonNol
64363 Creating dsc directory at: /tmp/.tmpPmdxFa
64364 Creating region directory at: /tmp/.tmpt3lW05
64365 Creating region directory at: /tmp/.tmpLEyjpg
64366 Creating region directory at: /tmp/.tmpCB1HFc
64367 Creating region directory at: /tmp/.tmpTjxItj
64368 Creating dsc directory at: /tmp/.tmpkFVBYI
64369 test test::bad_bin ... ok
64370 test test::new_ti ... ok
64371 test test::existing_ti ... ok
64372 test test::new_ti_four ... ok
64373 Creating region directory at: /tmp/.tmpIa4Dfo
64374 Creating region directory at: /tmp/.tmpdCVzJv
64375 Creating region directory at: /tmp/.tmpCNpQlM
64376 Creating dsc directory at: /tmp/.tmpAjiEye
64377 test test::port_to_region_generation ... ok
64378 Creating region directory at: /tmp/.tmpY03TrF
64379 Creating region directory at: /tmp/.tmp9pJMZe
64380 res is Err(No such file or directory (os error 2))
64381 test test::new_ti_three ... ok
64382 Creating region directory at: /tmp/.tmpxuUGZA
64383 Creating region directory at: /tmp/.tmpBK56XY
64384 Creating region directory at: /tmp/.tmplI6p8J
64385 Creating region directory at: /tmp/.tmp5cnXvi
64386 Creating dsc directory at: /tmp/.tmp3XkRYC
64387 test test::delete_bad_region ... ok
64388 test test::delete_region ... ok
64389 test test::new_ti_two_dirs ... ok
64390 test test::restart_four_region ... ok
64391 test test::delete_bad_second_region ... ok
64392 Creating region directory at: /tmp/.tmp5ayA3X
64393 test test::new_ti_two_region_count ... ok
64394 test test::restart_region_bad ... ok
64395 Creating region directory at: /tmp/.tmpk1abZE
64396 Creating region directory at: /tmp/.tmpJDyEGB
64397 test test::restart_three_region ... ok
64398 test test::restart_region_four_bad ... ok
64399 test control::test::test_dsc_openapi ... ok
64400 
64401 test result: ok. 16 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.01s
64402 
64403 Running `/work/oxidecomputer/crucible/target/debug/deps/dsc_client-f8a5b497695371e1 --nocapture`
64404 
64405 running 0 tests
64406 
64407 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64408 
64409 Running `/work/oxidecomputer/crucible/target/debug/deps/measure_iops-cbdca99bf515defe --nocapture`
64410 
64411 running 0 tests
64412 
64413 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64414 
64415 Running `/work/oxidecomputer/crucible/target/debug/deps/repair_client-5353c8de97b4615f --nocapture`
64416 
64417 running 0 tests
64418 
64419 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64420 
64421 Doc-tests crucible
64422 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible --test /work/oxidecomputer/crucible/upstairs/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern aes_gcm_siv=/work/oxidecomputer/crucible/target/debug/deps/libaes_gcm_siv-21495b616a07c9a4.rlib --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern async_recursion=/work/oxidecomputer/crucible/target/debug/deps/libasync_recursion-ce9499495a1cb858.so --extern async_trait=/work/oxidecomputer/crucible/target/debug/deps/libasync_trait-a300e84178ee0ad1.so --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_client_types=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_client_types-bd54c4335d2370bd.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern 
futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern itertools=/work/oxidecomputer/crucible/target/debug/deps/libitertools-b06e69badd72e55c.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rlib --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rlib --extern proptest=/work/oxidecomputer/crucible/target/debug/deps/libproptest-327f7f2cf6858f27.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern 
slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern test_strategy=/work/oxidecomputer/crucible/target/debug/deps/libtest_strategy-5eb6b90d55d9f739.so --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern tokio_test=/work/oxidecomputer/crucible/target/debug/deps/libtokio_test-12a28be646ff63e6.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rlib --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
64423 
64424 running 0 tests
64425 
64426 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64427 
64428 Doc-tests crucible-agent-client
64429 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_agent_client --test /work/oxidecomputer/crucible/agent-client/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern crucible_agent_client=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_agent_client-86e1c18945d61be3.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -C embed-bitcode=no --error-format human`
64430 
64431 running 0 tests
64432 
64433 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64434 
64435 Doc-tests crucible-client-types
64436 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_client_types --test /work/oxidecomputer/crucible/crucible-client-types/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern crucible_client_types=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_client_types-bd54c4335d2370bd.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
64437 
64438 running 0 tests
64439 
64440 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64441 
64442 Doc-tests crucible-common
64443 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_common --test /work/oxidecomputer/crucible/common/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern atty=/work/oxidecomputer/crucible/target/debug/deps/libatty-bfb6a2cdc762f7c4.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rlib --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rlib --extern rustls_pemfile=/work/oxidecomputer/crucible/target/debug/deps/librustls_pemfile-e52b2a6debfcae48.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_bunyan=/work/oxidecomputer/crucible/target/debug/deps/libslog_bunyan-dce051a6775f1d99.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern 
tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rlib --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern twox_hash=/work/oxidecomputer/crucible/target/debug/deps/libtwox_hash-9f5dd4f7319ca539.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
64444 
64445 running 0 tests
64446 
64447 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64448 
64449 Doc-tests crucible-control-client
64450 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_control_client --test /work/oxidecomputer/crucible/control-client/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern crucible_control_client=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_control_client-3d0142c7d3790e17.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -C embed-bitcode=no --error-format human`
64451 
64452 running 0 tests
64453 
64454 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64455 
64456 Doc-tests crucible-downstairs
64457 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_downstairs --test /work/oxidecomputer/crucible/downstairs/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_downstairs=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_downstairs-6276be71be5284a4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern 
http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern hyper_staticfile=/work/oxidecomputer/crucible/target/debug/deps/libhyper_staticfile-559b4389ef952563.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern mime_guess=/work/oxidecomputer/crucible/target/debug/deps/libmime_guess-66974d6c31968dc2.rlib --extern nix=/work/oxidecomputer/crucible/target/debug/deps/libnix-3635aff9412bf811.rlib --extern omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry-5524fd7817ad57fb.rlib --extern opentelemetry_jaeger=/work/oxidecomputer/crucible/target/debug/deps/libopentelemetry_jaeger-9ebf47742e5e063f.rlib --extern oximeter=/work/oxidecomputer/crucible/target/debug/deps/liboximeter-90ae047d6b643e4e.rlib --extern oximeter_producer=/work/oxidecomputer/crucible/target/debug/deps/liboximeter_producer-5dc4f732e258486e.rlib --extern rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern rand_chacha=/work/oxidecomputer/crucible/target/debug/deps/librand_chacha-add466c063ef8725.rlib --extern repair_client=/work/oxidecomputer/crucible/target/debug/deps/librepair_client-1452d56087b6ccb7.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern ringbuffer=/work/oxidecomputer/crucible/target/debug/deps/libringbuffer-e91d75e4694e6351.rlib --extern rusqlite=/work/oxidecomputer/crucible/target/debug/deps/librusqlite-e4d2316a88b06837.rlib --extern 
schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_rustls=/work/oxidecomputer/crucible/target/debug/deps/libtokio_rustls-eafe4ab74a176b7d.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern toml=/work/oxidecomputer/crucible/target/debug/deps/libtoml-de0eb3fcc3b95b5c.rlib --extern tracing=/work/oxidecomputer/crucible/target/debug/deps/libtracing-1de351a7f2b0ab55.rlib --extern tracing_opentelemetry=/work/oxidecomputer/crucible/target/debug/deps/libtracing_opentelemetry-8f8cfc1900c3a663.rlib --extern tracing_subscriber=/work/oxidecomputer/crucible/target/debug/deps/libtracing_subscriber-e39dae5ba339bc78.rlib --extern usdt=/work/oxidecomputer/crucible/target/debug/deps/libusdt-86bb76e3b8fcea87.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --cfg 'feature="default"' --error-format human`
64458 
64459 running 0 tests
64460 
64461 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64462 
64463 Doc-tests crucible-integration-tests
64464 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_integration_tests --test /work/oxidecomputer/crucible/integration_tests/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_client_types=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_client_types-bd54c4335d2370bd.rlib --extern crucible_downstairs=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_downstairs-6276be71be5284a4.rlib --extern crucible_integration_tests=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_integration_tests-521d4724b4b30c4a.rlib --extern crucible_pantry=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry-fe633af5059fe3a7.rlib --extern crucible_pantry_client=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry_client-ccb9ddeebb23cea2.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern futures_core=/work/oxidecomputer/crucible/target/debug/deps/libfutures_core-46c6e3a1b3966417.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern httptest=/work/oxidecomputer/crucible/target/debug/deps/libhttptest-174da737d96e2af6.rlib --extern 
rand=/work/oxidecomputer/crucible/target/debug/deps/librand-1f91a9ea4aed49ee.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern slog_async=/work/oxidecomputer/crucible/target/debug/deps/libslog_async-03e743f036119074.rlib --extern slog_dtrace=/work/oxidecomputer/crucible/target/debug/deps/libslog_dtrace-8024beacfb95325b.rlib --extern slog_term=/work/oxidecomputer/crucible/target/debug/deps/libslog_term-31c687431f6dd53c.rlib --extern tempfile=/work/oxidecomputer/crucible/target/debug/deps/libtempfile-05cb6a5f7e86cb66.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
64465 
64466 running 0 tests
64467 
64468 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64469 
64470 Doc-tests crucible-pantry
64471 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_pantry --test /work/oxidecomputer/crucible/pantry/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern base64=/work/oxidecomputer/crucible/target/debug/deps/libbase64-c139bdd129e780ac.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible=/work/oxidecomputer/crucible/target/debug/deps/libcrucible-41ca439abdc23695.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_pantry=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry-fe633af5059fe3a7.rlib --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rlib --extern dropshot=/work/oxidecomputer/crucible/target/debug/deps/libdropshot-a49a4505c9c6b86f.rlib --extern expectorate=/work/oxidecomputer/crucible/target/debug/deps/libexpectorate-4e45b262baa473cc.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern hex=/work/oxidecomputer/crucible/target/debug/deps/libhex-6531b11cb72de3e5.rlib --extern http=/work/oxidecomputer/crucible/target/debug/deps/libhttp-4526ab951518c907.rlib --extern hyper=/work/oxidecomputer/crucible/target/debug/deps/libhyper-dad943d3b7cc33e9.rlib --extern 
omicron_common=/work/oxidecomputer/crucible/target/debug/deps/libomicron_common-52df1fff8b36d94c.rlib --extern openapi_lint=/work/oxidecomputer/crucible/target/debug/deps/libopenapi_lint-848bb99097a9a843.rlib --extern openapiv3=/work/oxidecomputer/crucible/target/debug/deps/libopenapiv3-f84072cef6d0d68c.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern sha2=/work/oxidecomputer/crucible/target/debug/deps/libsha2-9b09b4b286e2cb62.rlib --extern slog=/work/oxidecomputer/crucible/target/debug/deps/libslog-84fd25666c3c26ee.rlib --extern subprocess=/work/oxidecomputer/crucible/target/debug/deps/libsubprocess-0acfc5c9b903588a.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
64472 
64473 running 0 tests
64474 
64475 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64476 
64477 Doc-tests crucible-pantry-client
64478 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_pantry_client --test /work/oxidecomputer/crucible/pantry-client/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern crucible_pantry_client=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_pantry_client-ccb9ddeebb23cea2.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
64479 
64480 running 0 tests
64481 
64482 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64483 
64484 Doc-tests crucible-protocol
64485 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_protocol --test /work/oxidecomputer/crucible/protocol/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern bincode=/work/oxidecomputer/crucible/target/debug/deps/libbincode-bcb925e8faac86cd.rlib --extern bytes=/work/oxidecomputer/crucible/target/debug/deps/libbytes-64a8a55ef81e55dd.rlib --extern clap=/work/oxidecomputer/crucible/target/debug/deps/libclap-49bc17aade028e79.rlib --extern crucible_common=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_common-c45901e9152d33f4.rlib --extern crucible_protocol=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_protocol-0682e169c907a102.rlib --extern futures=/work/oxidecomputer/crucible/target/debug/deps/libfutures-df761c89bfa71e54.rlib --extern num_enum=/work/oxidecomputer/crucible/target/debug/deps/libnum_enum-9cd7a6d9dcf1dd5a.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern statistical=/work/oxidecomputer/crucible/target/debug/deps/libstatistical-f2809857d7fe1c91.rlib --extern tokio=/work/oxidecomputer/crucible/target/debug/deps/libtokio-cf190744403b2ee1.rlib --extern tokio_util=/work/oxidecomputer/crucible/target/debug/deps/libtokio_util-279b3765a2b5aad1.rlib --extern uuid=/work/oxidecomputer/crucible/target/debug/deps/libuuid-7cc8e87b4149b49e.rlib -C embed-bitcode=no --error-format human`
64486 
64487 running 0 tests
64488 
64489 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64490 
64491 Doc-tests crucible-smf
64492 Running `rustdoc --edition=2021 --crate-type lib --crate-name crucible_smf --test /work/oxidecomputer/crucible/smf/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern crucible_smf=/work/oxidecomputer/crucible/target/debug/deps/libcrucible_smf-e1a82c6f17385dc6.rlib --extern libc=/work/oxidecomputer/crucible/target/debug/deps/liblibc-a748caf4ceff51bd.rlib --extern num_derive=/work/oxidecomputer/crucible/target/debug/deps/libnum_derive-814c8a0a0a713cba.so --extern num_traits=/work/oxidecomputer/crucible/target/debug/deps/libnum_traits-8e50de91aba3f8f9.rlib --extern thiserror=/work/oxidecomputer/crucible/target/debug/deps/libthiserror-07eca56f531a0e5d.rlib -C embed-bitcode=no --error-format human`
64493 
64494 running 0 tests
64495 
64496 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64497 
64498 Doc-tests dsc-client
64499 Running `rustdoc --edition=2021 --crate-type lib --crate-name dsc_client --test /work/oxidecomputer/crucible/dsc-client/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern dsc_client=/work/oxidecomputer/crucible/target/debug/deps/libdsc_client-15b0c81fa833cf0f.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -C embed-bitcode=no --error-format human`
64500 
64501 running 0 tests
64502 
64503 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64504 
64505 Doc-tests repair-client
64506 Running `rustdoc --edition=2021 --crate-type lib --crate-name repair_client --test /work/oxidecomputer/crucible/repair-client/src/lib.rs -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L dependency=/work/oxidecomputer/crucible/target/debug/deps -L native=/usr/lib/amd64 -L native=/usr/ssl-3/lib/amd64 -L native=/work/oxidecomputer/crucible/target/debug/build/libgit2-sys-6120c1544d9c7fd8/out/build -L native=/work/oxidecomputer/crucible/target/debug/build/ring-2b909d7f60e3ea12/out --test-args --nocapture --extern anyhow=/work/oxidecomputer/crucible/target/debug/deps/libanyhow-c23d7ea2a714731e.rlib --extern chrono=/work/oxidecomputer/crucible/target/debug/deps/libchrono-0402a5e17dd99cdf.rlib --extern percent_encoding=/work/oxidecomputer/crucible/target/debug/deps/libpercent_encoding-0000aebce3d30803.rlib --extern progenitor=/work/oxidecomputer/crucible/target/debug/deps/libprogenitor-0319e0dfd841f493.rlib --extern repair_client=/work/oxidecomputer/crucible/target/debug/deps/librepair_client-1452d56087b6ccb7.rlib --extern reqwest=/work/oxidecomputer/crucible/target/debug/deps/libreqwest-6407fc4e9374ca8e.rlib --extern schemars=/work/oxidecomputer/crucible/target/debug/deps/libschemars-83d20014cee5d9b5.rlib --extern serde=/work/oxidecomputer/crucible/target/debug/deps/libserde-2779165e31567af2.rlib --extern serde_json=/work/oxidecomputer/crucible/target/debug/deps/libserde_json-96027e7dd982d07a.rlib -C embed-bitcode=no --error-format human`
64507 
64508 running 0 tests
64509 
64510 test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
64511 
64512 
64513 real 16:14.717969945
64514 user 51:29.493703807
64515 sys 5:04.906420843
64516 trap 1.589705494
64517 tflt 0.679405711
64518 dflt 1.821424997
64519 kflt 0.000389406
64520 lock 1:51:39.210672691
64521 slp 1:56:32.384143002
64522 lat 1:12.476434840
64523 stop 39.258641176