[Gluster-Maintainers] Build failed in Jenkins: regression-test-with-multiplex #1502

jenkins at build.gluster.org
Sun Sep 22 18:05:03 UTC 2019


See <https://build.gluster.org/job/regression-test-with-multiplex/1502/display/redirect>

Changes:


------------------------------------------
[...truncated 3.64 MB...]
        peerinfo = 0x0
        priv = 0x1019740
        ret = -1
        this = 0xfcdd90
        __FUNCTION__ = "glusterd_hostname_to_uuid"
#6  0x00007fe59d65a724 in glusterd_volume_brickinfo_get (uuid=0x0, hostname=0x7fe58c009fcc "builder210.int.aws.gluster.org", path=0x7fe58c00a0cb "/d/backends/patchy0", volinfo=0x7fe59001de70, brickinfo=0x7fe59a4aec10) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/xlators/mgmt/glusterd/src/glusterd-utils.c>:1501
        brickiter = 0x0
        peer_uuid = '\000' <repeats 15 times>
        ret = -1
        this = 0xfcdd90
        __FUNCTION__ = "glusterd_volume_brickinfo_get"
#7  0x00007fe59d65ab4a in glusterd_volume_brickinfo_get_by_brick (brick=0x7fe58c0016b5 "builder210.int.aws.gluster.org:/d/backends/patchy0", volinfo=0x7fe59001de70, brickinfo=0x7fe59a4aec10, construct_real_path=false) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/xlators/mgmt/glusterd/src/glusterd-utils.c>:1576
        ret = 0
        tmp_brickinfo = 0x7fe58c009f20
        __PRETTY_FUNCTION__ = "glusterd_volume_brickinfo_get_by_brick"
        __FUNCTION__ = "glusterd_volume_brickinfo_get_by_brick"
#8  0x00007fe59d638bc5 in get_brickinfo_from_brickid (brickid=0x7fe594003f80 "1f6b0ad8-bc94-42c4-96f7-c10cccaf1f94:builder210.int.aws.gluster.org:/d/backends/patchy0", brickinfo=0x7fe59a4aec10) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/xlators/mgmt/glusterd/src/glusterd-handler.c>:5934
        volinfo = 0x7fe59001de70
        volid_str = 0x7fe58c001690 "1f6b0ad8-bc94-42c4-96f7-c10cccaf1f94"
        brick = 0x7fe58c0016b5 "builder210.int.aws.gluster.org:/d/backends/patchy0"
        brickid_dup = 0x7fe58c001690 "1f6b0ad8-bc94-42c4-96f7-c10cccaf1f94"
        volid = "\037k\nؼ\224BĖ\367\301\f̯\037\224"
        ret = 0
#9  0x00007fe59d638cac in __glusterd_brick_rpc_notify (rpc=0x7fe594004040, mydata=0x7fe594003f80, event=RPC_CLNT_DISCONNECT, data=0x0) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/xlators/mgmt/glusterd/src/glusterd-handler.c>:5968
        brickid = 0x7fe594003f80 "1f6b0ad8-bc94-42c4-96f7-c10cccaf1f94:builder210.int.aws.gluster.org:/d/backends/patchy0"
        ret = 0
        conf = 0x0
        brickinfo = 0x0
        volinfo = 0x0
        this = 0x0
        pid = -1
        brickinfo_tmp = 0x0
        brick_proc = 0x0
        pidfile = '\000' <repeats 4095 times>
        brickpath = 0x0
        is_service_running = true
        __PRETTY_FUNCTION__ = "__glusterd_brick_rpc_notify"
        __FUNCTION__ = "__glusterd_brick_rpc_notify"
#10 0x00007fe59d626142 in glusterd_big_locked_notify (rpc=0x7fe594004040, mydata=0x7fe594003f80, event=RPC_CLNT_DISCONNECT, data=0x0, notify_fn=0x7fe59d638be8 <__glusterd_brick_rpc_notify>) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/xlators/mgmt/glusterd/src/glusterd-handler.c>:66
        priv = 0x1019740
        ret = -1
#11 0x00007fe59d639641 in glusterd_brick_rpc_notify (rpc=0x7fe594004040, mydata=0x7fe594003f80, event=RPC_CLNT_DISCONNECT, data=0x0) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/xlators/mgmt/glusterd/src/glusterd-handler.c>:6123
No locals.
#12 0x00007fe5a92eb71b in rpc_clnt_handle_disconnect (clnt=0x7fe594004040, conn=0x7fe594004070) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/rpc/rpc-lib/src/rpc-clnt.c>:826
        ts = {tv_sec = 0, tv_nsec = 0}
        unref_clnt = false
        pre_notify_gen = 0
        post_notify_gen = 0
        __FUNCTION__ = "rpc_clnt_handle_disconnect"
#13 0x00007fe5a92eb9da in rpc_clnt_notify (trans=0x7fe594004350, mydata=0x7fe594004070, event=RPC_TRANSPORT_DISCONNECT, data=0x7fe594004350) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/rpc/rpc-lib/src/rpc-clnt.c>:887
        conn = 0x7fe594004070
        clnt = 0x7fe594004040
        ret = -1
        req_info = 0x0
        pollin = 0x0
        clnt_mydata = 0x0
        old_THIS = 0xfcdd90
        __FUNCTION__ = "rpc_clnt_notify"
#14 0x00007fe5a92e7b0e in rpc_transport_notify (this=0x7fe594004350, event=RPC_TRANSPORT_DISCONNECT, data=0x7fe594004350) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/rpc/rpc-lib/src/rpc-transport.c>:545
        ret = -1
        __FUNCTION__ = "rpc_transport_notify"
#15 0x00007fe59c644fd9 in socket_event_poll_err (this=0x7fe594004350, gen=1, idx=4) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/rpc/rpc-transport/socket/src/socket.c>:1408
        priv = 0x7fe5940048b0
        socket_closed = true
        __FUNCTION__ = "socket_event_poll_err"
#16 0x00007fe59c64b162 in socket_event_handler (fd=12, idx=4, gen=1, data=0x7fe594004350, poll_in=1, poll_out=0, poll_err=16, event_thread_died=0 '\000') at <https://build.gluster.org/job/regression-test-with-multiplex/ws/rpc/rpc-transport/socket/src/socket.c>:3048
        sa = 0x7fe594004410
        this = 0x7fe594004350
        priv = 0x7fe5940048b0
        ret = -1
        ctx = 0xf84010
        socket_closed = false
        notify_handled = false
        __FUNCTION__ = "socket_event_handler"
#17 0x00007fe5a95c8645 in event_dispatch_epoll_handler (event_pool=0xfbae50, event=0x7fe59a4af140) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/libglusterfs/src/event-epoll.c>:642
        ev_data = 0x7fe59a4af144
        slot = 0xffb790
        handler = 0x7fe59c64ab56 <socket_event_handler>
        data = 0x7fe594004350
        idx = 4
        gen = 1
        ret = 0
        fd = 12
        handled_error_previously = false
        __FUNCTION__ = "event_dispatch_epoll_handler"
#18 0x00007fe5a95c8b5e in event_dispatch_epoll_worker (data=0x1037460) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/libglusterfs/src/event-epoll.c>:755
        event = {events = 17, data = {ptr = 0x100000004, fd = 4, u32 = 4, u64 = 4294967300}}
        ret = 1
        ev_data = 0x1037460
        event_pool = 0xfbae50
        myindex = 1
        timetodie = 0
        gen = 0
        poller_death_notify = {next = 0x0, prev = 0x0}
        slot = 0x0
        tmp = 0x0
        __FUNCTION__ = "event_dispatch_epoll_worker"
#19 0x00007fe5a8377e65 in start_thread () from /lib64/libpthread.so.0
No symbol table info available.
#20 0x00007fe5a7c3d88d in clone () from /lib64/libc.so.6
No symbol table info available.

Thread 6 (Thread 0x7fe59acb1700 (LWP 18091)):
#0  0x00007fe5a837b9f5 in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
No symbol table info available.
#1  0x00007fe59d6ff078 in hooks_worker (args=0xfcdd90) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/xlators/mgmt/glusterd/src/glusterd-hooks.c>:527
        conf = 0x1019740
        hooks_priv = 0x1035930
        stub = 0x7fe5941b7690
#2  0x00007fe5a8377e65 in start_thread () from /lib64/libpthread.so.0
No symbol table info available.
#3  0x00007fe5a7c3d88d in clone () from /lib64/libc.so.6
No symbol table info available.

Thread 5 (Thread 0x7fe5a0b14700 (LWP 18060)):
#0  0x00007fe5a837bda2 in pthread_cond_timedwait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
No symbol table info available.
#1  0x00007fe5a955e292 in gf_timer_proc (data=0xfc29c0) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/libglusterfs/src/timer.c>:140
        now = {tv_sec = 2861773, tv_nsec = 552818540}
        reg = 0xfc29c0
        event = 0x7fe588000a50
        tmp = 0x0
        old_THIS = 0x7fe5a9862a60 <global_xlator>
#2  0x00007fe5a8377e65 in start_thread () from /lib64/libpthread.so.0
No symbol table info available.
#3  0x00007fe5a7c3d88d in clone () from /lib64/libc.so.6
No symbol table info available.

Thread 4 (Thread 0x7fe5a9a744c0 (LWP 18059)):
#0  0x00007fe5a8378fd7 in pthread_join () from /lib64/libpthread.so.0
No symbol table info available.
#1  0x00007fe5a95c8df0 in event_dispatch_epoll (event_pool=0xfbae50) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/libglusterfs/src/event-epoll.c>:840
        i = 1
        t_id = 140624112846592
        pollercount = 1
        ret = 0
        ev_data = 0x1037460
        __FUNCTION__ = "event_dispatch_epoll"
#2  0x00007fe5a9584a03 in gf_event_dispatch (event_pool=0xfbae50) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/libglusterfs/src/event.c>:115
        ret = -1
        __FUNCTION__ = "gf_event_dispatch"
#3  0x000000000040c26c in ?? ()
No symbol table info available.
#4  0x0000000000000000 in ?? ()
No symbol table info available.

Thread 3 (Thread 0x7fe59f311700 (LWP 18063)):
#0  0x00007fe5a837bda2 in pthread_cond_timedwait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
No symbol table info available.
#1  0x00007fe5a959eec4 in syncenv_task (proc=0xfc3260) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/libglusterfs/src/syncop.c>:517
        env = 0xfc3260
        task = 0x0
        sleep_till = {tv_sec = 1569173792, tv_nsec = 0}
        ret = 0
#2  0x00007fe5a959f0b9 in syncenv_processor (thdata=0xfc3260) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/libglusterfs/src/syncop.c>:584
        env = 0xfc3260
        proc = 0xfc3260
        task = 0x7fe58c006b00
#3  0x00007fe5a8377e65 in start_thread () from /lib64/libpthread.so.0
No symbol table info available.
#4  0x00007fe5a7c3d88d in clone () from /lib64/libc.so.6
No symbol table info available.

Thread 2 (Thread 0x7fe59fb12700 (LWP 18062)):
#0  0x00007fe5a7c0480d in nanosleep () from /lib64/libc.so.6
No symbol table info available.
#1  0x00007fe5a7c046a4 in sleep () from /lib64/libc.so.6
No symbol table info available.
#2  0x00007fe5a9585ff6 in pool_sweeper (arg=0x0) at <https://build.gluster.org/job/regression-test-with-multiplex/ws/libglusterfs/src/mem-pool.c>:446
        state = {death_row = {next = 0x7fe59fb0f150, prev = 0x7fe59fb0f150}, cold_lists = {0x0 <repeats 1024 times>}, n_cold_lists = 0}
        pool_list = 0x7fe59fb0f150
        next_pl = 0x7fe59fb0f150
        pt_pool = 0x0
        i = 0
        poisoned = false
#3  0x00007fe5a8377e65 in start_thread () from /lib64/libpthread.so.0
No symbol table info available.
#4  0x00007fe5a7c3d88d in clone () from /lib64/libc.so.6
No symbol table info available.

Thread 1 (Thread 0x7fe5a0313700 (LWP 18061)):
#0  0x00007fe5a7b75337 in raise () from /lib64/libc.so.6
No symbol table info available.
#1  0x00007fe5a7b76a28 in abort () from /lib64/libc.so.6
No symbol table info available.
#2  0x00007fe5a7bb7e87 in __libc_message () from /lib64/libc.so.6
No symbol table info available.
#3  0x00007fe5a7bb7f6e in __libc_fatal () from /lib64/libc.so.6
No symbol table info available.
#4  0x00007fe5a7bb82e3 in _IO_vtable_check () from /lib64/libc.so.6
No symbol table info available.
#5  0x00007fe5a7bbcdfb in _IO_cleanup () from /lib64/libc.so.6
No symbol table info available.
#6  0x00007fe5a7b78c4b in __run_exit_handlers () from /lib64/libc.so.6
No symbol table info available.
#7  0x00007fe5a7b78ce7 in exit () from /lib64/libc.so.6
No symbol table info available.
#8  0x00000000004096da in ?? ()
No symbol table info available.
#9  0x0000000000000001 in ?? ()
No symbol table info available.
#10 0x00000000000186c0 in ?? ()
No symbol table info available.
#11 0x0000000000416b48 in ?? ()
No symbol table info available.
#12 0x000000000000000f in ?? ()
No symbol table info available.
#13 0x0000000000000000 in ?? ()
No symbol table info available.
=========================================================
              Finish backtrace
         program name : /build/install/sbin/glusterd
         corefile     : /glfs_sigwait-18059.core
=========================================================

+ rm -f /build/install/cores/gdbout.txt
+ sort /build/install/cores/liblist.txt
+ uniq
+ cat /build/install/cores/liblist.txt.tmp
+ grep -v /build/install
+ tar -cf /archives/archived_builds/build-install-regression-test-with-multiplex-1502.tar /build/install/sbin /build/install/bin /build/install/lib /build/install/libexec /build/install/cores
tar: Removing leading `/' from member names
+ tar -rhf /archives/archived_builds/build-install-regression-test-with-multiplex-1502.tar -T /build/install/cores/liblist.txt
tar: Removing leading `/' from member names
+ bzip2 /archives/archived_builds/build-install-regression-test-with-multiplex-1502.tar
+ rm -f /build/install/cores/liblist.txt
+ rm -f /build/install/cores/liblist.txt.tmp
+ find /archives -size +1G -delete -type f
+ [[ builder210.int.aws.gluster.org == *\a\w\s* ]]
+ scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i **** /archives/archived_builds/build-install-regression-test-with-multiplex-1502.tar.bz2 _logs-collector at logs.aws.gluster.org:/var/www/glusterfs-logs/regression-test-with-multiplex-1502.bz2
Warning: Permanently added 'logs.aws.gluster.org,18.219.45.211' (ECDSA) to the list of known hosts.
+ echo 'Cores and builds archived in https://logs.aws.gluster.org/regression-test-with-multiplex-1502.bz2'
Cores and builds archived in https://logs.aws.gluster.org/regression-test-with-multiplex-1502.bz2
+ echo 'Open core using the following command to get a proper stack'
Open core using the following command to get a proper stack
+ echo 'Example: From root of extracted tarball'
Example: From root of extracted tarball
+ echo '\t\tgdb -ex '\''set sysroot ./'\'' -ex '\''core-file ./build/install/cores/xxx.core'\'' <target, say ./build/install/sbin/glusterd>'
\t\tgdb -ex 'set sysroot ./' -ex 'core-file ./build/install/cores/xxx.core' <target, say ./build/install/sbin/glusterd>
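For reference, a concrete form of the command echoed above, using the artifact names reported earlier in this log (the tarball from the archive step and the core file glfs_sigwait-18059.core); the exact path of the core inside the extracted tarball is assumed from the archiving commands and may differ:

    # extract the archived build and cores (tarball name taken from the upload step above)
    tar -xjf regression-test-with-multiplex-1502.bz2
    # from the root of the extracted tarball, open the core against the archived glusterd binary
    gdb -ex 'set sysroot ./' \
        -ex 'core-file ./build/install/cores/glfs_sigwait-18059.core' \
        ./build/install/sbin/glusterd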
+ RET=1
+ '[' 1 -ne 0 ']'
+ tar -czf <https://build.gluster.org/job/regression-test-with-multiplex/1502/artifact/glusterfs-logs.tgz> /var/log/glusterfs /var/log/messages /var/log/messages-20190901 /var/log/messages-20190908 /var/log/messages-20190915 /var/log/messages-20190922
tar: Removing leading `/' from member names
+ case $(uname -s) in
++ uname -s
+ /sbin/sysctl -w kernel.core_pattern=/%e-%p.core
kernel.core_pattern = /%e-%p.core
+ exit 1
Build step 'Execute shell' marked build as failure

