[Gluster-Maintainers] Build failed in Jenkins: experimental-periodic #219

jenkins at build.gluster.org
Thu Feb 1 17:35:26 UTC 2018


See <https://build.gluster.org/job/experimental-periodic/219/display/redirect>

------------------------------------------
[...truncated 981.73 KB...]
#3  0x00007f42cd217e25 in start_thread () from /lib64/libpthread.so.0
No symbol table info available.
#4  0x00007f42ccae434d in clone () from /lib64/libc.so.6
No symbol table info available.

Thread 5 (Thread 0x7f42c3d6e700 (LWP 9003)):
#0  0x00007f42cd21bcf2 in pthread_cond_timedwait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
No symbol table info available.
#1  0x00007f42ce41261d in syncenv_task (proc=0x12e1f70) at <https://build.gluster.org/job/experimental-periodic/ws/libglusterfs/src/syncop.c>:603
        env = 0x12e1f70
        task = 0x0
        sleep_till = {tv_sec = 1517504134, tv_nsec = 0}
        ret = 0
#2  0x00007f42ce4128b8 in syncenv_processor (thdata=0x12e1f70) at <https://build.gluster.org/job/experimental-periodic/ws/libglusterfs/src/syncop.c>:695
        env = 0x12e1f70
        proc = 0x12e1f70
        task = 0x0
#3  0x00007f42cd217e25 in start_thread () from /lib64/libpthread.so.0
No symbol table info available.
#4  0x00007f42ccae434d in clone () from /lib64/libc.so.6
No symbol table info available.

Thread 4 (Thread 0x7f42c456f700 (LWP 9002)):
#0  0x00007f42ccaab1ad in nanosleep () from /lib64/libc.so.6
No symbol table info available.
#1  0x00007f42ccaab044 in sleep () from /lib64/libc.so.6
No symbol table info available.
#2  0x00007f42ce3fb461 in pool_sweeper (arg=0x0) at <https://build.gluster.org/job/experimental-periodic/ws/libglusterfs/src/mem-pool.c>:470
        state = {death_row = {next = 0x0, prev = 0x0}, cold_lists = {0x0 <repeats 1024 times>}, n_cold_lists = 0}
        pool_list = 0x0
        next_pl = 0x0
        pt_pool = 0x0
        i = 0
        begin_time = {tv_sec = 0, tv_usec = 0}
        end_time = {tv_sec = 0, tv_usec = 0}
        elapsed = {tv_sec = 0, tv_usec = 0}
#3  0x00007f42cd217e25 in start_thread () from /lib64/libpthread.so.0
No symbol table info available.
#4  0x00007f42ccae434d in clone () from /lib64/libc.so.6
No symbol table info available.

Thread 3 (Thread 0x7f42c4d70700 (LWP 9001)):
#0  0x00007f42cd21f371 in sigwait () from /lib64/libpthread.so.0
No symbol table info available.
#1  0x000000000040a432 in ?? ()
No symbol table info available.
#2  0x0000000000000000 in ?? ()
No symbol table info available.

Thread 2 (Thread 0x7f42b933c700 (LWP 9012)):
#0  0x00007f42cd21b945 in pthread_cond_wait@@GLIBC_2.3.2 () from /lib64/libpthread.so.0
No symbol table info available.
#1  0x00007f42c57ab2af in notify_kernel_loop (data=0x12ccb40) at <https://build.gluster.org/job/experimental-periodic/ws/xlators/mount/fuse/src/fuse-bridge.c>:4018
        len = 0
        rv = 0
        this = 0x12ccb40
        priv = 0x12d43a0
        node = 0x0
        tmp = 0x0
        pfoh = 0x0
        iov_out = {iov_base = 0x0, iov_len = 0}
        __FUNCTION__ = "notify_kernel_loop"
#2  0x00007f42cd217e25 in start_thread () from /lib64/libpthread.so.0
No symbol table info available.
#3  0x00007f42ccae434d in clone () from /lib64/libc.so.6
No symbol table info available.

Thread 1 (Thread 0x7f42c12a2700 (LWP 9006)):
#0  0x00007f42c0854ade in dict_to_xdr (this=0x7f42ac0019c8, dict=0x7f42c12a1240) at <https://build.gluster.org/job/experimental-periodic/ws/rpc/xdr/src/glusterfs3.h>:681
        ret = -1
        i = 16
        index = 16
        dpair = 0x0
        xpair = 0x7f42bc091730
        size = 0
        __FUNCTION__ = "dict_to_xdr"
#1  0x00007f42c086100d in client_pre_lookup_v2 (this=0x7f42bc00adc0, req=0x7f42c12a1210, loc=0x7f42ac005f90, xdata=0x7f42ac0019c8) at <https://build.gluster.org/job/experimental-periodic/ws/xlators/protocol/client/src/client-common.c>:2986
        op_errno = 116
#2  0x00007f42c0877571 in client4_0_lookup (frame=0x7f42bc08f2c8, this=0x7f42bc00adc0, data=0x7f42c12a1340) at <https://build.gluster.org/job/experimental-periodic/ws/xlators/protocol/client/src/client-rpc-fops_v2.c>:2892
        conf = 0x7f42bc0724b0
        local = 0x7f42bc08f658
        args = 0x7f42c12a1340
        req = {gfid = '\000' <repeats 15 times>, "\001", pargfid = '\000' <repeats 15 times>, flags = 0, bname = 0x7f42c08906be "", xdata = {xdr_size = 0, count = 0, pairs = {pairs_len = 0, pairs_val = 0x7f42bc090c30}}}
        ret = 0
        op_errno = 116
        content = 0x7f42ac0020f8
        vector = {{iov_base = 0x7f42cc68c000, iov_len = 131072}, {iov_base = 0x0, iov_len = 0} <repeats 15 times>}
        count = 1
        rsp_iobref = 0x0
        rsp_iobuf = 0x0
        rsphdr = 0x7f42c12a1110
        __FUNCTION__ = "client4_0_lookup"
#3  0x00007f42c0805750 in client_lookup (frame=0x7f42bc08f2c8, this=0x7f42bc00adc0, loc=0x7f42ac005f90, xdata=0x7f42ac0019c8) at <https://build.gluster.org/job/experimental-periodic/ws/xlators/protocol/client/src/client.c>:538
        ret = -1
        conf = 0x7f42bc0724b0
        proc = 0x7f42c0aa0ed0 <clnt4_0_fop_actors+432>
        args = {loc = 0x7f42ac005f90, fd = 0x0, linkname = 0x0, iobref = 0x0, vector = 0x0, xattr = 0x0, stbuf = 0x0, oldloc = 0x0, newloc = 0x0, name = 0x0, flock = 0x0, volume = 0x0, basename = 0x0, offset = 0, mask = 0, cmd = 0, size = 0, mode = 0, rdev = 0, flags = 0, count = 0, datasync = 0, cmd_entrylk = ENTRYLK_LOCK, type = ENTRYLK_RDLCK, optype = GF_XATTROP_ADD_ARRAY, valid = 0, len = 0, what = GF_SEEK_DATA, lease = 0x0, umask = 0, xdata = 0x7f42ac0019c8, locklist = 0x0}
        __FUNCTION__ = "client_lookup"
#4  0x00007f42c05bfafd in afr_discover_do (frame=0x7f42ac005a48, this=0x7f42bc012f50, err=0) at <https://build.gluster.org/job/experimental-periodic/ws/xlators/cluster/afr/src/afr-common.c>:2994
        _new = 0x7f42bc08f2c8
        old_THIS = 0x7f42bc012f50
        tmp_cbk = 0x7f42c05bf35e <afr_discover_cbk>
        ret = 0
        i = 1
        local = 0x7f42ac005b58
        priv = 0x7f42bc0608b0
        call_count = 1
        __FUNCTION__ = "afr_discover_do"
#5  0x00007f42c05b8539 in afr_txn_refresh_done (frame=0x7f42ac005a48, this=0x7f42bc012f50, err=0) at <https://build.gluster.org/job/experimental-periodic/ws/xlators/cluster/afr/src/afr-common.c>:1229
        heal_frame = 0x0
        heal_local = 0x0
        local = 0x7f42ac005b58
        priv = 0x7f42bc0608b0
        inode = 0x7f42bc083ea8
        event_generation = 0
        read_subvol = -1
        op_errno = 12
        ret = 0
#6  0x00007f42c05b87fc in afr_inode_refresh_done (frame=0x7f42ac005a48, this=0x7f42bc012f50, error=0) at <https://build.gluster.org/job/experimental-periodic/ws/xlators/cluster/afr/src/afr-common.c>:1276
        heal_frame = 0x0
        local = 0x7f42ac005b58
        start_heal = false
        heal_local = 0x0
        op_errno = 12
        ret = 0
        err = 0
#7  0x00007f42c05b8a04 in afr_inode_refresh_subvol_cbk (frame=0x7f42ac005a48, cookie=0x1, this=0x7f42bc012f50, op_ret=0, op_errno=0, buf=0x7f42c12a1830, xdata=0x7f42bc08f038, par=0x7f42c12a1790) at <https://build.gluster.org/job/experimental-periodic/ws/xlators/cluster/afr/src/afr-common.c>:1313
        local = 0x7f42ac005b58
        call_child = 1
        need_heal = 0 '\000'
        call_count = 0
        ret = 0
#8  0x00007f42c05b8a5a in afr_inode_refresh_subvol_with_lookup_cbk (frame=0x7f42ac005a48, cookie=0x1, this=0x7f42bc012f50, op_ret=0, op_errno=0, inode=0x7f42bc083ea8, buf=0x7f42c12a1830, xdata=0x7f42bc08f038, par=0x7f42c12a1790) at <https://build.gluster.org/job/experimental-periodic/ws/xlators/cluster/afr/src/afr-common.c>:1325
No locals.
#9  0x00007f42c0875d6f in client4_0_lookup_cbk (req=0x7f42ac00c0a8, iov=0x7f42ac00c0e8, count=1, myframe=0x7f42ac00baa8) at <https://build.gluster.org/job/experimental-periodic/ws/xlators/protocol/client/src/client-rpc-fops_v2.c>:2539
        fn = 0x7f42c05b8a0b <afr_inode_refresh_subvol_with_lookup_cbk>
        _parent = 0x7f42ac005a48
        old_THIS = 0x7f42bc00adc0
        __local = 0x7f42ac00b228
        rsp = {op_ret = 0, op_errno = 0, xdata = {xdr_size = 120, count = 3, pairs = {pairs_len = 3, pairs_val = 0x7f42bc089660}}, prestat = {ia_gfid = '\000' <repeats 15 times>, "\001", ia_flags = 6143, ia_ino = 1, ia_dev = 1792, ia_rdev = 0, ia_size = 24, ia_blocks = 0, ia_attributes = 0, ia_attributes_mask = 0, ia_atime = 1517503532, ia_mtime = 1517503532, ia_ctime = 1517503534, ia_btime = 0, ia_atime_nsec = 579554554, ia_mtime_nsec = 579554554, ia_ctime_nsec = 556599115, ia_btime_nsec = 0, ia_nlink = 3, ia_uid = 0, ia_gid = 0, ia_blksize = 4096, mode = 16877}, poststat = {ia_gfid = '\000' <repeats 15 times>, ia_flags = 0, ia_ino = 0, ia_dev = 0, ia_rdev = 0, ia_size = 0, ia_blocks = 0, ia_attributes = 0, ia_attributes_mask = 0, ia_atime = 0, ia_mtime = 0, ia_ctime = 0, ia_btime = 0, ia_atime_nsec = 0, ia_mtime_nsec = 0, ia_ctime_nsec = 0, ia_btime_nsec = 0, ia_nlink = 0, ia_uid = 0, ia_gid = 0, ia_blksize = 0, mode = 0}}
        local = 0x7f42ac00b228
        frame = 0x7f42ac00baa8
        ret = 0
        stbuf = {ia_flags = 6143, ia_ino = 1, ia_dev = 1792, ia_rdev = 0, ia_size = 24, ia_nlink = 3, ia_uid = 0, ia_gid = 0, ia_blksize = 4096, ia_blocks = 0, ia_atime = 1517503532, ia_mtime = 1517503532, ia_ctime = 1517503534, ia_btime = 0, ia_atime_nsec = 579554554, ia_mtime_nsec = 579554554, ia_ctime_nsec = 556599115, ia_btime_nsec = 0, ia_attributes = 0, ia_attributes_mask = 0, ia_gfid = '\000' <repeats 15 times>, "\001", ia_type = IA_IFDIR, ia_prot = {suid = 0 '\000', sgid = 0 '\000', sticky = 0 '\000', owner = {read = 1 '\001', write = 1 '\001', exec = 1 '\001'}, group = {read = 1 '\001', write = 0 '\000', exec = 1 '\001'}, other = {read = 1 '\001', write = 0 '\000', exec = 1 '\001'}}}
        postparent = {ia_flags = 0, ia_ino = 0, ia_dev = 0, ia_rdev = 0, ia_size = 0, ia_nlink = 0, ia_uid = 0, ia_gid = 0, ia_blksize = 0, ia_blocks = 0, ia_atime = 0, ia_mtime = 0, ia_ctime = 0, ia_btime = 0, ia_atime_nsec = 0, ia_mtime_nsec = 0, ia_ctime_nsec = 0, ia_btime_nsec = 0, ia_attributes = 0, ia_attributes_mask = 0, ia_gfid = '\000' <repeats 15 times>, ia_type = IA_INVAL, ia_prot = {suid = 0 '\000', sgid = 0 '\000', sticky = 0 '\000', owner = {read = 0 '\000', write = 0 '\000', exec = 0 '\000'}, group = {read = 0 '\000', write = 0 '\000', exec = 0 '\000'}, other = {read = 0 '\000', write = 0 '\000', exec = 0 '\000'}}}
        op_errno = 0
        xdata = 0x7f42bc08f038
        inode = 0x7f42bc083ea8
        this = 0x7f42bc00adc0
        __FUNCTION__ = "client4_0_lookup_cbk"
#10 0x00007f42ce18d6db in rpc_clnt_handle_reply (clnt=0x7f42bc072620, pollin=0x7f42bc08a040) at <https://build.gluster.org/job/experimental-periodic/ws/rpc/rpc-lib/src/rpc-clnt.c>:778
        conn = 0x7f42bc072650
        saved_frame = 0x7f42ac00c4b8
        ret = 0
        req = 0x7f42ac00c0a8
        xid = 22
        __FUNCTION__ = "rpc_clnt_handle_reply"
#11 0x00007f42ce18dcb2 in rpc_clnt_notify (trans=0x7f42bc072910, mydata=0x7f42bc072650, event=RPC_TRANSPORT_MSG_RECEIVED, data=0x7f42bc08a040) at <https://build.gluster.org/job/experimental-periodic/ws/rpc/rpc-lib/src/rpc-clnt.c>:976
        conn = 0x7f42bc072650
        clnt = 0x7f42bc072620
        ret = -1
        req_info = 0x0
        pollin = 0x7f42bc08a040
        clnt_mydata = 0x0
        old_THIS = 0x7f42bc00adc0
        __FUNCTION__ = "rpc_clnt_notify"
#12 0x00007f42ce189de5 in rpc_transport_notify (this=0x7f42bc072910, event=RPC_TRANSPORT_MSG_RECEIVED, data=0x7f42bc08a040) at <https://build.gluster.org/job/experimental-periodic/ws/rpc/rpc-lib/src/rpc-transport.c>:537
        ret = -1
        __FUNCTION__ = "rpc_transport_notify"
#13 0x00007f42c2b54df8 in socket_event_poll_in (this=0x7f42bc072910, notify_handled=true) at <https://build.gluster.org/job/experimental-periodic/ws/rpc/rpc-transport/socket/src/socket.c>:2462
        ret = 0
        pollin = 0x7f42bc08a040
        priv = 0x7f42bc072f90
        ctx = 0x128d010
#14 0x00007f42c2b55466 in socket_event_handler (fd=16, idx=4, gen=1, data=0x7f42bc072910, poll_in=1, poll_out=0, poll_err=0) at <https://build.gluster.org/job/experimental-periodic/ws/rpc/rpc-transport/socket/src/socket.c>:2618
        this = 0x7f42bc072910
        priv = 0x7f42bc072f90
        ret = 0
        ctx = 0x128d010
        socket_closed = false
        notify_handled = false
        __FUNCTION__ = "socket_event_handler"
#15 0x00007f42ce43a1c0 in event_dispatch_epoll_handler (event_pool=0x12c4bc0, event=0x7f42c12a1ea0) at <https://build.gluster.org/job/experimental-periodic/ws/libglusterfs/src/event-epoll.c>:579
        ev_data = 0x7f42c12a1ea4
        slot = 0x1309770
        handler = 0x7f42c2b55198 <socket_event_handler>
        data = 0x7f42bc072910
        idx = 4
        gen = 1
        ret = -1
        fd = 16
        handled_error_previously = false
        __FUNCTION__ = "event_dispatch_epoll_handler"
#16 0x00007f42ce43a4b3 in event_dispatch_epoll_worker (data=0x1308590) at <https://build.gluster.org/job/experimental-periodic/ws/libglusterfs/src/event-epoll.c>:655
        event = {events = 1, data = {ptr = 0x100000004, fd = 4, u32 = 4, u64 = 4294967300}}
        ret = 1
        ev_data = 0x1308590
        event_pool = 0x12c4bc0
        myindex = 1
        timetodie = 0
        __FUNCTION__ = "event_dispatch_epoll_worker"
#17 0x00007f42cd217e25 in start_thread () from /lib64/libpthread.so.0
No symbol table info available.
#18 0x00007f42ccae434d in clone () from /lib64/libc.so.6
No symbol table info available.
=========================================================
              Finish backtrace
         program name : /build/install/sbin/glusterfs
         corefile     : /glusterepoll0-8999.core
=========================================================

+ rm -f /build/install/cores/gdbout.txt
+ sort /build/install/cores/liblist.txt
+ uniq
+ cat /build/install/cores/liblist.txt.tmp
+ grep -v /build/install
+ tar -cf /archives/archived_builds/build-install-experimental-periodic-219.tar /build/install/sbin /build/install/bin /build/install/lib /build/install/libexec /build/install/cores
tar: Removing leading `/' from member names
+ tar -rhf /archives/archived_builds/build-install-experimental-periodic-219.tar -T /build/install/cores/liblist.txt
tar: Removing leading `/' from member names
+ bzip2 /archives/archived_builds/build-install-experimental-periodic-219.tar
+ scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i <https://build.gluster.org/job/experimental-periodic/ws/> /archives/archived_builds/build-install-experimental-periodic-219.tar _logs_collector at http.int.rht.gluster.org:/var/www/glusterfs-logs/experimental-periodic-build-install-219.tgz
ssh: connect to host http.int.rht.gluster.org port 22: Connection timed out
lost connection
+ true
+ rm -f /build/install/cores/liblist.txt
+ rm -f /build/install/cores/liblist.txt.tmp
+ echo 'Cores and build archived in http://builder100.cloud.gluster.org/archived_builds/build-install-experimental-periodic-219.tar.bz2'
Cores and build archived in http://builder100.cloud.gluster.org/archived_builds/build-install-experimental-periodic-219.tar.bz2
+ echo 'Open core using the following command to get a proper stack'
Open core using the following command to get a proper stack
+ echo 'Example: From root of extracted tarball'
Example: From root of extracted tarball
+ echo '\t\tgdb -ex '\''set sysroot ./'\'' -ex '\''core-file ./build/install/cores/xxx.core'\'' <target, say ./build/install/sbin/glusterd>'
\t\tgdb -ex 'set sysroot ./' -ex 'core-file ./build/install/cores/xxx.core' <target, say ./build/install/sbin/glusterd>
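
Concretely, for this run that might look like the sketch below (the archive URL, binary name, and core name are taken from the log above; the exact path of the core inside the extracted tarball is an assumption based on the example command):

    # fetch and unpack the archived build and cores (URL from the log above)
    wget http://builder100.cloud.gluster.org/archived_builds/build-install-experimental-periodic-219.tar.bz2
    tar -xjf build-install-experimental-periodic-219.tar.bz2

    # from the root of the extracted tarball, open the reported core against the
    # reported binary; the cores directory below is assumed from the example above
    gdb -ex 'set sysroot ./' \
        -ex 'core-file ./build/install/cores/glusterepoll0-8999.core' \
        ./build/install/sbin/glusterfs
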
+ RET=1
+ '[' 1 -ne 0 ']'
+ tar -czf <https://build.gluster.org/job/experimental-periodic/ws/glusterfs-logs.tgz> /var/log/glusterfs /var/log/messages
tar: Removing leading `/' from member names
+ scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i <https://build.gluster.org/job/experimental-periodic/ws/> glusterfs-logs.tgz _logs_collector at http.int.rht.gluster.org:/var/www/glusterfs-logs/experimental-periodic-219.tgz
ssh: connect to host http.int.rht.gluster.org port 22: Connection timed out
lost connection
+ true
+ case $(uname -s) in
++ uname -s
+ /sbin/sysctl -w kernel.core_pattern=/%e-%p.core
kernel.core_pattern = /%e-%p.core
+ exit 1
Build step 'Execute shell' marked build as failure

