| Message ID | 863a4972681a395e0e73ac1081b3f065b92592d0.1704813698.git.joerg.sommer@navimatix.de |
|---|---|
| State | Accepted |
| Headers | show |
| Series | [meta-oe,v4] bonnie++: New recipe for version 2.0 | expand |
ERROR: bonnie++-2.00a-r0 do_patch: QA Issue: Fuzz detected:
Applying patch makefile-use-link-for-helper.patch
patching file Makefile.in
Hunk #1 succeeded at 11 with fuzz 1.
The context lines in the patches can be updated with devtool:
devtool modify bonnie++
devtool finish --force-patch-refresh bonnie++ <layer_path>
Don't forget to review changes done by devtool!
Patch log indicates that patches do not apply cleanly. [patch-fuzz]
ERROR: bonnie++-2.00a-r0 do_patch: Fatal QA errors were found, failing task.
On Tue, Jan 9, 2024 at 7:22 AM Jörg Sommer via lists.openembedded.org
<joerg.sommer=navimatix.de@lists.openembedded.org> wrote:
>
> From: Jörg Sommer <joerg.sommer@navimatix.de>
>
> Newer versions of bonnie get published on
> <https://doc.coker.com.au/projects/bonnie/>. Unfortunately, the new version
> doesn't compile with g++ 11 which requires *fix-csv2html-data.patch* and
> configure fails due to cross compilation which gets fixed
> with *fix-configure-lfs.patch*
>
> Signed-off-by: Jörg Sommer <joerg.sommer@navimatix.de>
> ---
> .../bonnie/bonnie++/fix-configure-lfs.patch | 39 ++++
> .../bonnie/bonnie++/fix-csv2html-data.patch | 183 ++++++++++++++++++
> .../makefile-use-link-for-helper.patch | 24 +++
> .../bonnie/bonnie++_2.00a.bb | 33 ++++
> 4 files changed, 279 insertions(+)
> create mode 100644 meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch
> create mode 100644 meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch
> create mode 100644 meta-oe/recipes-benchmark/bonnie/bonnie++/makefile-use-link-for-helper.patch
> create mode 100644 meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb
>
> diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch
> new file mode 100644
> index 000000000..af20acdcd
> --- /dev/null
> +++ b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch
> @@ -0,0 +1,39 @@
> +Upstream-Status: Submitted [https://salsa.debian.org/etbe/bonnie/-/merge_requests/3/diffs?commit_id=4ffece51791ba75ddca2e664cdce726cc40c92d3]
> +
> +diff --git i/configure.in w/configure.in
> +index 080e40c..f2a2bbe 100644
> +--- i/configure.in
> ++++ w/configure.in
> +@@ -82,8 +82,15 @@ void * thread_func(void * param) { return NULL; }
> + , thread_ldflags="-lpthread"
> + , thread_ldflags="-pthread")
> +
> +-AC_SUBST(large_file)
> +-AC_TRY_RUN([#ifndef _LARGEFILE64_SOURCE
> ++AC_ARG_ENABLE(lfs,
> ++ [ --disable-lfs disable large file support],
> ++ LFS_CHOICE=$enableval, LFS_CHOICE=check)
> ++
> ++if test "$LFS_CHOICE" = yes; then
> ++ bonniepp_cv_large_file=yes
> ++elif test "$LFS_CHOICE" = check; then
> ++ AC_CACHE_CHECK([whether to enable -D_LARGEFILE64_SOURCE], bonniepp_cv_large_file,
> ++ AC_TRY_RUN([#ifndef _LARGEFILE64_SOURCE
> + #define _LARGEFILE64_SOURCE
> + #endif
> + #include <stdio.h>
> +@@ -118,8 +125,12 @@ int main () {
> + }
> + close(fd);
> + return 0;
> +-}], large_file="yes")
> +-if [[ -n "$large_file" ]]; then
> ++}], bonniepp_cv_large_file="yes"))
> ++fi
> ++
> ++AC_SUBST(large_file)
> ++
> ++if [[ -n "$bonniepp_cv_large_file" ]]; then
> + large_file="#define _LARGEFILE64_SOURCE"
> + fi
> +
> diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch
> new file mode 100644
> index 000000000..4b37b8d65
> --- /dev/null
> +++ b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch
> @@ -0,0 +1,183 @@
> +commit 7e9433a56f22426b11cbc9bd80e0debca67c893b
> +Author: Jörg Sommer <joerg.sommer@navimatix.de>
> +Date: Mon Jun 26 12:38:30 2023 +0200
> +
> + csv2html: Explicitly reference data in top level
> +
> + With g++ 11 *data* became ambiguous with [std::data][1]. Therefore it's
> + needed to explicitly address the variable from the top level scope.
> +
> + [1] https://en.cppreference.com/w/cpp/iterator/data
> +
> +Upstream-Status: Submitted [https://salsa.debian.org/etbe/bonnie/-/merge_requests/3/diffs?commit_id=fb13a71d56dab8aaa39233fcaaedfb0ba4ad647d]
> +
> +diff --git a/bon_csv2html.cpp b/bon_csv2html.cpp
> +index e9d9c50..652e330 100644
> +--- a/bon_csv2html.cpp
> ++++ b/bon_csv2html.cpp
> +@@ -87,8 +87,8 @@ int main(int argc, char **argv)
> + read_in(buf);
> + }
> +
> +- props = new PPCCHAR[data.size()];
> +- for(i = 0; i < data.size(); i++)
> ++ props = new PPCCHAR[::data.size()];
> ++ for(i = 0; i < ::data.size(); i++)
> + {
> + props[i] = new PCCHAR[MAX_ITEMS];
> + props[i][0] = NULL;
> +@@ -109,7 +109,7 @@ int main(int argc, char **argv)
> + }
> + calc_vals();
> + int mid_width = header();
> +- for(i = 0; i < data.size(); i++)
> ++ for(i = 0; i < ::data.size(); i++)
> + {
> + // First print the average speed line
> + printf("<tr>");
> +@@ -171,23 +171,23 @@ int compar(const void *a, const void *b)
> +
> + void calc_vals()
> + {
> +- ITEM *arr = new ITEM[data.size()];
> ++ ITEM *arr = new ITEM[::data.size()];
> + for(unsigned int column_ind = 0; column_ind < MAX_ITEMS; column_ind++)
> + {
> + switch(vals[column_ind])
> + {
> + case eNoCols:
> + {
> +- for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
> ++ for(unsigned int row_ind = 0; row_ind < ::data.size(); row_ind++)
> + {
> + if(column_ind == COL_CONCURRENCY)
> + {
> +- if(data[row_ind][column_ind] && strcmp("1", data[row_ind][column_ind]))
> ++ if(::data[row_ind][column_ind] && strcmp("1", ::data[row_ind][column_ind]))
> + col_used[column_ind] = true;
> + }
> + else
> + {
> +- if(data[row_ind][column_ind] && strlen(data[row_ind][column_ind]))
> ++ if(::data[row_ind][column_ind] && strlen(::data[row_ind][column_ind]))
> + col_used[column_ind] = true;
> + }
> + }
> +@@ -195,22 +195,22 @@ void calc_vals()
> + break;
> + case eCPU:
> + {
> +- for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
> ++ for(unsigned int row_ind = 0; row_ind < ::data.size(); row_ind++)
> + {
> + double work, cpu;
> + arr[row_ind].val = 0.0;
> +- if(data[row_ind].size() > column_ind
> +- && sscanf(data[row_ind][column_ind - 1], "%lf", &work) == 1
> +- && sscanf(data[row_ind][column_ind], "%lf", &cpu) == 1)
> ++ if(::data[row_ind].size() > column_ind
> ++ && sscanf(::data[row_ind][column_ind - 1], "%lf", &work) == 1
> ++ && sscanf(::data[row_ind][column_ind], "%lf", &cpu) == 1)
> + {
> + arr[row_ind].val = cpu / work;
> + }
> + arr[row_ind].pos = row_ind;
> + }
> +- qsort(arr, data.size(), sizeof(ITEM), compar);
> ++ qsort(arr, ::data.size(), sizeof(ITEM), compar);
> + int col_count = -1;
> + double min_col = -1.0, max_col = -1.0;
> +- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
> ++ for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
> + {
> + // if item is different from previous or if the first row
> + // (sort_ind == 0) then increment col count
> +@@ -239,7 +239,7 @@ void calc_vals()
> + min_col /= mult;
> + }
> + double range_col = max_col - min_col;
> +- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
> ++ for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
> + {
> + if(arr[sort_ind].col_ind > -1)
> + {
> +@@ -250,7 +250,7 @@ void calc_vals()
> + }
> + else
> + {
> +- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
> ++ for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
> + {
> + if(vals[column_ind] == eLatency)
> + {
> +@@ -263,25 +263,25 @@ void calc_vals()
> + case eSpeed:
> + case eLatency:
> + {
> +- for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
> ++ for(unsigned int row_ind = 0; row_ind < ::data.size(); row_ind++)
> + {
> + arr[row_ind].val = 0.0;
> +- if(data[row_ind].size() <= column_ind
> +- || sscanf(data[row_ind][column_ind], "%lf", &arr[row_ind].val) == 0)
> ++ if(::data[row_ind].size() <= column_ind
> ++ || sscanf(::data[row_ind][column_ind], "%lf", &arr[row_ind].val) == 0)
> + arr[row_ind].val = 0.0;
> + if(vals[column_ind] == eLatency && arr[row_ind].val != 0.0)
> + {
> +- if(strstr(data[row_ind][column_ind], "ms"))
> ++ if(strstr(::data[row_ind][column_ind], "ms"))
> + arr[row_ind].val *= 1000.0;
> +- else if(!strstr(data[row_ind][column_ind], "us"))
> ++ else if(!strstr(::data[row_ind][column_ind], "us"))
> + arr[row_ind].val *= 1000000.0; // is !us && !ms then secs!
> + }
> + arr[row_ind].pos = row_ind;
> + }
> +- qsort(arr, data.size(), sizeof(ITEM), compar);
> ++ qsort(arr, ::data.size(), sizeof(ITEM), compar);
> + int col_count = -1;
> + double min_col = -1.0, max_col = -1.0;
> +- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
> ++ for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
> + {
> + // if item is different from previous or if the first row
> + // (sort_ind == 0) then increment col count
> +@@ -310,7 +310,7 @@ void calc_vals()
> + min_col /= mult;
> + }
> + double range_col = max_col - min_col;
> +- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
> ++ for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
> + {
> + if(arr[sort_ind].col_ind > -1)
> + {
> +@@ -332,7 +332,7 @@ void calc_vals()
> + }
> + else
> + {
> +- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
> ++ for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
> + {
> + if(vals[column_ind] == eLatency)
> + {
> +@@ -481,16 +481,16 @@ void read_in(CPCCHAR buf)
> + free((void *)arr[0]);
> + return;
> + }
> +- data.push_back(arr);
> ++ ::data.push_back(arr);
> + }
> +
> + void print_item(int num, int item, CPCCHAR extra)
> + {
> + PCCHAR line_data;
> + char buf[1024];
> +- if(int(data[num].size()) > item)
> ++ if(int(::data[num].size()) > item)
> + {
> +- line_data = data[num][item];
> ++ line_data = ::data[num][item];
> + switch(item)
> + {
> + case COL_PUT_BLOCK:
> diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++/makefile-use-link-for-helper.patch b/meta-oe/recipes-benchmark/bonnie/bonnie++/makefile-use-link-for-helper.patch
> new file mode 100644
> index 000000000..f352fd7c0
> --- /dev/null
> +++ b/meta-oe/recipes-benchmark/bonnie/bonnie++/makefile-use-link-for-helper.patch
> @@ -0,0 +1,24 @@
> +Upstream-Status: Submitted [https://salsa.debian.org/etbe/bonnie/-/merge_requests/3/diffs?commit_id=f6ce71d9f97316697c55d0fb3c756becdee04c5c]
> +
> +diff --git i/Makefile.in w/Makefile.in
> +index 6dfdb75..87a5fb1 100644
> +--- i/Makefile.in
> ++++ w/Makefile.in
> +@@ -11,7 +11,7 @@ eprefix=@exec_prefix@
> + WFLAGS=-Wall -W -Wshadow -Wpointer-arith -Wwrite-strings -pedantic -Wcast-align -Wsign-compare -Wpointer-arith -Wwrite-strings -Wformat-security -Wswitch-enum -Winit-self $(MORE_WARNINGS)
> + CFLAGS=-O2 @debug@ -DNDEBUG $(WFLAGS) $(MORECFLAGS)
> + CXX=@CXX@ $(CFLAGS)
> +-LINK=@CXX@
> ++LINK=$(LINK.cc)
> + THREAD_LFLAGS=@thread_ldflags@
> +
> + INSTALL=@INSTALL@
> +@@ -43,7 +43,7 @@ getc_putc: $(GETCOBJS) getc_putc_helper
> + $(LINK) -o getc_putc $(GETCOBJS) $(THREAD_LFLAGS)
> +
> + getc_putc_helper: $(GETCHOBJS)
> +- $(CXX) -o getc_putc_helper $(GETCHOBJS)
> ++ $(LINK) -o getc_putc_helper $(GETCHOBJS)
> +
> + bon_csv2html: bon_csv2html.o
> + $(LINK) bon_csv2html.o -o bon_csv2html
> diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb b/meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb
> new file mode 100644
> index 000000000..29590bfe8
> --- /dev/null
> +++ b/meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb
> @@ -0,0 +1,33 @@
> +SUMMARY = "Tests large file IO and creation/deletion of small files"
> +HOMEPAGE = "https://doc.coker.com.au/projects/bonnie/"
> +SECTION = "benchmark/tests"
> +LICENSE = "GPL-2.0-only"
> +LIC_FILES_CHKSUM = "file://copyright.txt;md5=cd4dde95a6b9d122f0a9150ae9cc3ee0"
> +
> +SRC_URI = "\
> + http://www.coker.com.au/bonnie++/${BPN}-${PV}.tgz \
> + file://fix-configure-lfs.patch \
> + file://fix-csv2html-data.patch \
> + file://makefile-use-link-for-helper.patch \
> +"
> +SRC_URI[sha256sum] = "a8d33bbd81bc7eb559ce5bf6e584b9b53faea39ccfb4ae92e58f27257e468f0e"
> +
> +# force lfs to skip configure's check, because we are cross-building
> +PACKAGECONFIG ?= "lfs"
> +PACKAGECONFIG[lfs] = "--enable-lfs,--disable-lfs"
> +
> +inherit autotools
> +
> +EXTRA_OECONF += "--disable-stripping"
> +EXTRA_OEMAKE += "-I ${S} VPATH=${S}"
> +CXXFLAGS += "-I ${S}"
> +
> +do_install() {
> + oe_runmake eprefix='${D}${exec_prefix}' install-bin
> +}
> +
> +PACKAGE_BEFORE_PN += "${PN}-scripts"
> +
> +FILES:${PN}-scripts = "${bindir}/bon_csv2*"
> +
> +RDEPENDS:${PN}-scripts += "perl"
> --
> 2.34.1
>
>
> -=-=-=-=-=-=-=-=-=-=-=-
> Links: You receive all messages sent to this group.
> View/Reply Online (#108199): https://lists.openembedded.org/g/openembedded-devel/message/108199
> Mute This Topic: https://lists.openembedded.org/mt/103621192/1997914
> Group Owner: openembedded-devel+owner@lists.openembedded.org
> Unsubscribe: https://lists.openembedded.org/g/openembedded-devel/unsub [raj.khem@gmail.com]
> -=-=-=-=-=-=-=-=-=-=-=-
>
diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch
new file mode 100644
index 000000000..af20acdcd
--- /dev/null
+++ b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch
@@ -0,0 +1,39 @@
+Upstream-Status: Submitted [https://salsa.debian.org/etbe/bonnie/-/merge_requests/3/diffs?commit_id=4ffece51791ba75ddca2e664cdce726cc40c92d3]
+
+diff --git i/configure.in w/configure.in
+index 080e40c..f2a2bbe 100644
+--- i/configure.in
++++ w/configure.in
+@@ -82,8 +82,15 @@ void * thread_func(void * param) { return NULL; }
+ , thread_ldflags="-lpthread"
+ , thread_ldflags="-pthread")
+
+-AC_SUBST(large_file)
+-AC_TRY_RUN([#ifndef _LARGEFILE64_SOURCE
++AC_ARG_ENABLE(lfs,
++ [  --disable-lfs           disable large file support],
++ LFS_CHOICE=$enableval, LFS_CHOICE=check)
++
++if test "$LFS_CHOICE" = yes; then
++ bonniepp_cv_large_file=yes
++elif test "$LFS_CHOICE" = check; then
++ AC_CACHE_CHECK([whether to enable -D_LARGEFILE64_SOURCE], bonniepp_cv_large_file,
++ AC_TRY_RUN([#ifndef _LARGEFILE64_SOURCE
+ #define _LARGEFILE64_SOURCE
+ #endif
+ #include <stdio.h>
+@@ -118,8 +125,12 @@ int main () {
+ }
+ close(fd);
+ return 0;
+-}], large_file="yes")
+-if [[ -n "$large_file" ]]; then
++}], bonniepp_cv_large_file="yes"))
++fi
++
++AC_SUBST(large_file)
++
++if [[ -n "$bonniepp_cv_large_file" ]]; then
+ large_file="#define _LARGEFILE64_SOURCE"
+ fi
+
diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch
new file mode 100644
index 000000000..4b37b8d65
--- /dev/null
+++ b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch
@@ -0,0 +1,183 @@
+commit 7e9433a56f22426b11cbc9bd80e0debca67c893b
+Author: Jörg Sommer <joerg.sommer@navimatix.de>
+Date: Mon Jun 26 12:38:30 2023 +0200
+
+ csv2html: Explicitly reference data in top level
+
+ With g++ 11 *data* became ambiguous with [std::data][1]. Therefore it's
+ needed to explicitly address the variable from the top level scope.
+
+ [1] https://en.cppreference.com/w/cpp/iterator/data
+
+Upstream-Status: Submitted [https://salsa.debian.org/etbe/bonnie/-/merge_requests/3/diffs?commit_id=fb13a71d56dab8aaa39233fcaaedfb0ba4ad647d]
+
+diff --git a/bon_csv2html.cpp b/bon_csv2html.cpp
+index e9d9c50..652e330 100644
+--- a/bon_csv2html.cpp
++++ b/bon_csv2html.cpp
+@@ -87,8 +87,8 @@ int main(int argc, char **argv)
+ read_in(buf);
+ }
+
+- props = new PPCCHAR[data.size()];
+- for(i = 0; i < data.size(); i++)
++ props = new PPCCHAR[::data.size()];
++ for(i = 0; i < ::data.size(); i++)
+ {
+ props[i] = new PCCHAR[MAX_ITEMS];
+ props[i][0] = NULL;
+@@ -109,7 +109,7 @@ int main(int argc, char **argv)
+ }
+ calc_vals();
+ int mid_width = header();
+- for(i = 0; i < data.size(); i++)
++ for(i = 0; i < ::data.size(); i++)
+ {
+ // First print the average speed line
+ printf("<tr>");
+@@ -171,23 +171,23 @@ int compar(const void *a, const void *b)
+
+ void calc_vals()
+ {
+- ITEM *arr = new ITEM[data.size()];
++ ITEM *arr = new ITEM[::data.size()];
+ for(unsigned int column_ind = 0; column_ind < MAX_ITEMS; column_ind++)
+ {
+ switch(vals[column_ind])
+ {
+ case eNoCols:
+ {
+- for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
++ for(unsigned int row_ind = 0; row_ind < ::data.size(); row_ind++)
+ {
+ if(column_ind == COL_CONCURRENCY)
+ {
+- if(data[row_ind][column_ind] && strcmp("1", data[row_ind][column_ind]))
++ if(::data[row_ind][column_ind] && strcmp("1", ::data[row_ind][column_ind]))
+ col_used[column_ind] = true;
+ }
+ else
+ {
+- if(data[row_ind][column_ind] && strlen(data[row_ind][column_ind]))
++ if(::data[row_ind][column_ind] && strlen(::data[row_ind][column_ind]))
+ col_used[column_ind] = true;
+ }
+ }
+@@ -195,22 +195,22 @@ void calc_vals()
+ break;
+ case eCPU:
+ {
+- for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
++ for(unsigned int row_ind = 0; row_ind < ::data.size(); row_ind++)
+ {
+ double work, cpu;
+ arr[row_ind].val = 0.0;
+- if(data[row_ind].size() > column_ind
+- && sscanf(data[row_ind][column_ind - 1], "%lf", &work) == 1
+- && sscanf(data[row_ind][column_ind], "%lf", &cpu) == 1)
++ if(::data[row_ind].size() > column_ind
++ && sscanf(::data[row_ind][column_ind - 1], "%lf", &work) == 1
++ && sscanf(::data[row_ind][column_ind], "%lf", &cpu) == 1)
+ {
+ arr[row_ind].val = cpu / work;
+ }
+ arr[row_ind].pos = row_ind;
+ }
+- qsort(arr, data.size(), sizeof(ITEM), compar);
++ qsort(arr, ::data.size(), sizeof(ITEM), compar);
+ int col_count = -1;
+ double min_col = -1.0, max_col = -1.0;
+- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
++ for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
+ {
+ // if item is different from previous or if the first row
+ // (sort_ind == 0) then increment col count
+@@ -239,7 +239,7 @@ void calc_vals()
+ min_col /= mult;
+ }
+ double range_col = max_col - min_col;
+- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
++ for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
+ {
+ if(arr[sort_ind].col_ind > -1)
+ {
+@@ -250,7 +250,7 @@ void calc_vals()
+ }
+ else
+ {
+- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
++ for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
+ {
+ if(vals[column_ind] == eLatency)
+ {
+@@ -263,25 +263,25 @@ void calc_vals()
+ case eSpeed:
+ case eLatency:
+ {
+- for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
++ for(unsigned int row_ind = 0; row_ind < ::data.size(); row_ind++)
+ {
+ arr[row_ind].val = 0.0;
+- if(data[row_ind].size() <= column_ind
+- || sscanf(data[row_ind][column_ind], "%lf", &arr[row_ind].val) == 0)
++ if(::data[row_ind].size() <= column_ind
++ || sscanf(::data[row_ind][column_ind], "%lf", &arr[row_ind].val) == 0)
+ arr[row_ind].val = 0.0;
+ if(vals[column_ind] == eLatency && arr[row_ind].val != 0.0)
+ {
+- if(strstr(data[row_ind][column_ind], "ms"))
++ if(strstr(::data[row_ind][column_ind], "ms"))
+ arr[row_ind].val *= 1000.0;
+- else if(!strstr(data[row_ind][column_ind], "us"))
++ else if(!strstr(::data[row_ind][column_ind], "us"))
+ arr[row_ind].val *= 1000000.0; // is !us && !ms then secs!
+ }
+ arr[row_ind].pos = row_ind;
+ }
+- qsort(arr, data.size(), sizeof(ITEM), compar);
++ qsort(arr, ::data.size(), sizeof(ITEM), compar);
+ int col_count = -1;
+ double min_col = -1.0, max_col = -1.0;
+- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
++ for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
+ {
+ // if item is different from previous or if the first row
+ // (sort_ind == 0) then increment col count
+@@ -310,7 +310,7 @@ void calc_vals()
+ min_col /= mult;
+ }
+ double range_col = max_col - min_col;
+- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
++ for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
+ {
+ if(arr[sort_ind].col_ind > -1)
+ {
+@@ -332,7 +332,7 @@ void calc_vals()
+ }
+ else
+ {
+- for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
++ for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
+ {
+ if(vals[column_ind] == eLatency)
+ {
+@@ -481,16 +481,16 @@ void read_in(CPCCHAR buf)
+ free((void *)arr[0]);
+ return;
+ }
+- data.push_back(arr);
++ ::data.push_back(arr);
+ }
+
+ void print_item(int num, int item, CPCCHAR extra)
+ {
+ PCCHAR line_data;
+ char buf[1024];
+- if(int(data[num].size()) > item)
++ if(int(::data[num].size()) > item)
+ {
+- line_data = data[num][item];
++ line_data = ::data[num][item];
+ switch(item)
+ {
+ case COL_PUT_BLOCK:
diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++/makefile-use-link-for-helper.patch b/meta-oe/recipes-benchmark/bonnie/bonnie++/makefile-use-link-for-helper.patch
new file mode 100644
index 000000000..f352fd7c0
--- /dev/null
+++ b/meta-oe/recipes-benchmark/bonnie/bonnie++/makefile-use-link-for-helper.patch
@@ -0,0 +1,24 @@
+Upstream-Status: Submitted [https://salsa.debian.org/etbe/bonnie/-/merge_requests/3/diffs?commit_id=f6ce71d9f97316697c55d0fb3c756becdee04c5c]
+
+diff --git i/Makefile.in w/Makefile.in
+index 6dfdb75..87a5fb1 100644
+--- i/Makefile.in
++++ w/Makefile.in
+@@ -11,7 +11,7 @@ eprefix=@exec_prefix@
+ WFLAGS=-Wall -W -Wshadow -Wpointer-arith -Wwrite-strings -pedantic -Wcast-align -Wsign-compare -Wpointer-arith -Wwrite-strings -Wformat-security -Wswitch-enum -Winit-self $(MORE_WARNINGS)
+ CFLAGS=-O2 @debug@ -DNDEBUG $(WFLAGS) $(MORECFLAGS)
+ CXX=@CXX@ $(CFLAGS)
+-LINK=@CXX@
++LINK=$(LINK.cc)
+ THREAD_LFLAGS=@thread_ldflags@
+
+ INSTALL=@INSTALL@
+@@ -43,7 +43,7 @@ getc_putc: $(GETCOBJS) getc_putc_helper
+ $(LINK) -o getc_putc $(GETCOBJS) $(THREAD_LFLAGS)
+
+ getc_putc_helper: $(GETCHOBJS)
+- $(CXX) -o getc_putc_helper $(GETCHOBJS)
++ $(LINK) -o getc_putc_helper $(GETCHOBJS)
+
+ bon_csv2html: bon_csv2html.o
+ $(LINK) bon_csv2html.o -o bon_csv2html
diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb b/meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb
new file mode 100644
index 000000000..29590bfe8
--- /dev/null
+++ b/meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb
@@ -0,0 +1,33 @@
+SUMMARY = "Tests large file IO and creation/deletion of small files"
+HOMEPAGE = "https://doc.coker.com.au/projects/bonnie/"
+SECTION = "benchmark/tests"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://copyright.txt;md5=cd4dde95a6b9d122f0a9150ae9cc3ee0"
+
+SRC_URI = "\
+ http://www.coker.com.au/bonnie++/${BPN}-${PV}.tgz \
+ file://fix-configure-lfs.patch \
+ file://fix-csv2html-data.patch \
+ file://makefile-use-link-for-helper.patch \
+"
+SRC_URI[sha256sum] = "a8d33bbd81bc7eb559ce5bf6e584b9b53faea39ccfb4ae92e58f27257e468f0e"
+
+# force lfs to skip configure's check, because we are cross-building
+PACKAGECONFIG ?= "lfs"
+PACKAGECONFIG[lfs] = "--enable-lfs,--disable-lfs"
+
+inherit autotools
+
+EXTRA_OECONF += "--disable-stripping"
+EXTRA_OEMAKE += "-I ${S} VPATH=${S}"
+CXXFLAGS += "-I ${S}"
+
+do_install() {
+ oe_runmake eprefix='${D}${exec_prefix}' install-bin
+}
+
+PACKAGE_BEFORE_PN += "${PN}-scripts"
+
+FILES:${PN}-scripts = "${bindir}/bon_csv2*"
+
+RDEPENDS:${PN}-scripts += "perl"