find lower bound
-
Below is an algorithm I wrote to find the lower bound of a given key in a sorted table — that is, it finds the greatest key that is <= the given one. I wonder whether there is a more compact or more efficient algorithm.
#include <stddef.h>
/*
* Perform a binary lower bound search.
*
*/
static size_t blsize;                               /* element size of the table, in bytes */
static const void *skey;                            /* key being searched for              */
static int (*cmpf)(const void *e1, const void *e2); /* user-supplied comparator            */

/*
 * lbnd - binary lower-bound search over a sorted table.
 *
 * Returns a pointer to the greatest element that compares <= *skey in
 * the table of 'size' elements starting at 'base', or NULL when the
 * key compares lower than every element (including when size == 0).
 *
 * Rewritten iteratively: the original recursive version needed
 * O(log n) call frames (and markup had mangled its '*' tokens);
 * this loop yields the same result with O(1) stack and no extra
 * size==0 / size==1 special cases.
 */
static void *lbnd(const void *base, size_t size)
{
	const char *lo = base;   /* left edge of the remaining search range */
	const char *best = NULL; /* last element known to compare <= key    */

	while (size > 0) {
		size_t mid = size >> 1;
		const char *p = lo + mid * blsize;
		int c = (*cmpf)(skey, p);

		if (c == 0)
			return (void *)p;    /* exact match                    */
		if (c < 0) {
			size = mid;          /* key < *p: keep [lo, p)         */
		} else {
			best = p;            /* *p < key: p is a candidate     */
			lo = p + blsize;     /* continue strictly right of p   */
			size -= mid + 1;
		}
	}
	return (void *)best;
}
/*
 * lbound - public entry point for the lower-bound binary search.
 *
 * key    - key to locate
 * base0  - start of the sorted table
 * nmemb  - number of elements in the table
 * size   - size of one element, in bytes
 * compar - qsort/bsearch-style comparator; called as compar(key, element)
 *
 * Returns a pointer to the greatest element <= *key, or NULL when key
 * compares lower than every element.
 */
void *lbound(const void *key, const void *base0, size_t nmemb, size_t size, int (*compar)(const void *e1, const void *e2))
{
/* Stash the loop-invariant parameters in file-scope statics so the
 * recursive lbnd() does not have to pass them on every call.
 * NOTE(review): this makes lbound non-reentrant and thread-unsafe. */
blsize = size;
cmpf = compar;
skey = key;
return lbnd(base0,nmemb);
}The static function
lbnd
does the search using static variables which contain constant values, because lbnd
is called recursively and this minimizes the overhead of parameter passing. -
Below, there is an algorithm which I wrote to find lower bound of the given key in a sorted table. In other words, it finds a key which is <= of a given one. I wonder whether there is a more compact algorithm or maybe more efficient.
#include <stddef.h>
/*
* Perform a binary lower bound search.
*
*/static size_t blsize;
static const void *skey;
static int (*cmpf)(const void *e1, const void *e2);static void *lbnd(const void *base, size_t size)
{
const char *p,*p1;
int c;
size_t n;if(size==0) return NULL; if(size==1) { c = (*cmpf)(skey,base); if(c<0) return NULL; else return (void *)base; } n = size >> 1; p = (char *)base + n*blsize; c = (*cmpf)(skey,p); if(c==0) return (void *)p; if(c<0) { return lbnd(base,n); } else { p1 = lbnd((char*)p + blsize,size - n - 1); if(p1==NULL) return (void *)p; else return (void *)p1; }
}
void *lbound(const void *key, const void *base0, size_t nmemb, size_t size, int (*compar)(const void *e1, const void *e2))
{
blsize = size;
cmpf = compar;
skey = key;
return lbnd(base0,nmemb);
}The static function
lbnd
does the search using static variables which contain constant values becauselbnd
is called recursively and to minimize code overhead in parameters passing.“I think the bubble sort would be the wrong way to go.” :)
-
“I think the bubble sort would be the wrong way to go.” :)
-
Below, there is an algorithm which I wrote to find lower bound of the given key in a sorted table. In other words, it finds a key which is <= of a given one. I wonder whether there is a more compact algorithm or maybe more efficient.
#include <stddef.h>
/*
* Perform a binary lower bound search.
*
*/static size_t blsize;
static const void *skey;
static int (*cmpf)(const void *e1, const void *e2);static void *lbnd(const void *base, size_t size)
{
const char *p,*p1;
int c;
size_t n;if(size==0) return NULL; if(size==1) { c = (*cmpf)(skey,base); if(c<0) return NULL; else return (void *)base; } n = size >> 1; p = (char *)base + n*blsize; c = (*cmpf)(skey,p); if(c==0) return (void *)p; if(c<0) { return lbnd(base,n); } else { p1 = lbnd((char*)p + blsize,size - n - 1); if(p1==NULL) return (void *)p; else return (void *)p1; }
}
void *lbound(const void *key, const void *base0, size_t nmemb, size_t size, int (*compar)(const void *e1, const void *e2))
{
blsize = size;
cmpf = compar;
skey = key;
return lbnd(base0,nmemb);
}The static function
lbnd
does the search using static variables which contain constant values becauselbnd
is called recursively and to minimize code overhead in parameters passing. -
Maybe yes maybe no. The question is not about sorting but about searching the sorted table, though.
-
Below, there is an algorithm which I wrote to find lower bound of the given key in a sorted table. In other words, it finds a key which is <= of a given one. I wonder whether there is a more compact algorithm or maybe more efficient.
#include <stddef.h>
/*
* Perform a binary lower bound search.
*
*/static size_t blsize;
static const void *skey;
static int (*cmpf)(const void *e1, const void *e2);static void *lbnd(const void *base, size_t size)
{
const char *p,*p1;
int c;
size_t n;if(size==0) return NULL; if(size==1) { c = (*cmpf)(skey,base); if(c<0) return NULL; else return (void *)base; } n = size >> 1; p = (char *)base + n*blsize; c = (*cmpf)(skey,p); if(c==0) return (void *)p; if(c<0) { return lbnd(base,n); } else { p1 = lbnd((char*)p + blsize,size - n - 1); if(p1==NULL) return (void *)p; else return (void *)p1; }
}
void *lbound(const void *key, const void *base0, size_t nmemb, size_t size, int (*compar)(const void *e1, const void *e2))
{
blsize = size;
cmpf = compar;
skey = key;
return lbnd(base0,nmemb);
}The static function
lbnd
does the search using static variables which contain constant values becauselbnd
is called recursively and to minimize code overhead in parameters passing.Hi,
liquid_ wrote:
I wonder whether there is a more compact algorithm or maybe more efficient
As you use
size_t
you are probably using a C++ compiler here. Then ifKey
is any type withoperator<(const Key& other)
defined:#include <algorithm>
std::min_element(/* const Key* */ base, base + /*size_t*/ nElts /* = number of elements*/);returns a
const Key*
pointing to the smallest key in the array. cheers, ARWhen the wise (person) points at the moon the fool looks at the finger (Chinese proverb)
-
Hi,
liquid_ wrote:
I wonder whether there is a more compact algorithm or maybe more efficient
As you use
size_t
you are probably using a C++ compiler here. Then ifKey
is any type withoperator<(const Key& other)
defined:#include <algorithm>
std::min_element(/* const Key* */ base, base + /*size_t*/ nElts /* = number of elements*/);returns a
const Key*
pointing to the smallest key in the array. cheers, ARWhen the wise (person) points at the moon the fool looks at the finger (Chinese proverb)
Thanks for constructive reply. Firstly, I used
size_t
only for shortage ofunsigned int
. The rest of the code is written in pure C and should stay that way — it is more universal. Secondly, analyzing the std::min_element
function it is clear that it has linear complexity. What I tried to aim is logarithmic complexity. However, comparing my implementation oflbound(const void* key, void *base, unsigned int nmemb, unsigned int size, int (*compare)(const void *e1, const void *e2))
with
bsearch
implementation, it seemed to me that mylbound
is not so compact and efficient asbsearch
standard implementation (two additional checks -size==0
andsize==1
, and one more comparison aftersize==1
check). Maybe it's a matter of the lower-bound specifics. bsearch
just returns NULL if it does not find a key,lbound
returns NULL only if key is lower than first element of table. Regards -
Thanks for constructive reply. Firstly, I used
size_t
only for shortage ofunsigned int
. The rest of code is written in pure C and it should be just such - a more universal one. Secondly, analyzing thestd::min_element
function it is clear that it has linear complexity. What I tried to aim is logarithmic complexity. However, comparing my implementation oflbound(const void* key, void *base, unsigned int nmemb, unsigned int size, int (*compare)(const void *e1, const void *e2))
with
bsearch
implementation, it seemed to me that mylbound
is not so compact and efficient asbsearch
standard implementation (two additional checks -size==0
andsize==1
, and one more comparison aftersize==1
check). Maybe its the matter of lower bound specific.bsearch
just returns NULL if it does not find a key,lbound
returns NULL only if key is lower than first element of table. RegardsHi,
liquid_ wrote:
code is written in pure C and it should be just such - a more universal one.
This is really planet-dependant :)
liquid_ wrote:
What I tried to aim is logarithmic complexity.
If your data are unsorted you will have to check each and any for smaller than current minimum, so you will never get under linear complexity. If your data are sorted the problem is trivial. More generally you should think that more man*hours have been spent on each part of the Standard C++ Library than you can provide in your total professional life. Good luck, AR
When the wise (person) points at the moon the fool looks at the finger (Chinese proverb)
-
Hi,
liquid_ wrote:
code is written in pure C and it should be just such - a more universal one.
This is really planet-dependant :)
liquid_ wrote:
What I tried to aim is logarithmic complexity.
If your data are unsorted you will have to check each and any for smaller than current minimum, so you will never get under linear complexity. If your data are sorted the problem is trivial. More generally you should think that more man*hours have been spent on each part of the Standard C++ Library than you can provide in your total professional life. Good luck, AR
When the wise (person) points at the moon the fool looks at the finger (Chinese proverb)
The assumption is that data are sorted. I'd just like to know if I found this trivial solution. Concerning the philosophy of using ready made STL or creating own solutions, it depends on particular needs. Many programs are written in C not in C++ because of various reasons (eg. UNIX or LINUX areas). I use pure C or C++ with STL or other available libraries (eg. BOOST, MFC, .NET aso) as well. Most of software I do is made just for fun or to search/find interesting solutions, or to resolve some problems I meet in my job. It is not my core business. Anyway, thanks for your points. Regards