Memory Optimizations

Avoid heap allocation in tokenizer() by copying short input strings into a fixed-size embedded buffer
Optimized MySQL_ResultSet::buffer_to_PSarrayOut() to skip the shrink-realloc and replacement malloc on the final flush of a resultset
pull/1209/head
René Cannaò 9 years ago
parent 4e4938a1e9
commit fbd2f15efe

@ -29,7 +29,7 @@ class MySQL_ResultSet {
bool get_resultset(PtrSizeArray *PSarrayFinal); bool get_resultset(PtrSizeArray *PSarrayFinal);
unsigned char *buffer; unsigned char *buffer;
unsigned int buffer_used; unsigned int buffer_used;
void buffer_to_PSarrayOut(); void buffer_to_PSarrayOut(bool _last=false);
unsigned long long current_size(); unsigned long long current_size();
}; };

@ -5,6 +5,7 @@
#ifndef C_TOKENIZER_H #ifndef C_TOKENIZER_H
#define C_TOKENIZER_H #define C_TOKENIZER_H
#define PROXYSQL_TOKENIZER_BUFFSIZE 128
#ifndef FIRST_COMMENT_MAX_LENGTH #ifndef FIRST_COMMENT_MAX_LENGTH
#define FIRST_COMMENT_MAX_LENGTH 1024 #define FIRST_COMMENT_MAX_LENGTH 1024
@ -12,6 +13,8 @@
typedef struct typedef struct
{ {
char buffer[PROXYSQL_TOKENIZER_BUFFSIZE];
int s_length;
char* s; char* s;
const char* delimiters; const char* delimiters;
char* current; char* current;
@ -25,7 +28,8 @@ enum { TOKENIZER_EMPTIES_OK, TOKENIZER_NO_EMPTIES };
#ifdef __cplusplus #ifdef __cplusplus
extern "C" { extern "C" {
#endif /* __cplusplus */ #endif /* __cplusplus */
tokenizer_t tokenizer( const char* s, const char* delimiters, int empties ); //tokenizer_t tokenizer( const char* s, const char* delimiters, int empties );
void tokenizer( tokenizer_t *, const char* s, const char* delimiters, int empties );
const char* free_tokenizer( tokenizer_t* tokenizer ); const char* free_tokenizer( tokenizer_t* tokenizer );
const char* tokenize( tokenizer_t* tokenizer ); const char* tokenize( tokenizer_t* tokenizer );
char * mysql_query_digest_and_first_comment(char *s , int len , char **first_comment, char *buf); char * mysql_query_digest_and_first_comment(char *s , int len , char **first_comment, char *buf);

@ -1891,7 +1891,7 @@ void MySQL_ResultSet::add_eof() {
myprot->generate_pkt_EOF(false, NULL, NULL, sid, 0, mysql->server_status|setStatus, this); myprot->generate_pkt_EOF(false, NULL, NULL, sid, 0, mysql->server_status|setStatus, this);
sid++; sid++;
resultset_size += 9; resultset_size += 9;
buffer_to_PSarrayOut(); buffer_to_PSarrayOut(true);
} }
resultset_completed=true; resultset_completed=true;
} }
@ -1925,14 +1925,20 @@ bool MySQL_ResultSet::get_resultset(PtrSizeArray *PSarrayFinal) {
return resultset_completed; return resultset_completed;
} }
void MySQL_ResultSet::buffer_to_PSarrayOut() { void MySQL_ResultSet::buffer_to_PSarrayOut(bool _last) {
if (buffer_used==0) if (buffer_used==0)
return; // exit immediately if the buffer is empty return; // exit immediately if the buffer is empty
if (buffer_used < RESULTSET_BUFLEN/2) { if (buffer_used < RESULTSET_BUFLEN/2) {
buffer=(unsigned char *)realloc(buffer,buffer_used); if (_last == false) {
buffer=(unsigned char *)realloc(buffer,buffer_used);
}
} }
PSarrayOUT->add(buffer,buffer_used); PSarrayOUT->add(buffer,buffer_used);
buffer=(unsigned char *)malloc(RESULTSET_BUFLEN); if (_last) {
buffer = NULL;
} else {
buffer=(unsigned char *)malloc(RESULTSET_BUFLEN);
}
buffer_used=0; buffer_used=0;
} }

@ -1957,7 +1957,8 @@ void MySQL_Threads_Handler::start_listeners() {
GloMTH->set_variable((char *)"interfaces", (char *)"0.0.0.0:6033"); // changed. See isseu #1104 GloMTH->set_variable((char *)"interfaces", (char *)"0.0.0.0:6033"); // changed. See isseu #1104
} }
free(_tmp); free(_tmp);
tokenizer_t tok = tokenizer( variables.interfaces, ";", TOKENIZER_NO_EMPTIES ); tokenizer_t tok;
tokenizer( &tok, variables.interfaces, ";", TOKENIZER_NO_EMPTIES );
const char* token; const char* token;
for (token = tokenize( &tok ); token; token = tokenize( &tok )) { for (token = tokenize( &tok ); token; token = tokenize( &tok )) {
listener_add((char *)token); listener_add((char *)token);
@ -1968,7 +1969,8 @@ void MySQL_Threads_Handler::start_listeners() {
void MySQL_Threads_Handler::stop_listeners() { void MySQL_Threads_Handler::stop_listeners() {
if (variables.interfaces==NULL || strlen(variables.interfaces)==0) if (variables.interfaces==NULL || strlen(variables.interfaces)==0)
return; return;
tokenizer_t tok = tokenizer( variables.interfaces, ";", TOKENIZER_NO_EMPTIES ); tokenizer_t tok;
tokenizer( &tok, variables.interfaces, ";", TOKENIZER_NO_EMPTIES );
const char* token; const char* token;
for (token = tokenize( &tok ); token; token = tokenize( &tok )) { for (token = tokenize( &tok ); token; token = tokenize( &tok )) {
listener_del((char *)token); listener_del((char *)token);

@ -430,7 +430,8 @@ class admin_main_loop_listeners {
delete *ifd; delete *ifd;
*ifd=new ifaces_desc(); *ifd=new ifaces_desc();
int i=0; int i=0;
tokenizer_t tok = tokenizer( list, ";", TOKENIZER_NO_EMPTIES ); tokenizer_t tok;
tokenizer( &tok, list, ";", TOKENIZER_NO_EMPTIES );
const char* token; const char* token;
for ( token = tokenize( &tok ) ; token && i < MAX_IFACES ; token = tokenize( &tok ) ) { for ( token = tokenize( &tok ) ; token && i < MAX_IFACES ; token = tokenize( &tok ) ) {
(*ifd)->add(token); (*ifd)->add(token);
@ -446,7 +447,8 @@ class admin_main_loop_listeners {
wrlock(); wrlock();
int i; int i;
char **ifaces=*_ifaces; char **ifaces=*_ifaces;
tokenizer_t tok = tokenizer( list, ";", TOKENIZER_NO_EMPTIES ); tokenizer_t tok;
tokenizer( &tok, list, ";", TOKENIZER_NO_EMPTIES );
const char* token; const char* token;
ifaces=reset_ifaces(ifaces); ifaces=reset_ifaces(ifaces);
i=0; i=0;
@ -4070,7 +4072,8 @@ void ProxySQL_Admin::add_credentials(char *type, char *credentials, int hostgrou
void ProxySQL_Admin::add_credentials(char *credentials, int hostgroup_id) { void ProxySQL_Admin::add_credentials(char *credentials, int hostgroup_id) {
#endif /* DEBUG */ #endif /* DEBUG */
proxy_debug(PROXY_DEBUG_ADMIN, 4, "Adding %s credentials: %s\n", type, credentials); proxy_debug(PROXY_DEBUG_ADMIN, 4, "Adding %s credentials: %s\n", type, credentials);
tokenizer_t tok = tokenizer( credentials, ";", TOKENIZER_NO_EMPTIES ); tokenizer_t tok;
tokenizer( &tok, credentials, ";", TOKENIZER_NO_EMPTIES );
const char* token; const char* token;
for (token = tokenize( &tok ); token; token = tokenize( &tok )) { for (token = tokenize( &tok ); token; token = tokenize( &tok )) {
char *user=NULL; char *user=NULL;
@ -4092,7 +4095,8 @@ void ProxySQL_Admin::delete_credentials(char *type, char *credentials) {
void ProxySQL_Admin::delete_credentials(char *credentials) { void ProxySQL_Admin::delete_credentials(char *credentials) {
#endif /* DEBUG */ #endif /* DEBUG */
proxy_debug(PROXY_DEBUG_ADMIN, 4, "Removing old %s credentials: %s\n", type, credentials); proxy_debug(PROXY_DEBUG_ADMIN, 4, "Removing old %s credentials: %s\n", type, credentials);
tokenizer_t tok = tokenizer( credentials, ";", TOKENIZER_NO_EMPTIES ); tokenizer_t tok;
tokenizer( &tok, credentials, ";", TOKENIZER_NO_EMPTIES );
const char* token; const char* token;
for (token = tokenize( &tok ); token; token = tokenize( &tok )) { for (token = tokenize( &tok ); token; token = tokenize( &tok )) {
char *user=NULL; char *user=NULL;

@ -435,7 +435,8 @@ QP_rule_t * Query_Processor::new_query_rule(int rule_id, bool active, char *user
newQR->negate_match_pattern=negate_match_pattern; newQR->negate_match_pattern=negate_match_pattern;
newQR->re_modifiers=0; newQR->re_modifiers=0;
{ {
tokenizer_t tok = tokenizer( re_modifiers, ",", TOKENIZER_NO_EMPTIES ); tokenizer_t tok;
tokenizer( &tok, re_modifiers, ",", TOKENIZER_NO_EMPTIES );
const char* token; const char* token;
for (token = tokenize( &tok ); token; token = tokenize( &tok )) { for (token = tokenize( &tok ); token; token = tokenize( &tok )) {
if (strncasecmp(token,(char *)"CASELESS",strlen((char *)"CASELESS"))==0) { if (strncasecmp(token,(char *)"CASELESS",strlen((char *)"CASELESS"))==0) {
@ -1277,7 +1278,8 @@ enum MYSQL_COM_QUERY_command Query_Processor::__query_parser_command_type(SQP_pa
enum MYSQL_COM_QUERY_command ret=MYSQL_COM_QUERY_UNKNOWN; enum MYSQL_COM_QUERY_command ret=MYSQL_COM_QUERY_UNKNOWN;
char c1; char c1;
tokenizer_t tok = tokenizer( text, " ", TOKENIZER_NO_EMPTIES ); tokenizer_t tok;
tokenizer( &tok, text, " ", TOKENIZER_NO_EMPTIES );
char* token=NULL; char* token=NULL;
token=(char *)tokenize(&tok); token=(char *)tokenize(&tok);
if (token==NULL) { if (token==NULL) {
@ -1622,7 +1624,8 @@ __exit__query_parser_command_type:
bool Query_Processor::query_parser_first_comment(Query_Processor_Output *qpo, char *fc) { bool Query_Processor::query_parser_first_comment(Query_Processor_Output *qpo, char *fc) {
bool ret=false; bool ret=false;
tokenizer_t tok = tokenizer( fc, ";", TOKENIZER_NO_EMPTIES ); tokenizer_t tok;
tokenizer( &tok, fc, ";", TOKENIZER_NO_EMPTIES );
const char* token; const char* token;
for ( token = tokenize( &tok ) ; token ; token = tokenize( &tok ) ) { for ( token = tokenize( &tok ) ; token ; token = tokenize( &tok ) ) {
char *key=NULL; char *key=NULL;

@ -373,7 +373,8 @@ class sqlite3server_main_loop_listeners {
delete *ifd; delete *ifd;
*ifd=new ifaces_desc(); *ifd=new ifaces_desc();
int i=0; int i=0;
tokenizer_t tok = tokenizer( list, ";", TOKENIZER_NO_EMPTIES ); tokenizer_t tok;
tokenizer( &tok, list, ";", TOKENIZER_NO_EMPTIES );
const char* token; const char* token;
for ( token = tokenize( &tok ) ; token && i < MAX_IFACES ; token = tokenize( &tok ) ) { for ( token = tokenize( &tok ) ; token && i < MAX_IFACES ; token = tokenize( &tok ) ) {
(*ifd)->add(token); (*ifd)->add(token);
@ -389,7 +390,8 @@ class sqlite3server_main_loop_listeners {
wrlock(); wrlock();
int i; int i;
char **ifaces=*_ifaces; char **ifaces=*_ifaces;
tokenizer_t tok = tokenizer( list, ";", TOKENIZER_NO_EMPTIES ); tokenizer_t tok;
tokenizer( &tok, list, ";", TOKENIZER_NO_EMPTIES );
const char* token; const char* token;
ifaces=reset_ifaces(ifaces); ifaces=reset_ifaces(ifaces);
i=0; i=0;

@ -12,24 +12,36 @@ extern __thread int mysql_thread___query_digests_max_query_length;
#define bool char #define bool char
extern __thread bool mysql_thread___query_digests_lowercase; extern __thread bool mysql_thread___query_digests_lowercase;
/**
 * Initialize a tokenizer over string `s`, splitting on `delimiters`.
 *
 * Memory optimization: inputs shorter than PROXYSQL_TOKENIZER_BUFFSIZE
 * are copied into the tokenizer's embedded buffer instead of being
 * strdup()'d on the heap; free_tokenizer() later uses s_length to tell
 * whether a heap copy must be free()'d.
 *
 * @param result     caller-provided tokenizer to initialize (replaces
 *                   the old by-value return, avoiding a struct copy)
 * @param s          input string; may be NULL (treated as empty input)
 * @param delimiters delimiter set; may be NULL (treated as empty input)
 * @param empties    TOKENIZER_EMPTIES_OK to yield empty tokens,
 *                   anything else (TOKENIZER_NO_EMPTIES) to skip them
 */
void tokenizer(tokenizer_t *result, const char* s, const char* delimiters, int empties )
{
	result->s_length = ( (s && delimiters) ? strlen(s) : 0 );
	result->s = NULL;
	if (result->s_length) {
		if (result->s_length > (PROXYSQL_TOKENIZER_BUFFSIZE-1)) {
			// too long for the embedded buffer: fall back to the heap
			result->s = strdup(s);
		} else {
			// length is already known: copy it (including the NUL
			// terminator) without re-scanning the string as strcpy would
			memcpy(result->buffer, s, result->s_length+1);
			result->s = result->buffer;
		}
	}
	result->delimiters = delimiters;
	result->current = NULL;
	result->next = result->s;
	result->is_ignore_empties = (empties != TOKENIZER_EMPTIES_OK);
}
/**
 * Release any heap memory owned by the tokenizer.
 *
 * Only inputs longer than the embedded buffer were strdup()'d by
 * tokenizer(); shorter inputs live in tokenizer->buffer and need no
 * free(). s_length is the discriminator between the two cases.
 *
 * @return always NULL, preserving the historical
 *         `p = free_tokenizer(&tok);` calling convention
 */
const char* free_tokenizer( tokenizer_t* tokenizer )
{
	if (tokenizer->s_length > (PROXYSQL_TOKENIZER_BUFFSIZE-1)) {
		free(tokenizer->s);
	}
	tokenizer->s = NULL;
	// reset the length so the struct state stays consistent and a
	// repeated call takes the no-free path
	tokenizer->s_length = 0;
	return NULL;
}
const char* tokenize( tokenizer_t* tokenizer ) const char* tokenize( tokenizer_t* tokenizer )
@ -65,7 +77,8 @@ void c_split_2(const char *in, const char *del, char **out1, char **out2) {
*out1=NULL; *out1=NULL;
*out2=NULL; *out2=NULL;
const char *t; const char *t;
tokenizer_t tok = tokenizer( in, del, TOKENIZER_NO_EMPTIES ); tokenizer_t tok;
tokenizer( &tok, in, del, TOKENIZER_NO_EMPTIES );
for ( t=tokenize(&tok); t; t=tokenize(&tok)) { for ( t=tokenize(&tok); t; t=tokenize(&tok)) {
if (*out1==NULL) { *out1=strdup(t); continue; } if (*out1==NULL) { *out1=strdup(t); continue; }
if (*out2==NULL) { *out2=strdup(t); continue; } if (*out2==NULL) { *out2=strdup(t); continue; }

Loading…
Cancel
Save