path: root/libnetdata/url
author     Pavlos Emm. Katsoulakis <paul@netdata.rocks>    2019-06-07 18:14:44 +0300
committer  Pavlos Emm. Katsoulakis <paul@netdata.rocks>    2019-06-07 18:14:44 +0300
commit     0fca36fd596e49395141561bc42c168a240f51b8 (patch)
tree       7c47ac793a7937015e4fb3c557ecbb3de5940456 /libnetdata/url
parent     d6f3937b4d4bffedb8a8cc61f80affde7f17b180 (diff)
Revert "New URL parser (#6070)"
This reverts commit 58b7d95a7ec9c576f8a06bbab07f755846b5349a.

As agreed with @thiago and @cakrit, we are reverting the URL parser changes to buy time for a more detailed investigation.
Diffstat (limited to 'libnetdata/url')
-rw-r--r--  libnetdata/url/url.c  85
-rw-r--r--  libnetdata/url/url.h  22
2 files changed, 0 insertions, 107 deletions
diff --git a/libnetdata/url/url.c b/libnetdata/url/url.c
index fb1fb7f07a..07a9f8069e 100644
--- a/libnetdata/url/url.c
+++ b/libnetdata/url/url.c
@@ -79,88 +79,3 @@ char *url_decode_r(char *to, char *url, size_t size) {
return to;
}
-
-inline HTTP_VALIDATION url_is_request_complete(char *begin,char *end,size_t length) {
- if ( begin == end) {
- return HTTP_VALIDATION_INCOMPLETE;
- }
-
- if ( length > 3 ) {
- begin = end - 4;
- }
-
- uint32_t counter = 0;
- do {
- if (*begin == '\r') {
- begin++;
- if ( begin == end )
- {
- break;
- }
-
- if (*begin == '\n')
- {
- counter++;
- }
- } else if (*begin == '\n') {
- begin++;
- counter++;
- }
-
- if ( counter == 2) {
- break;
- }
- }
- while (begin != end);
-
- return (counter == 2)?HTTP_VALIDATION_OK:HTTP_VALIDATION_INCOMPLETE;
-}
-
-inline char *url_find_protocol(char *s) {
- while(*s) {
- // find the next space
- while (*s && *s != ' ') s++;
-
- // is it SPACE + "HTTP/" ?
- if(*s && !strncmp(s, " HTTP/", 6)) break;
- else s++;
- }
-
- return s;
-}
-
-int url_parse_query_string(struct web_fields *names,struct web_fields *values,char *moveme,char *divisor) {
- uint32_t i = 0;
- uint32_t max = WEB_FIELDS_MAX;
-
- do {
- if ( i == max) {
- error("We are exceeding the maximum number of elements possible(%u) in this query string(%s)",max,moveme);
- break;
- }
- if (divisor) {
- names[i].body = moveme;
- names[i].length = divisor - moveme;//= - begin
-
- moveme = ++divisor; //value
- values[i].body = moveme;
-
- (void)divisor;
- divisor = strchr(moveme,'&'); //end of value
- if (divisor) {
- values[i].length = (size_t )(divisor - moveme);
- } else{
- values[i].length = strlen(moveme);
- break;
- }
-
- moveme = divisor;
- divisor = strchr(++moveme,'='); //end of value
- i++;
- } else {
- break;
- }
- } while (moveme);
-
- return ++i;
-}
diff --git a/libnetdata/url/url.h b/libnetdata/url/url.h
index 9e86c20cfd..6cef6d7a84 100644
--- a/libnetdata/url/url.h
+++ b/libnetdata/url/url.h
@@ -25,26 +25,4 @@ extern char *url_decode(char *str);
extern char *url_decode_r(char *to, char *url, size_t size);
-#define WEB_FIELDS_MAX 200
-struct web_fields{
- char *body;
- size_t length;
-};
-// http_request_validate()
-// returns:
-// = 0 : all good, process the request
-// > 0 : request is not supported
-// < 0 : request is incomplete - wait for more data
-
-typedef enum {
- HTTP_VALIDATION_OK,
- HTTP_VALIDATION_NOT_SUPPORTED,
- HTTP_VALIDATION_INCOMPLETE,
- HTTP_VALIDATION_REDIRECT
-} HTTP_VALIDATION;
-
-extern HTTP_VALIDATION url_is_request_complete(char *begin,char *end,size_t length);
-extern char *url_find_protocol(char *s);
-extern int url_parse_query_string(struct web_fields *names,struct web_fields *values,char *moveme,char *divisor);
-
#endif /* NETDATA_URL_H */
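
For context, the sketch below is not part of this commit; it only illustrates how the reverted query-string API appears intended to be called, judging from the declarations removed above (struct web_fields, WEB_FIELDS_MAX, url_parse_query_string). The sample query string and the surrounding main() are illustrative assumptions, and the snippet presumes it is compiled against the pre-revert libnetdata/url/url.h.

    #include <stdio.h>
    #include <string.h>
    #include "url.h"   /* pre-revert header declaring struct web_fields and url_parse_query_string() */

    int main(void) {
        char query[] = "chart=system.cpu&format=json";   /* illustrative query string (assumption) */
        struct web_fields names[WEB_FIELDS_MAX];
        struct web_fields values[WEB_FIELDS_MAX];

        /* The removed parser expects 'divisor' to point at the first '=' inside the query string. */
        char *divisor = strchr(query, '=');
        int pairs = url_parse_query_string(names, values, query, divisor);

        /* Fields come back as (pointer, length) pairs into the original buffer, not NUL-terminated. */
        for (int i = 0; i < pairs; i++)
            printf("%.*s = %.*s\n",
                   (int)names[i].length, names[i].body,
                   (int)values[i].length, values[i].body);
        return 0;
    }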