summary refs log tree commit diff
path: root/src/backend/taler-merchant-httpd.c
diff options
context:
space:
mode:
Diffstat (limited to 'src/backend/taler-merchant-httpd.c')
-rw-r--r--  src/backend/taler-merchant-httpd.c  12
1 files changed, 6 insertions, 6 deletions
diff --git a/src/backend/taler-merchant-httpd.c b/src/backend/taler-merchant-httpd.c
index b6cb1cc4..75a4cf4d 100644
--- a/src/backend/taler-merchant-httpd.c
+++ b/src/backend/taler-merchant-httpd.c
@@ -1115,7 +1115,7 @@ url_handler (void *cls,
{
/* Client exceeds upload limit. Should _usually_ be checked earlier
when we look at the MHD_HTTP_HEADER_CONTENT_LENGTH, alas with
- chunked encoding an uploader MAY have ommitted this, and thus
+ chunked encoding an uploader MAY have omitted this, and thus
not permitted us to check on time. In this case, we just close
the connection once it exceeds our limit (instead of waiting
for the upload to complete and then fail). This could theoretically
@@ -1299,28 +1299,28 @@ url_handler (void *cls,
continue; /* too many segments to match */
if ( (NULL == infix_url)
^ (NULL == rh->url_suffix) )
- continue; /* suffix existence missmatch */
+ continue; /* suffix existence mismatch */
if ( (NULL != infix_url) &&
( (infix_strlen != strlen (rh->url_suffix)) ||
(0 != memcmp (infix_url,
rh->url_suffix,
infix_strlen)) ) )
- continue; /* cannot use infix as suffix: content missmatch */
+ continue; /* cannot use infix as suffix: content mismatch */
}
else
{
if ( (NULL == infix_url)
^ (GNUNET_NO == rh->have_id_segment) )
- continue; /* infix existence missmatch */
+ continue; /* infix existence mismatch */
if ( ( (NULL == suffix_url)
^ (NULL == rh->url_suffix) ) )
- continue; /* suffix existence missmatch */
+ continue; /* suffix existence mismatch */
if ( (NULL != suffix_url) &&
( (suffix_strlen != strlen (rh->url_suffix)) ||
(0 != memcmp (suffix_url,
rh->url_suffix,
suffix_strlen)) ) )
- continue; /* suffix content missmatch */
+ continue; /* suffix content mismatch */
}
url_found = true;
if (0 == strcasecmp (method,