add: decode_url(), print_buf_ascii(), css mimetype; fix hex_dump_line()
Nikaoto committed Jul 26, 2022
1 parent 089aa86 commit 9c26feb
Showing 8 changed files with 117 additions and 31 deletions.
6 changes: 6 additions & 0 deletions ascii.c
@@ -22,6 +22,12 @@ is_alnum(char c)
return is_digit(c) || is_alpha(c);
}

int
is_hex(char c)
{
return is_digit(c) || (c >= 'A' && c <= 'F') || (c >= 'a' && c <= 'f');
}

// https://tools.ietf.org/html/rfc3986#section-2.3
int
needs_encoding(unsigned char c)
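A quick sanity check for the new is_hex() helper (a hypothetical test, not part of the commit, compiled together with ascii.c):

// is_hex_test.c (illustrative only)
#include <assert.h>
#include "ascii.h"

int
main(void)
{
    assert(is_hex('0') && is_hex('9'));
    assert(is_hex('a') && is_hex('F'));
    assert(!is_hex('g') && !is_hex(' '));
    return 0;
}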
1 change: 1 addition & 0 deletions ascii.h
@@ -5,6 +5,7 @@ int is_upper_ascii(char c);
int is_alpha(char c);
int is_digit(char c);
int is_alnum(char c);
int is_hex(char c);
int needs_encoding(unsigned char c);

#endif // _MIMINO_ASCII_H
38 changes: 21 additions & 17 deletions buffer.c
@@ -106,23 +106,6 @@ buf_sprintf(Buffer *buf, char *fmt, ...)
return len - 1;
}

// Encode url and copy it into buf
void
buf_encode_url(Buffer *buf, char *url)
{
char hex[] = "0123456789ABCDEF";

for (; *url != '\0'; url++) {
if (needs_encoding((unsigned char)*url)) {
buf_push(buf, '%');
buf_push(buf, hex[(*url >> 4) & 0xF]);
buf_push(buf, hex[ *url & 0xF]);
} else {
buf_push(buf, *url);
}
}
}

// Return -1 on fopen error
// Return 0 on read error
// Return 1 on success
@@ -159,3 +142,24 @@ buf_append_file_contents(Buffer *buf, File *f, char *path)
fclose(fp);
return 1;
}

void
print_buf_ascii(FILE *stream, Buffer *buf)
{
for (size_t i = 0; i < buf->n_items; i++) {
switch (buf->data[i]) {
case '\n':
fprintf(stream, "\\n\n");
break;
case '\t':
fprintf(stream, "\\t");
break;
case '\r':
fprintf(stream, "\\r");
break;
default:
putc(buf->data[i], stream);
break;
}
}
}
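
A minimal usage sketch for the new print_buf_ascii(), not part of the commit; it assumes buf_append_str() grows the buffer as needed, and the initial size passed to new_buf() is a guess:

// print_buf_ascii_demo.c (illustrative only)
#include <stdio.h>
#include "buffer.h"

int
main(void)
{
    Buffer *b = new_buf(128);
    buf_append_str(b, "GET / HTTP/1.1\r\nHost: localhost\r\n\r\n");
    print_buf_ascii(stderr, b);
    // Prints each control character escaped, with "\n" also breaking the line:
    //   GET / HTTP/1.1\r\n
    //   Host: localhost\r\n
    //   \r\n
    free_buf(b);
    return 0;
}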
2 changes: 1 addition & 1 deletion buffer.h
@@ -14,6 +14,7 @@ typedef struct {
} Buffer;

Buffer* new_buf(size_t data_size);
void print_buf_ascii(FILE *stream, Buffer *buf);
void free_buf(Buffer*);
void free_buf_parts(Buffer*);

@@ -23,6 +24,5 @@ void buf_append(Buffer *b, char *src, size_t n);
void buf_append_str(Buffer *b, char *str);
int buf_sprintf(Buffer *buf, char *fmt, ...);
int buf_append_file_contents(Buffer *buf, File *f, char *path);
void buf_encode_url(Buffer *b, char *url);

#endif
88 changes: 81 additions & 7 deletions http.c
@@ -44,7 +44,6 @@ is_http_end(char *buf, size_t size)
}

// Parses buffer and fills out Http_Request fields
// Returns 1 if request has en
Http_Request*
parse_http_request(Http_Request *req)
{
@@ -298,14 +297,16 @@ make_http_response(Server *serv, Http_Request *req)
res->error = NULL;

char *clean_http_path = cleanup_path(req->path);
defer(&dq, free, clean_http_path);
char *decoded_http_path = decode_url(clean_http_path);
free(clean_http_path);
defer(&dq, free, decoded_http_path);

char *path = resolve_path(serv->serve_path, clean_http_path);
char *path = resolve_path(serv->serve_path, decoded_http_path);
defer(&dq, free, path);

/* printf("serve path: %s\n", serv->serve_path); */
/* printf("request path: %s\n", req->path); */
/* printf("clean request path: %s\n", clean_http_path); */
/* printf("decoded request path: %s\n", decoded_http_path); */
/* printf("resolved path: %s\n", path); */

// Find out if we're listing a dir or serving a file
@@ -332,12 +333,12 @@ make_http_response(Server *serv, Http_Request *req)
// We're serving a dirlisting
if (file.is_dir) {
// Forward to path with trailing slash if it's missing
if (clean_http_path[strlen(clean_http_path) - 1] != '/') {
if (decoded_http_path[strlen(decoded_http_path) - 1] != '/') {
buf_sprintf(
res->buf,
"HTTP/1.1 301\r\n"
"Location:%s/\r\n\r\n",
clean_http_path);
decoded_http_path);
return fulfill(&dq, res);
}

@@ -357,7 +358,7 @@ make_http_response(Server *serv, Http_Request *req)
buf_append_str(
res->buf,
"Content-Type: text/html; charset=UTF-8\r\n\r\n");
file_list_to_html(res->buf, clean_http_path, fl);
file_list_to_html(res->buf, decoded_http_path, fl);
buf_append_str(res->buf, "\r\n");

fulfill(&dqfl, NULL);
@@ -381,6 +382,10 @@ make_http_response(Server *serv, Http_Request *req)
buf_append_str(
res->buf,
"Content-Type: application/pdf\r\n");
} else if (strstr(file.name, ".css")) {
buf_append_str(
res->buf,
"Content-Type: text/css\r\n");
} else {
buf_append_str(
res->buf,
@@ -455,3 +460,72 @@ free_http_response(Http_Response *res)
res = NULL;
}

// Encode url and copy it into buf
void
buf_encode_url(Buffer *buf, char *url)
{
char hex[] = "0123456789ABCDEF";

for (; *url != '\0'; url++) {
if (needs_encoding((unsigned char)*url)) {
buf_push(buf, '%');
buf_push(buf, hex[(*url >> 4) & 0xF]);
buf_push(buf, hex[ *url & 0xF]);
} else {
buf_push(buf, *url);
}
}
}

char*
decode_url(char *url)
{
size_t len = strlen(url);
char *ret = xmalloc(len + 1);
char *p = ret;

for (size_t i = 0; i < len; i++) {
if (url[i] == '%') {
if (i + 1 < len && url[i+1] == '%') {
*p++ = '%';
i++;
continue;
}

if (i + 2 < len && is_hex(url[i+1]) && is_hex(url[i+2])) {
*p = 0;

// First hex digit
if (url[i+1] >= 'A' && url[i+1] <= 'F') {
*p = 0x0A + (url[i+1] - 'A');
} else if (url[i+1] >= 'a' && url[i+1] <= 'f') {
*p = 0x0A + (url[i+1] - 'a');
} else if (url[i+1] >= '0' && url[i+1] <= '9') {
*p = url[i+1] - '0';
}

*p = *p << 4;

// Second hex digit
if (url[i+2] >= 'A' && url[i+2] <= 'F') {
*p |= (0x0A + (url[i+2] - 'A'));
} else if (url[i+2] >= 'a' && url[i+2] <= 'f') {
*p |= (0x0A + (url[i+2] - 'a'));
} else if (url[i+2] >= '0' && url[i+2] <= '9') {
*p |= url[i+2] - '0';
}

p++;
i += 2;
continue;
}

}

*p++ = url[i];
}

*p = '\0';

return ret;
}
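
To make the decoding rules concrete, here is a hypothetical caller (not part of the commit); it assumes xmalloc() never returns NULL, so decode_url() always yields a heap string that the caller frees:

// decode_url_demo.c (illustrative only)
#include <stdio.h>
#include <stdlib.h>
#include "http.h"

int
main(void)
{
    char *a = decode_url("/my%20files/caf%C3%A9.txt");
    printf("%s\n", a);              // "/my files/café.txt" (0xC3 0xA9 is UTF-8 for 'é')

    char *b = decode_url("a%%b");   // "%%" collapses to a literal '%': "a%b"
    char *c = decode_url("50%ZZ");  // a malformed escape is copied through unchanged: "50%ZZ"
    printf("%s\n%s\n", b, c);

    free(a);
    free(b);
    free(c);
    return 0;
}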
3 changes: 2 additions & 1 deletion http.h
@@ -3,6 +3,7 @@

#include <stdio.h>
#include "mimino.h"
#include "buffer.h"

Http_Request* parse_http_request(Http_Request*);
int is_http_end(char *buf, size_t size);
@@ -14,6 +15,6 @@ void print_http_response(FILE*, Http_Response*);
void free_http_response(Http_Response*);

void buf_encode_url(Buffer *, char *);
//char *decode_url(char *);
char *decode_url(char *);

#endif // _MIMINO_HTTP_H
7 changes: 5 additions & 2 deletions mimino.c
@@ -443,13 +443,16 @@ main(int argc, char **argv)
} else if (status == 1) {
// Reading done, parse request
parse_http_request(conn->req);
//print_http_request(stdout, req);
//print_http_request(stderr, req);

// Parse error
if (conn->req->error) {
fprintf(stdout,
"Parse error: %s\n",
conn->req->error);
fprintf(stderr,
"Request buffer dump:\n");
print_buf_ascii(stderr, conn->req->buf);

// Close the connection if we didn't manage
// to parse the essential headers
@@ -643,7 +646,7 @@ hex_dump_line(FILE *stream, char *buf, size_t buf_size, size_t width)
// Hexdump
fprintf(stream, " | ");
for (size_t i = 0; i < buf_size; i++) {
fprintf(stream, "%02X ", buf[i]);
fprintf(stream, "%02X ", (unsigned char) buf[i]);
}
putc('\n', stream);
}
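The hex_dump_line() change above casts each byte to unsigned char before printing. Without the cast, a char with the high bit set is promoted to a negative int, and "%02X" then renders it as eight hex digits instead of two. A standalone reproduction (assuming char is signed, as on most x86 toolchains; not part of the commit):

// hexdump_cast_demo.c (illustrative only)
#include <stdio.h>

int
main(void)
{
    char c = '\xE9';
    printf("%02X\n", c);                 // typically prints FFFFFFE9 (sign-extended)
    printf("%02X\n", (unsigned char)c);  // prints E9
    return 0;
}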
3 changes: 0 additions & 3 deletions todo.md
@@ -1,8 +1,5 @@
## TODO

- url encoding/decoding
- decode req->path url and pass that to resolve_path

- faster dir scanning (?)
- use readdir() instead of scandir() ~/src/darkhttpd/darkhttpd.c:1830:0

