nginx-0.0.1-2003-11-09-23:03:38 import; separate building
diff --git a/src/http/modules/proxy/ngx_http_proxy_cache.c b/src/http/modules/proxy/ngx_http_proxy_cache.c
index 0bc7c61..0f56686 100644
--- a/src/http/modules/proxy/ngx_http_proxy_cache.c
+++ b/src/http/modules/proxy/ngx_http_proxy_cache.c
@@ -5,12 +5,14 @@
#include <ngx_http_proxy_handler.h>
+static int ngx_http_proxy_process_cached_response(ngx_http_proxy_ctx_t *p,
+ int rc);
static int ngx_http_proxy_process_cached_header(ngx_http_proxy_ctx_t *p);
+static void ngx_http_proxy_cache_look_complete_request(ngx_http_proxy_ctx_t *p);
int ngx_http_proxy_get_cached_response(ngx_http_proxy_ctx_t *p)
{
- int rc;
char *last;
ngx_http_request_t *r;
ngx_http_proxy_cache_t *c;
@@ -54,44 +56,69 @@
c->ctx.buf = p->header_in;
- rc = ngx_http_cache_get_file(r, &c->ctx);
+ return ngx_http_proxy_process_cached_response(p,
+ ngx_http_cache_get_file(r, &c->ctx));
+}
- switch (rc) {
- case NGX_HTTP_CACHE_STALE:
- p->stale = 1;
- p->state->cache = NGX_HTTP_PROXY_CACHE_EXPR;
- break;
- case NGX_HTTP_CACHE_AGED:
- p->stale = 1;
- p->state->cache = NGX_HTTP_PROXY_CACHE_AGED;
- break;
+static int ngx_http_proxy_process_cached_response(ngx_http_proxy_ctx_t *p,
+ int rc)
+{
+ if (rc == NGX_OK) {
+ p->state->cache_state = NGX_HTTP_PROXY_CACHE_HIT;
+ p->header_in->pos = p->header_in->start + p->cache->ctx.header_size;
- case NGX_OK:
- p->state->cache = NGX_HTTP_PROXY_CACHE_HIT;
- break;
-
- default:
- p->state->cache = NGX_HTTP_PROXY_CACHE_MISS;
- }
-
- if (rc == NGX_OK
- || rc == NGX_HTTP_CACHE_STALE
- || rc == NGX_HTTP_CACHE_AGED)
- {
- p->header_in->pos = p->header_in->start + c->ctx.header_size;
if (ngx_http_proxy_process_cached_header(p) == NGX_ERROR) {
return NGX_HTTP_INTERNAL_SERVER_ERROR;
}
- p->header_in->pos = p->header_in->start + c->ctx.header_size;
+
+ p->valid_header_in = 1;
+
+ return ngx_http_proxy_send_cached_response(p);
+ }
+
+ if (rc == NGX_HTTP_CACHE_STALE) {
+ p->state->cache_state = NGX_HTTP_PROXY_CACHE_EXPR;
+
+ } else if (rc == NGX_HTTP_CACHE_AGED) {
+ p->state->cache_state = NGX_HTTP_PROXY_CACHE_AGED;
+ }
+
+ if (rc == NGX_HTTP_CACHE_STALE || rc == NGX_HTTP_CACHE_AGED) {
+ p->header_in->pos = p->header_in->start + p->cache->ctx.header_size;
+
+ if (ngx_http_proxy_process_cached_header(p) == NGX_ERROR) {
+ return NGX_HTTP_INTERNAL_SERVER_ERROR;
+ }
+
+ p->header_in->pos = p->header_in->start + p->cache->ctx.header_size;
p->header_in->last = p->header_in->pos;
+ p->stale = 1;
+ p->valid_header_in = 1;
+
} else if (rc == NGX_DECLINED) {
- p->header_in->pos = p->header_in->start + c->ctx.header_size;
+ p->state->cache_state = NGX_HTTP_PROXY_CACHE_MISS;
+ p->header_in->pos = p->header_in->start + p->cache->ctx.header_size;
p->header_in->last = p->header_in->pos;
}
- return rc;
+ if (p->lcf->busy_lock) {
+ p->try_busy_lock = 1;
+
+ p->header_in->pos = p->header_in->start;
+ p->header_in->last = p->header_in->start;
+
+ p->busy_lock.time = 0;
+ p->busy_lock.event = p->request->connection->read;
+ p->busy_lock.event_handler = ngx_http_proxy_busy_lock_handler;
+ p->busy_lock.md5 = p->cache->ctx.md5;
+
+ ngx_http_proxy_cache_busy_lock(p);
+ return NGX_DONE;
+ }
+
+ return ngx_http_proxy_request_upstream(p);
}
@@ -141,6 +168,7 @@
ngx_log_debug(r->connection->log, "http cache status %d '%s'" _
c->status _ c->status_line.data);
+ /* TODO: ngx_init_table */
c->headers_in.headers = ngx_create_table(r->pool, 20);
for ( ;; ) {
@@ -216,28 +244,109 @@
}
-#if 0
-
-static void ngx_http_proxy_cache_busy_lock(ngx_http_proxy_ctx_t *p)
+void ngx_http_proxy_cache_busy_lock(ngx_http_proxy_ctx_t *p)
{
- rc = ngx_http_busy_lock(p->lcf->busy_lock, p->cache->ctx.md5);
+ int rc, ft_type;
+
+ rc = ngx_http_busy_lock_cachable(p->lcf->busy_lock, &p->busy_lock,
+ p->try_busy_lock);
if (rc == NGX_OK) {
- ngx_http_proxy_request_upstream(p);
+ if (p->try_busy_lock) {
+ p->busy_locked = 1;
+ p->header_in->pos = p->header_in->start + p->cache->ctx.header_size;
+ p->header_in->last = p->header_in->pos;
+
+ ngx_http_proxy_request_upstream(p);
+ return;
+ }
+
+ ngx_http_proxy_cache_look_complete_request(p);
+ return;
}
- if (rc == NGX_AGAIN) {
- if (p->busy_lock_time) {
- ngx_add_timer(p->request->connection->read, 1000);
+ p->try_busy_lock = 0;
+
+ if (p->cache->ctx.file.fd != NGX_INVALID_FILE
+ && !p->cache->ctx.file.info_valid)
+ {
+ if (ngx_stat_fd(p->cache->ctx.file.fd, &p->cache->ctx.file.info)
+ == NGX_FILE_ERROR)
+ {
+ ngx_log_error(NGX_LOG_CRIT, p->request->connection->log, ngx_errno,
+ ngx_stat_fd_n " \"%s\" failed",
+ p->cache->ctx.file.name.data);
+ ngx_http_proxy_finalize_request(p, NGX_HTTP_INTERNAL_SERVER_ERROR);
return;
}
+
+ p->cache->ctx.file.info_valid = 1;
+ }
+
+
+ if (rc == NGX_AGAIN) {
+ return;
+ }
+
+ if (rc == NGX_DONE) {
+ ft_type = NGX_HTTP_PROXY_FT_BUSY_LOCK;
+
+ } else {
+ /* rc == NGX_ERROR */
+ ft_type = NGX_HTTP_PROXY_FT_MAX_WAITING;
+ }
+
+ if (p->stale && (p->lcf->use_stale & ft_type)) {
+ ngx_http_proxy_finalize_request(p,
+ ngx_http_proxy_send_cached_response(p));
+ return;
+ }
+
+ ngx_http_proxy_finalize_request(p, NGX_HTTP_SERVICE_UNAVAILABLE);
+}
+
+
+static void ngx_http_proxy_cache_look_complete_request(ngx_http_proxy_ctx_t *p)
+{
+ int rc;
+ ngx_http_cache_ctx_t *ctx;
+
+ if (!(ctx = ngx_pcalloc(p->request->pool, sizeof(ngx_http_cache_ctx_t)))) {
+ ngx_http_proxy_finalize_request(p, NGX_HTTP_INTERNAL_SERVER_ERROR);
+ return;
+ }
+
+ *ctx = p->cache->ctx;
+
+ rc = ngx_http_cache_open_file(p->request, ctx,
+ ngx_file_uniq(p->cache->ctx.file.info));
+
+ if (rc == NGX_HTTP_CACHE_THE_SAME) {
+ p->try_busy_lock = 1;
+ p->busy_lock.time = 0;
+ ngx_http_proxy_cache_busy_lock(p);
+ return;
+ }
+
+    ngx_log_debug(p->request->connection->log, "OLD: %d, NEW: %d" _
+                  p->cache->ctx.file.fd _ ctx->file.fd);
+
+ if (p->cache->ctx.file.fd != NGX_INVALID_FILE) {
+ if (ngx_close_file(p->cache->ctx.file.fd) == NGX_FILE_ERROR) {
+ ngx_log_error(NGX_LOG_ALERT, p->request->connection->log, ngx_errno,
+ ngx_close_file_n " \"%s\" failed",
+ p->cache->ctx.file.name.data);
+ }
}
- rc == NGX_ERROR
- check waitn
-}
+ p->cache->ctx = *ctx;
-#endif
+ p->status = 0;
+ p->status_count = 0;
+
+ ngx_http_proxy_finalize_request(p,
+ ngx_http_proxy_process_cached_response(p, rc));
+}
int ngx_http_proxy_send_cached_response(ngx_http_proxy_ctx_t *p)
@@ -435,8 +544,8 @@
/* FIXME: time_t == int_64_t, we can use fpu */
p->state->reason = NGX_HTTP_PROXY_CACHE_LMF;
- p->cache->ctx.expires = ngx_time()
- + (((int64_t) (date - last_modified)) * p->lcf->lm_factor) / 100;
+ p->cache->ctx.expires = (time_t) (ngx_time()
+ + (((int64_t) (date - last_modified)) * p->lcf->lm_factor) / 100);
return 1;
}
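
The hunk above only adds an explicit time_t cast; the heuristic itself is unchanged: the entry is treated as fresh for lm_factor percent of the interval between the Last-Modified and Date headers. A standalone sketch of that arithmetic with made-up values follows (the helper name and the sample timestamps are illustrative, not taken from nginx):

    #include <stdint.h>
    #include <stdio.h>
    #include <time.h>

    /* compute an expiry time from the Date and Last-Modified response
     * headers using a percentage factor, mirroring the expression above */
    static time_t lm_factor_expires(time_t now, time_t date,
                                    time_t last_modified, int lm_factor)
    {
        return (time_t) (now
                + (((int64_t) (date - last_modified)) * lm_factor) / 100);
    }

    int main(void)
    {
        time_t now = 1068400000;             /* arbitrary "current" time */
        time_t date = now;                   /* Date: of the response */
        time_t last_modified = now - 86400;  /* modified one day earlier */

        /* lm_factor 10: cache for 10% of one day, i.e. 8640 seconds */
        printf("expires in %ld seconds\n",
               (long) (lm_factor_expires(now, date, last_modified, 10) - now));

        return 0;
    }
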
@@ -460,6 +569,9 @@
ep = p->upstream->event_pipe;
+    ngx_log_debug(p->request->connection->log, "LEN: " OFF_FMT ", " OFF_FMT _
+                  p->cache->ctx.length _ ep->read_length);
+
if (p->cache->ctx.length == -1) {
/* TODO: test rc */
ngx_write_file(&ep->temp_file->file,
diff --git a/src/http/modules/proxy/ngx_http_proxy_handler.c b/src/http/modules/proxy/ngx_http_proxy_handler.c
index a284956..4a48250 100644
--- a/src/http/modules/proxy/ngx_http_proxy_handler.c
+++ b/src/http/modules/proxy/ngx_http_proxy_handler.c
@@ -132,7 +132,7 @@
{ ngx_string("proxy_busy_lock"),
- NGX_HTTP_MAIN_CONF|NGX_HTTP_SRV_CONF|NGX_HTTP_LOC_CONF|NGX_CONF_TAKE3,
+ NGX_HTTP_MAIN_CONF|NGX_HTTP_SRV_CONF|NGX_HTTP_LOC_CONF|NGX_CONF_TAKE13,
ngx_http_set_busy_lock_slot,
NGX_HTTP_LOC_CONF_OFFSET,
offsetof(ngx_http_proxy_loc_conf_t, busy_lock),
@@ -240,9 +240,31 @@
};
+static ngx_str_t cache_states[] = {
+ ngx_string("PASS"),
+ ngx_string("BYPASS"),
+ ngx_string("AUTH"),
+ ngx_string("PGNC"),
+ ngx_string("MISS"),
+ ngx_string("EXPR"),
+ ngx_string("AGED"),
+ ngx_string("HIT")
+};
+
+
+static ngx_str_t cache_reason[] = {
+ ngx_string("BPS"),
+ ngx_string("XAE"),
+ ngx_string("CTL"),
+ ngx_string("EXP"),
+ ngx_string("MVD"),
+ ngx_string("LMF"),
+ ngx_string("PDE")
+};
+
+
static int ngx_http_proxy_handler(ngx_http_request_t *r)
{
- int rc;
ngx_http_proxy_ctx_t *p;
ngx_http_create_ctx(r, p, ngx_http_proxy_module,
@@ -269,16 +291,16 @@
if (!p->lcf->cache
|| (r->method != NGX_HTTP_GET && r->method != NGX_HTTP_HEAD))
{
- p->state->cache = NGX_HTTP_PROXY_CACHE_PASS;
+ p->state->cache_state = NGX_HTTP_PROXY_CACHE_PASS;
} else if (r->bypass_cache) {
- p->state->cache = NGX_HTTP_PROXY_CACHE_BYPASS;
+ p->state->cache_state = NGX_HTTP_PROXY_CACHE_BYPASS;
} else if (r->headers_in.authorization) {
- p->state->cache = NGX_HTTP_PROXY_CACHE_AUTH;
+ p->state->cache_state = NGX_HTTP_PROXY_CACHE_AUTH;
} else if (r->no_cache) {
- p->state->cache = NGX_HTTP_PROXY_CACHE_PGNC;
+ p->state->cache_state = NGX_HTTP_PROXY_CACHE_PGNC;
p->cachable = 1;
} else {
@@ -286,25 +308,72 @@
}
- if (p->state->cache) {
+ if (p->state->cache_state != 0) {
return ngx_http_proxy_request_upstream(p);
}
- rc = ngx_http_proxy_get_cached_response(p);
+ return ngx_http_proxy_get_cached_response(p);
+}
- if (rc == NGX_DONE || rc == NGX_HTTP_INTERNAL_SERVER_ERROR) {
- return rc;
+
+void ngx_http_proxy_busy_lock_handler(ngx_event_t *rev)
+{
+ ngx_connection_t *c;
+ ngx_http_request_t *r;
+ ngx_http_proxy_ctx_t *p;
+
+ ngx_log_debug(rev->log, "busy lock");
+
+ c = rev->data;
+ r = c->data;
+ p = ngx_http_get_module_ctx(r, ngx_http_proxy_module);
+ p->action = "waiting upstream in busy lock";
+
+ if (rev->timedout) {
+ rev->timedout = 0;
+ p->busy_lock.time++;
+ p->state->bl_time = p->busy_lock.time;
+ if (p->state->cache_state < NGX_HTTP_PROXY_CACHE_MISS) {
+ ngx_http_proxy_upstream_busy_lock(p);
+
+ } else {
+ ngx_http_proxy_cache_busy_lock(p);
+ }
+
+ return;
}
- p->valid_header_in = 1;
+ ngx_log_debug(rev->log, "client sent while busy lock");
- if (rc == NGX_OK) {
- return ngx_http_proxy_send_cached_response(p);
+    /*
+     * TODO: kevent() notifies about an error; otherwise we need to
+     * call ngx_peek(): recv(MSG_PEEK) to get errno.  THINK about aio:
+     * if there is no error, we need to disable the event.
+     */
+
+#if (HAVE_KQUEUE)
+
+ if ((ngx_event_flags & NGX_HAVE_KQUEUE_EVENT) && rev->kq_eof) {
+ p->lcf->busy_lock->waiting--;
+
+ ngx_del_timer(rev);
+
+ ngx_log_error(NGX_LOG_ERR, c->log, rev->kq_errno,
+ "client() closed connection");
+
+ if (ngx_del_event(rev, NGX_READ_EVENT, NGX_CLOSE_EVENT) == NGX_ERROR) {
+ ngx_http_proxy_finalize_request(p, NGX_HTTP_INTERNAL_SERVER_ERROR);
+ return;
+ }
+
+        /* there is no HTTP code for the case when a client cancels a request */
+
+ ngx_http_proxy_finalize_request(p, 0);
+ return;
}
- /* rc == NGX_DECLINED || NGX_HTTP_CACHE_STALE || NGX_HTTP_CACHE_AGED */
+#endif
- return ngx_http_proxy_request_upstream(p);
}
@@ -313,7 +382,7 @@
ngx_log_debug(p->request->connection->log,
"finalize http proxy request");
- if (p->upstream->peer.connection) {
+ if (p->upstream && p->upstream->peer.connection) {
ngx_http_proxy_close_connection(p);
}
@@ -323,8 +392,10 @@
rc = 0;
}
- p->request->connection->log->data = p->saved_ctx;
- p->request->connection->log->handler = p->saved_handler;
+ if (p->saved_ctx) {
+ p->request->connection->log->data = p->saved_ctx;
+ p->request->connection->log->handler = p->saved_handler;
+ }
ngx_http_finalize_request(p->request, rc);
}
@@ -338,7 +409,7 @@
p->upstream->peer.connection = NULL;
if (p->lcf->busy_lock) {
- p->lcf->busy_lock->conn_n--;
+ p->lcf->busy_lock->busy--;
}
ngx_log_debug(c->log, "proxy close connection: %d" _ c->fd);
@@ -382,6 +453,12 @@
}
+size_t ngx_http_proxy_log_state(void *data, char *buf, size_t len)
+{
+ return 0;
+}
+
+
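
ngx_http_proxy_log_state() is added only as a stub here and writes nothing. Given the cache_states and cache_reason tables above and the sample strings in ngx_http_proxy_handler.h, a plausible direction is to index those tables with the recorded enum values; the sketch below is a guess at that, not the implementation this commit ships, and the field order and numbering are assumptions:

    #include <stdio.h>
    #include <time.h>

    /* illustrative copies of the tables defined above */
    static const char *states[] = {
        "PASS", "BYPASS", "AUTH", "PGNC", "MISS", "EXPR", "AGED", "HIT"
    };

    static const char *reasons[] = {
        "BPS", "XAE", "CTL", "EXP", "MVD", "LMF", "PDE"
    };

    /*
     * Format something like "EXPR/10/5/- 200/EXP/60 4" (see the sample
     * strings in ngx_http_proxy_handler.h).  The numbering assumes the
     * enums start at 1 and that 0 means "not set" (cf. the
     * "cache_state != 0" test in ngx_http_proxy_handler()); the "-"
     * placeholder for bl_state and the field order are guesses.
     */
    static int format_cache_state(char *buf, size_t len, int cache_state,
                                  time_t expired, time_t bl_time, int status,
                                  int reason, time_t t, time_t expires)
    {
        return snprintf(buf, len, "%s/%ld/%ld/- %d/%s/%ld %ld",
                        cache_state ? states[cache_state - 1] : "-",
                        (long) expired, (long) bl_time, status,
                        reason ? reasons[reason - 1] : "-",
                        (long) t, (long) expires);
    }

    int main(void)
    {
        char buf[128];

        /* 6 == EXPR, 4 == EXP under the assumed numbering */
        format_cache_state(buf, sizeof(buf), 6, 10, 5, 200, 4, 60, 4);
        printf("%s\n", buf);    /* prints "EXPR/10/5/- 200/EXP/60 4" */

        return 0;
    }
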
size_t ngx_http_proxy_log_error(void *data, char *buf, size_t len)
{
ngx_http_proxy_ctx_t *p = data;
@@ -513,21 +590,21 @@
conf->busy_lock = prev->busy_lock;
}
- if (conf->busy_lock && conf->cache && conf->busy_lock->busy == NULL) {
+ if (conf->busy_lock && conf->cache && conf->busy_lock->md5 == NULL) {
- /* ngx_alloc_shared() */
- conf->busy_lock->busy_mask =
- ngx_palloc(cf->pool, (conf->busy_lock->max_conn + 7) / 8);
- if (conf->busy_lock->busy_mask == NULL) {
+ /* ngx_calloc_shared() */
+ conf->busy_lock->md5_mask =
+ ngx_pcalloc(cf->pool, (conf->busy_lock->max_busy + 7) / 8);
+ if (conf->busy_lock->md5_mask == NULL) {
return NGX_CONF_ERROR;
}
/* 16 bytes are 128 bits of the md5 */
/* ngx_alloc_shared() */
- conf->busy_lock->busy = ngx_palloc(cf->pool,
- 16 * conf->busy_lock->max_conn);
- if (conf->busy_lock->busy == NULL) {
+ conf->busy_lock->md5 = ngx_palloc(cf->pool,
+ 16 * conf->busy_lock->max_busy);
+ if (conf->busy_lock->md5 == NULL) {
return NGX_CONF_ERROR;
}
}
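
For context, the cachable busy lock configured here keys requests by the 16-byte MD5 of the cache file (hence the 16 * max_busy buffer and the one-bit-per-slot md5_mask), so that only one request per cache entry talks to the upstream while the others wait and then re-read the cache. A deliberately simplified standalone sketch of that lookup-or-insert idea follows; it is not ngx_http_busy_lock_cachable(), which also tracks waiting limits, timeouts and the busy/waiting counters used elsewhere in this diff:

    #include <string.h>

    #define MAX_BUSY  32   /* stands in for busy_lock->max_busy */

    /* one bit per slot: is the slot occupied? */
    static unsigned char  mask[(MAX_BUSY + 7) / 8];
    /* 16 bytes per slot: the md5 key of the entry being fetched */
    static unsigned char  keys[16 * MAX_BUSY];

    #define LOCK_OK     0   /* we own the entry, go to the upstream */
    #define LOCK_AGAIN  1   /* someone else owns it, wait and retry */
    #define LOCK_FULL   2   /* no free slot left */

    static int busy_lock_try(const unsigned char key[16])
    {
        int  i, free_slot = -1;

        for (i = 0; i < MAX_BUSY; i++) {
            if (!(mask[i / 8] & (1 << (i % 8)))) {
                if (free_slot == -1) {
                    free_slot = i;
                }
                continue;
            }

            if (memcmp(&keys[16 * i], key, 16) == 0) {
                return LOCK_AGAIN;
            }
        }

        if (free_slot == -1) {
            return LOCK_FULL;
        }

        memcpy(&keys[16 * free_slot], key, 16);
        mask[free_slot / 8] |= 1 << (free_slot % 8);

        return LOCK_OK;
    }

    static void busy_lock_release(const unsigned char key[16])
    {
        int  i;

        for (i = 0; i < MAX_BUSY; i++) {
            if ((mask[i / 8] & (1 << (i % 8)))
                && memcmp(&keys[16 * i], key, 16) == 0)
            {
                mask[i / 8] &= ~(1 << (i % 8));
                return;
            }
        }
    }

    int main(void)
    {
        unsigned char md5_a[16] = { 0xab };  /* pretend md5 of a cache file */

        busy_lock_try(md5_a);       /* first request: LOCK_OK */
        busy_lock_try(md5_a);       /* concurrent request: LOCK_AGAIN */
        busy_lock_release(md5_a);

        return 0;
    }
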
diff --git a/src/http/modules/proxy/ngx_http_proxy_handler.h b/src/http/modules/proxy/ngx_http_proxy_handler.h
index 919a0af..138a778 100644
--- a/src/http/modules/proxy/ngx_http_proxy_handler.h
+++ b/src/http/modules/proxy/ngx_http_proxy_handler.h
@@ -80,10 +80,24 @@
} ngx_http_proxy_loc_conf_t;
+/*
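+ * sample cache state log strings; the fields appear to correspond to the
+ * ngx_http_proxy_state_t members below, roughly
+ * "cache_state/expired/bl_time/bl_state status/reason/time expires":
+ *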
+ * "EXPR/10/5/- 200/EXP/60 4"
+ * "MISS/-/-/B 503/-/- -"
+ * "EXPR/10/20/SB HIT/-/- -"
+ * "EXPR/10/15/NB HIT/-/- -"
+ */
+
typedef struct {
- ngx_http_proxy_state_e cache;
- ngx_http_proxy_reason_e reason;
+ ngx_http_proxy_state_e cache_state;
+ time_t expired;
+ time_t bl_time;
+ int bl_state;
+
int status;
+ ngx_http_proxy_reason_e reason;
+ time_t time;
+ time_t expires;
+
ngx_str_t *peer;
} ngx_http_proxy_state_t;
@@ -140,12 +154,14 @@
ngx_hunk_t *header_in;
- time_t busy_lock_time;
+ ngx_http_busy_lock_ctx_t busy_lock;
unsigned accel:1;
unsigned cachable:1;
unsigned stale:1;
+ unsigned try_busy_lock:1;
+ unsigned busy_locked:1;
unsigned valid_header_in:1;
unsigned request_sent:1;
@@ -187,6 +203,10 @@
int ngx_http_proxy_is_cachable(ngx_http_proxy_ctx_t *p);
int ngx_http_proxy_update_cache(ngx_http_proxy_ctx_t *p);
+void ngx_http_proxy_busy_lock_handler(ngx_event_t *rev);
+void ngx_http_proxy_cache_busy_lock(ngx_http_proxy_ctx_t *p);
+void ngx_http_proxy_upstream_busy_lock(ngx_http_proxy_ctx_t *p);
+
size_t ngx_http_proxy_log_error(void *data, char *buf, size_t len);
void ngx_http_proxy_finalize_request(ngx_http_proxy_ctx_t *p, int rc);
void ngx_http_proxy_close_connection(ngx_http_proxy_ctx_t *p);
diff --git a/src/http/modules/proxy/ngx_http_proxy_upstream.c b/src/http/modules/proxy/ngx_http_proxy_upstream.c
index c66c164..0ec2fee 100644
--- a/src/http/modules/proxy/ngx_http_proxy_upstream.c
+++ b/src/http/modules/proxy/ngx_http_proxy_upstream.c
@@ -11,8 +11,6 @@
static ngx_chain_t *ngx_http_proxy_create_request(ngx_http_proxy_ctx_t *p);
static void ngx_http_proxy_init_upstream(void *data);
static void ngx_http_proxy_reinit_upstream(ngx_http_proxy_ctx_t *p);
-static void ngx_http_proxy_upstream_busy_lock(ngx_http_proxy_ctx_t *p);
-static void ngx_http_proxy_upstream_busy_lock_handler(ngx_event_t *rev);
static void ngx_http_proxy_connect(ngx_http_proxy_ctx_t *p);
static void ngx_http_proxy_send_request(ngx_http_proxy_ctx_t *p);
static void ngx_http_proxy_send_request_handler(ngx_event_t *wev);
@@ -266,7 +264,7 @@
wctx->pool = r->pool;
- if (p->lcf->busy_lock) {
+ if (p->lcf->busy_lock && !p->busy_locked) {
ngx_http_proxy_upstream_busy_lock(p);
} else {
ngx_http_proxy_connect(p);
@@ -312,41 +310,31 @@
}
-static void ngx_http_proxy_upstream_busy_lock(ngx_http_proxy_ctx_t *p)
+void ngx_http_proxy_upstream_busy_lock(ngx_http_proxy_ctx_t *p)
{
- int ft_type;
+ int rc, ft_type;
- if (p->lcf->busy_lock->conn_n < p->lcf->busy_lock->max_conn) {
- p->lcf->busy_lock->conn_n++;
+ if (p->busy_lock.time == 0) {
+ p->busy_lock.event = p->request->connection->read;
+ p->busy_lock.event_handler = ngx_http_proxy_busy_lock_handler;
+ }
- if (p->busy_lock_time) {
- p->busy_lock_time = 0;
- p->lcf->busy_lock->waiting_n--;
- }
+ rc = ngx_http_busy_lock(p->lcf->busy_lock, &p->busy_lock);
+ if (rc == NGX_AGAIN) {
+ return;
+ }
+
+ if (rc == NGX_OK) {
ngx_http_proxy_connect(p);
return;
}
- if (p->busy_lock_time) {
- if (p->busy_lock_time < p->lcf->busy_lock->timeout) {
- ngx_add_timer(p->request->connection->read, 1000);
- return;
- }
-
- p->lcf->busy_lock->waiting_n--;
+ if (rc == NGX_DONE) {
ft_type = NGX_HTTP_PROXY_FT_BUSY_LOCK;
} else {
- if (p->lcf->busy_lock->waiting_n < p->lcf->busy_lock->max_waiting) {
- p->lcf->busy_lock->waiting_n++;
- ngx_add_timer(p->request->connection->read, 1000);
- p->request->connection->read->event_handler =
- ngx_http_proxy_upstream_busy_lock_handler;
- /* TODO: ngx_handle_level_read_event() */
- return;
- }
-
+ /* rc == NGX_ERROR */
ft_type = NGX_HTTP_PROXY_FT_MAX_WAITING;
}
@@ -357,61 +345,6 @@
}
ngx_http_proxy_finalize_request(p, NGX_HTTP_SERVICE_UNAVAILABLE);
- return;
-}
-
-
-static void ngx_http_proxy_upstream_busy_lock_handler(ngx_event_t *rev)
-{
- ngx_connection_t *c;
- ngx_http_request_t *r;
- ngx_http_proxy_ctx_t *p;
-
- ngx_log_debug(rev->log, "busy lock");
-
- c = rev->data;
- r = c->data;
- p = ngx_http_get_module_ctx(r, ngx_http_proxy_module);
- p->action = "waiting upstream in busy lock";
-
- if (rev->timedout) {
- rev->timedout = 0;
- p->busy_lock_time++;
- ngx_http_proxy_upstream_busy_lock(p);
- return;
- }
-
- ngx_log_debug(rev->log, "client sent while busy lock");
-
- /*
- * TODO: kevent() notify about error, otherwise we need to
- * call ngx_peek(): recv(MGS_PEEK) to get errno. THINK about aio
- * if there's no error we need to disable event.
- */
-
-#if (HAVE_KQUEUE)
-
- if ((ngx_event_flags & NGX_HAVE_KQUEUE_EVENT) && rev->kq_eof) {
- p->lcf->busy_lock->waiting_n--;
-
- ngx_del_timer(rev);
-
- ngx_log_error(NGX_LOG_ERR, c->log, rev->kq_errno,
- "client() closed connection");
-
- if (ngx_del_event(rev, NGX_READ_EVENT, NGX_CLOSE_EVENT) == NGX_ERROR) {
- ngx_http_proxy_finalize_request(p, NGX_HTTP_INTERNAL_SERVER_ERROR);
- return;
- }
-
- /* we have not HTTP code for the case when a client cancels a request */
-
- ngx_http_proxy_finalize_request(p, 0);
- return;
- }
-
-#endif
-
}
@@ -724,6 +657,16 @@
}
}
+ if (p->status == NGX_HTTP_NOT_FOUND
+ && p->upstream->peer.tries > 1
+ && p->lcf->next_upstream & NGX_HTTP_PROXY_FT_HTTP_404)
+ {
+ ngx_http_proxy_next_upstream(p, NGX_HTTP_PROXY_FT_HTTP_404);
+ return;
+ }
+
+ /* TODO: "proxy_error_page" */
+
p->upstream->status_line.len = p->status_end - p->status_start;
p->upstream->status_line.data = ngx_palloc(p->request->pool,
p->upstream->status_line.len + 1);
@@ -740,6 +683,7 @@
if (p->upstream->headers_in.headers) {
p->upstream->headers_in.headers->nelts = 0;
} else {
+ /* TODO: ngx_init_table */
p->upstream->headers_in.headers = ngx_create_table(p->request->pool,
20);
}
@@ -956,8 +900,8 @@
header->expires = p->cache->ctx.expires;
header->last_modified = p->cache->ctx.last_modified;
header->date = p->cache->ctx.date;
- /* TODO: r->headers_out.content_length_n == -1 */
header->length = r->headers_out.content_length_n;
+ p->cache->ctx.length = r->headers_out.content_length_n;
header->key_len = p->cache->ctx.key.len;
ngx_memcpy(&header->key, p->cache->ctx.key.data, header->key_len);
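
These assignments fill in the header written at the start of a cache file; when a cached file is read back, header_in->pos is advanced by ctx.header_size to skip it (see the hunks above). Judging only from the fields assigned here, the on-disk header looks roughly like the struct below; the type name, the field types and any key padding are assumptions, not taken from the real cache header definition:

    #include <sys/types.h>
    #include <time.h>

    /* hypothetical layout of the header stored in front of a cached
     * response body, inferred from the assignments above */
    typedef struct {
        time_t  expires;        /* when the cached response goes stale */
        time_t  last_modified;  /* Last-Modified of the upstream response */
        time_t  date;           /* Date of the upstream response */
        off_t   length;         /* body length, mirrored into ctx.length */
        size_t  key_len;        /* length of the cache key that follows */
        char    key[1];         /* the key itself, key_len bytes */
    } cache_file_header_sketch_t;
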
@@ -1105,18 +1049,24 @@
if (p->upstream->peer.connection) {
if (ep->upstream_done && p->cachable) {
if (ngx_http_proxy_update_cache(p) == NGX_ERROR) {
+ ngx_http_busy_unlock_cachable(p->lcf->busy_lock, &p->busy_lock);
ngx_http_proxy_finalize_request(p, 0);
return;
}
+ ngx_http_busy_unlock_cachable(p->lcf->busy_lock, &p->busy_lock);
+
} else if (ep->upstream_eof && p->cachable) {
/* TODO: check length & update cache */
if (ngx_http_proxy_update_cache(p) == NGX_ERROR) {
+ ngx_http_busy_unlock_cachable(p->lcf->busy_lock, &p->busy_lock);
ngx_http_proxy_finalize_request(p, 0);
return;
}
+
+ ngx_http_busy_unlock_cachable(p->lcf->busy_lock, &p->busy_lock);
}
if (ep->upstream_done || ep->upstream_eof || ep->upstream_error) {
@@ -1206,7 +1156,7 @@
}
}
- if (p->lcf->busy_lock) {
+ if (p->lcf->busy_lock && !p->busy_locked) {
ngx_http_proxy_upstream_busy_lock(p);
} else {
ngx_http_proxy_connect(p);