Nginx-ru mailing list archive (nginx-ru@sysoev.ru)
open_file_cache statistics
Mihal K wrote:
> Igor Sysoev wrote:
>> Changes with nginx 0.6.10 03.09.2007
>>
>> *) Feature: the open_file_cache, open_file_cache_retest, and
>> open_file_cache_errors directives.
>
> RFE: it would be useful to have access to the hits/misses statistics of this
> cache. Otherwise it is generally unclear how to choose its optimal size.
Good day.
I waited a long time for these statistics to appear, but they never did, so I
ended up writing them myself. Maybe someone will find this useful.
The statistics look like this:
Cached files: 21437 (65.42%)
Total files: 32768
Hits: 113646 (65.55%)
Misses: 59717
Cleanups: 34046
The percentage next to Cached files is the cache occupancy, and the one next
to Hits is the cache efficiency = hits / (hits + misses), i.e. the share of
requests that were served from the cache.
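For example, with the numbers above: 113646 / (113646 + 59717) ≈ 0.6555, which
is the 65.55% shown next to Hits, and 21437 / 32768 ≈ 0.6542, i.e. the 65.42%
occupancy shown next to Cached files.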
The module is very simple and is based entirely on http_stub_status_module,
so something like this has to be added to the appropriate server block:
location = /cache-status {
cache_status on;
}
so that the statistics can be viewed at domain.name/cache-status.
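A rough sequence for applying the patch below and rebuilding, assuming it is
saved as open_file_cache_stats.patch (the file name here is only an example)
and that you add your usual configure options:

cd nginx-0.6.24
patch -p1 < ../open_file_cache_stats.patch
./configure
make && make install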
diff -uNr nginx-0.6.24/auto/modules nginx-0.6.24-new/auto/modules
--- nginx-0.6.24/auto/modules 2007-12-27 17:21:59.000000000 +0300
+++ nginx-0.6.24-new/auto/modules 2008-01-21 19:35:45.000000000 +0300
@@ -287,6 +288,9 @@
HTTP_SRCS="$HTTP_SRCS src/http/modules/ngx_http_stub_status_module.c"
fi
+HTTP_MODULES="$HTTP_MODULES ngx_http_stub_cache_status_module"
+HTTP_SRCS="$HTTP_SRCS src/http/modules/ngx_http_stub_cache_status_module.c"
+
#if [ -r $NGX_OBJS/auto ]; then
# . $NGX_OBJS/auto
#fi
diff -uNr nginx-0.6.24/src/core/ngx_open_file_cache.c nginx-0.6.24-new/src/core/ngx_open_file_cache.c
--- nginx-0.6.24/src/core/ngx_open_file_cache.c 2007-12-25 13:46:40.000000000 +0300
+++ nginx-0.6.24-new/src/core/ngx_open_file_cache.c 2008-01-22 15:45:20.000000000 +0300
@@ -55,6 +55,9 @@
cache->current = 0;
cache->max = max;
cache->inactive = inactive;
+ cache->hits = 0;
+ cache->misses = 0;
+ cache->cleanups = 0;
cln = ngx_pool_cleanup_add(pool, 0);
if (cln == NULL) {
@@ -94,6 +97,7 @@
ngx_rbtree_delete(&cache->rbtree, &file->node);
cache->current--;
+ cache->cleanups++;
ngx_log_debug1(NGX_LOG_DEBUG_CORE, ngx_cycle->log, 0,
"delete cached open file: %s", file->name);
@@ -212,6 +216,8 @@
of->err = file->err;
}
+ cache->hits++;
+
goto found;
}
@@ -300,6 +306,7 @@
ngx_rbtree_delete(&cache->rbtree, &file->node);
cache->current--;
+ cache->cleanups++;
file->close = 1;
@@ -377,6 +384,8 @@
renew:
+ cache->misses++;
+
file->created = now;
found:
@@ -412,6 +421,7 @@
ngx_rbtree_delete(&cache->rbtree, &file->node);
cache->current--;
+ cache->cleanups++;
if (ngx_close_file(file->fd) == NGX_FILE_ERROR) {
ngx_log_error(NGX_LOG_ALERT, pool->log, ngx_errno,
@@ -681,6 +691,7 @@
ngx_rbtree_delete(&cache->rbtree, &file->node);
cache->current--;
+ cache->cleanups++;
ngx_log_debug1(NGX_LOG_DEBUG_CORE, log, 0,
"expire cached open file: %s", file->name);
@@ -797,6 +808,7 @@
ngx_rbtree_delete(&fev->cache->rbtree, &file->node);
fev->cache->current--;
+ fev->cache->cleanups++;
/* NGX_ONESHOT_EVENT was already deleted */
file->event = NULL;
diff -uNr nginx-0.6.24/src/core/ngx_open_file_cache.h nginx-0.6.24-new/src/core/ngx_open_file_cache.h
--- nginx-0.6.24/src/core/ngx_open_file_cache.h 2007-12-25 13:46:40.000000000 +0300
+++ nginx-0.6.24-new/src/core/ngx_open_file_cache.h 2008-01-22 11:21:11.000000000 +0300
@@ -73,6 +73,10 @@
ngx_uint_t current;
ngx_uint_t max;
time_t inactive;
+
+ ngx_atomic_t hits;
+ ngx_atomic_t misses;
+ ngx_atomic_t cleanups;
} ngx_open_file_cache_t;
diff -uNr nginx-0.6.24/src/http/modules/ngx_http_stub_cache_status_module.c nginx-0.6.24-new/src/http/modules/ngx_http_stub_cache_status_module.c
--- nginx-0.6.24/src/http/modules/ngx_http_stub_cache_status_module.c 1970-01-01 03:00:00.000000000 +0300
+++ nginx-0.6.24-new/src/http/modules/ngx_http_stub_cache_status_module.c 2008-01-22 15:27:37.000000000 +0300
@@ -0,0 +1,163 @@
+#include <ngx_config.h>
+#include <ngx_core.h>
+#include <ngx_http.h>
+
+
+static char *ngx_http_set_cache_status(ngx_conf_t *cf, ngx_command_t *cmd,
+ void *conf);
+
+static ngx_command_t ngx_http_cache_status_commands[] = {
+
+ { ngx_string("cache_status"),
+ NGX_HTTP_SRV_CONF|NGX_HTTP_LOC_CONF|NGX_CONF_FLAG,
+ ngx_http_set_cache_status,
+ 0,
+ 0,
+ NULL },
+
+ ngx_null_command
+};
+
+
+
+static ngx_http_module_t ngx_http_stub_cache_status_module_ctx = {
+ NULL, /* preconfiguration */
+ NULL, /* postconfiguration */
+
+ NULL, /* create main configuration */
+ NULL, /* init main configuration */
+
+ NULL, /* create server configuration */
+ NULL, /* merge server configuration */
+
+ NULL, /* create location configuration */
+ NULL /* merge location configuration */
+};
+
+
+ngx_module_t ngx_http_stub_cache_status_module = {
+ NGX_MODULE_V1,
+ &ngx_http_stub_cache_status_module_ctx, /* module context */
+ ngx_http_cache_status_commands, /* module directives */
+ NGX_HTTP_MODULE, /* module type */
+ NULL, /* init master */
+ NULL, /* init module */
+ NULL, /* init process */
+ NULL, /* init thread */
+ NULL, /* exit thread */
+ NULL, /* exit process */
+ NULL, /* exit master */
+ NGX_MODULE_V1_PADDING
+};
+
+
+static ngx_int_t ngx_http_cache_status_handler(ngx_http_request_t *r)
+{
+ ngx_http_core_loc_conf_t *clcf;
+ ngx_open_file_cache_t *cache;
+
+ size_t size;
+ ngx_int_t rc;
+ ngx_buf_t *b;
+ ngx_chain_t out;
+ float percent;
+ ngx_atomic_uint_t pm, pd;
+
+ if (r->method != NGX_HTTP_GET && r->method != NGX_HTTP_HEAD) {
+ return NGX_HTTP_NOT_ALLOWED;
+ }
+
+ rc = ngx_http_discard_request_body(r);
+
+ if (rc != NGX_OK) {
+ return rc;
+ }
+
+ r->headers_out.content_type.len = sizeof("text/plain") - 1;
+ r->headers_out.content_type.data = (u_char *) "text/plain";
+
+ if (r->method == NGX_HTTP_HEAD) {
+ r->headers_out.status = NGX_HTTP_OK;
+
+ rc = ngx_http_send_header(r);
+
+ if (rc == NGX_ERROR || rc > NGX_OK || r->header_only) {
+ return rc;
+ }
+ }
+
+ clcf = ngx_http_get_module_loc_conf(r, ngx_http_core_module);
+ cache = clcf->open_file_cache;
+
+ if ((cache == NULL) || !cache->max) {
+ b = ngx_create_temp_buf(r->pool, sizeof("Cache disabled.\n"));
+ if (b == NULL) {
+ return NGX_HTTP_INTERNAL_SERVER_ERROR;
+ }
+
+ out.buf = b;
+ out.next = NULL;
+
+ b->last = ngx_cpymem(b->last, "Cache disabled.\n",
+ sizeof("Cache disabled.\n") - 1);
+ } else {
+ size = sizeof("Cached files: (100.00%)\n") + 3 * NGX_ATOMIC_T_LEN
+ + sizeof("Total files: \n\n") + NGX_ATOMIC_T_LEN
+ + sizeof("Hits: (100.00%)\n") + NGX_ATOMIC_T_LEN
+ + sizeof("Misses: \n") + NGX_ATOMIC_T_LEN
+ + sizeof("Cleanups: \n") + NGX_ATOMIC_T_LEN;
+
+ b = ngx_create_temp_buf(r->pool, size);
+ if (b == NULL) {
+ return NGX_HTTP_INTERNAL_SERVER_ERROR;
+ }
+
+ out.buf = b;
+ out.next = NULL;
+
+ /* Work around the lack of "%f" in ngx_sprintf */
+ percent = (100.0 * cache->current) / (float)cache->max;
+ pd = ((ngx_atomic_uint_t) (percent * 100)) % 100;
+ pm = ((ngx_atomic_uint_t) percent) % 100;
+
+ b->last = ngx_sprintf(b->last, "Cached files: %uA (%uA.%uA%%)\n",
+ cache->current, pm, pd);
+
+ b->last = ngx_sprintf(b->last, "Total files: %uA\n\n", cache->max);
+
+ /* guard against division by zero when the cache has had no lookups yet */
+ percent = cache->hits ? (100.0 * cache->hits) / (float)(cache->hits + cache->misses) : 0;
+ pd = ((ngx_atomic_uint_t) (percent * 100)) % 100;
+ pm = ((ngx_atomic_uint_t) percent) % 100;
+
+ b->last = ngx_sprintf(b->last, "Hits: %uA (%uA.%uA%%)\n",
+ cache->hits, pm, pd);
+
+ b->last = ngx_sprintf(b->last, "Misses: %uA\n", cache->misses);
+
+ b->last = ngx_sprintf(b->last, "Cleanups: %uA\n", cache->cleanups);
+ }
+
+ r->headers_out.status = NGX_HTTP_OK;
+ r->headers_out.content_length_n = b->last - b->pos;
+
+ b->last_buf = 1;
+
+ rc = ngx_http_send_header(r);
+
+ if (rc == NGX_ERROR || rc > NGX_OK || r->header_only) {
+ return rc;
+ }
+
+ return ngx_http_output_filter(r, &out);
+}
+
+
+static char *ngx_http_set_cache_status(ngx_conf_t *cf, ngx_command_t *cmd,
+    void *conf)
+{
+ ngx_http_core_loc_conf_t *clcf;
+
+ clcf = ngx_http_conf_get_module_loc_conf(cf, ngx_http_core_module);
+ clcf->handler = ngx_http_cache_status_handler;
+
+ return NGX_CONF_OK;
+}
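Once nginx has been rebuilt and restarted with the location above in place,
the statistics can be fetched with any HTTP client, for example:

curl http://domain.name/cache-status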