From e205304a3e903007e4d4d4dc3c0ae7d1e9edd620 Mon Sep 17 00:00:00 2001
From: normal
Date: Sun, 9 Jul 2017 23:04:43 +0000
Subject: Hash#[]= deduplicates string keys if (and only if) fstring exists

In typical applications, hash entries are read after being written to.
Blindly writing to hashes which are never read makes little sense.  So,
for any hash which is read from, an fstring entry should already exist
for the key.  We no longer blindly create fstrings when code is merely
setting random hash keys, preventing the performance regression in the
reverted r43870.

Regarding , this has minimal impact on bm_so_k_nucleotide, where hash
keys are set and not reused; performance is within 1-2% of existing
cases.

* hash.c: #include gc.h for rb_objspace_garbage_object_p
  (hash_aset_str): do read-only check of fstring table and reuse
  fstring if it exists and is still alive (not garbage)
  [ruby-core:81942] [Feature #13725]

git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@59304 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
---
 hash.c | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/hash.c b/hash.c
index 3176bc7a24..0514c852a9 100644
--- a/hash.c
+++ b/hash.c
@@ -18,6 +18,7 @@
 #include "probes.h"
 #include "id.h"
 #include "symbol.h"
+#include "gc.h"
 
 #ifdef __APPLE__
 # ifdef HAVE_CRT_EXTERNS_H
@@ -1518,8 +1519,21 @@ hash_aset(st_data_t *key, st_data_t *val, struct update_arg *arg, int existing)
 static int
 hash_aset_str(st_data_t *key, st_data_t *val, struct update_arg *arg, int existing)
 {
-    if (!existing) {
-        *key = rb_str_new_frozen(*key);
+    if (!existing && !RB_OBJ_FROZEN(*key)) {
+        VALUE fstr;
+        st_table *tbl = rb_vm_fstring_table();
+
+        if (st_lookup(tbl, *key, (st_data_t *)&fstr)) {
+            if (rb_objspace_garbage_object_p(fstr)) {
+                *key = rb_fstring(*key);
+            }
+            else {
+                *key = fstr;
+            }
+        }
+        else {
+            *key = rb_str_new_frozen(*key);
+        }
     }
     return hash_aset(key, val, arg, existing);
 }
--
cgit v1.2.3
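
A minimal Ruby-level sketch of the intended effect follows (not part of the
patch; it assumes a build with this change applied, and that String#-@
interns its receiver into the fstring table, as it does on this trunk):

    # Hypothetical illustration only, not taken from the commit.
    fstr = -"status"                # interns "status" as an fstring
    h = {}
    h["status".dup] = 200           # non-frozen key: []= consults the fstring table
    p h.keys.first.equal?(fstr)     # => true, the existing fstring is reused as the key
    p h.keys.first.frozen?          # => true, keys remain frozen as before

Before this change, the key would have been a fresh frozen copy made by
rb_str_new_frozen(), so the equal? check above would report false even when
an fstring with the same contents already exists.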