#include "internal/gc.h"
#include "internal/hash.h"
#include "internal/proc.h"
#include "internal/sanitizers.h"
wmap_foreach_i(st_data_t key, st_data_t val, st_data_t arg)
    if (data->dead_entry != NULL) {
        ruby_sized_xfree(data->dead_entry, sizeof(struct weakmap_entry));
        data->dead_entry = NULL;
    if (wmap_live_p(entry->key) && wmap_live_p(entry->val)) {
        int ret = data->func(entry, data->arg);
    data->dead_entry = entry;
    st_foreach(w->table, wmap_foreach_i, (st_data_t)&foreach_data);
    ruby_sized_xfree(foreach_data.dead_entry, sizeof(struct weakmap_entry));
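/*
 * A minimal sketch of the deferred-free idiom the callback above follows
 * (sweep_data, sweep_i, sweep_table and live_p are hypothetical names): an
 * entry cannot be freed while st_foreach() is still positioned on it, so the
 * callback frees the entry recorded as dead on the *previous* iteration,
 * records the current one, and returns ST_DELETE; whatever is still recorded
 * after the walk is freed by the caller.
 */
#include <stdlib.h>
#include "ruby/st.h"

struct sweep_data {
    void *dead;                       /* entry unlinked on the previous iteration */
    int (*live_p)(st_data_t key);     /* predicate deciding whether an entry survives */
};

static int
sweep_i(st_data_t key, st_data_t val, st_data_t arg)
{
    struct sweep_data *data = (struct sweep_data *)arg;

    if (data->dead != NULL) {         /* safe now: st_foreach has moved past it */
        free(data->dead);
        data->dead = NULL;
    }

    if (data->live_p(key)) {
        return ST_CONTINUE;
    }

    data->dead = (void *)key;         /* defer the free to the next callback */
    return ST_DELETE;                 /* unlink the current slot from the table */
}

static void
sweep_table(st_table *table, int (*live_p)(st_data_t))
{
    struct sweep_data data = { NULL, live_p };
    st_foreach(table, sweep_i, (st_data_t)&data);
    free(data.dead);                  /* the last deferred entry, if any */
}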
    rb_gc_mark_weak(&entry->key);
    rb_gc_mark_weak(&entry->val);
    wmap_foreach(w, wmap_mark_weak_table_i, (st_data_t)0);
wmap_free_table_i(st_data_t key, st_data_t val, st_data_t arg)
    st_foreach(w->table, wmap_free_table_i, 0);
    st_free_table(w->table);
wmap_memsize(const void *ptr)
    size += st_memsize(w->table);
    size += st_table_size(w->table) * (2 * sizeof(VALUE));
wmap_compact_table_i(struct weakmap_entry *entry, st_data_t data)
    st_table *table = (st_table *)data;
    VALUE new_key = rb_gc_location(entry->key);
    entry->val = rb_gc_location(entry->val);
    if (entry->key != new_key) {
        entry->key = new_key;
        DURING_GC_COULD_MALLOC_REGION_START();
        st_insert(table, (st_data_t)&entry->key, (st_data_t)&entry->val);
        DURING_GC_COULD_MALLOC_REGION_END();
wmap_compact(void *ptr)
    wmap_foreach(w, wmap_compact_table_i, (st_data_t)w->table);
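/*
 * A minimal sketch of the compaction idea used above (struct holder and
 * holder_compact are hypothetical): rb_gc_location() returns the current
 * address of an object that the compacting GC may have moved, so a dcompact
 * callback rewrites every stored VALUE through it.  The WeakMap version
 * additionally re-inserts the entry when the key moved, because the table
 * hashes on the key's numeric value and the old bin would no longer match.
 */
#include "ruby/ruby.h"

struct holder {
    VALUE obj;                        /* a reference the GC is allowed to move */
};

static void
holder_compact(void *ptr)
{
    struct holder *h = ptr;
    h->obj = rb_gc_location(h->obj);  /* follow the object to its new address */
}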
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
wmap_cmp(st_data_t x, st_data_t y)
wmap_hash(st_data_t n)
    return st_numhash(*(VALUE *)n);
static const struct st_hash_type wmap_hash_type = {
wmap_allocate(VALUE klass)
    w->table = st_init_table(&wmap_hash_type);
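/*
 * A minimal sketch of the allocator pattern above (my_map, my_map_type and
 * my_map_allocate are hypothetical, and the rb_data_type_t is reduced to what
 * the sketch needs; a real type would also free m->table in its dfree hook):
 * TypedData_Make_Struct() wraps a zeroed C struct in a Ruby object of the
 * given class, and the st_table is created up front so every instance starts
 * with its own table.
 */
#include "ruby/ruby.h"
#include "ruby/st.h"

struct my_map {
    st_table *table;
};

static const rb_data_type_t my_map_type = {
    "my_map",
    { 0, RUBY_TYPED_DEFAULT_FREE, 0, },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY
};

static VALUE
my_map_allocate(VALUE klass)
{
    struct my_map *m;
    VALUE obj = TypedData_Make_Struct(klass, struct my_map, &my_map_type, m);
    m->table = st_init_numtable();    /* the real code passes its custom &wmap_hash_type */
    return obj;
}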
    if (RSTRING_PTR(str)[0] == '#') {
        RSTRING_PTR(str)[0] = '#';
    wmap_inspect_append(str, entry->key);
    wmap_inspect_append(str, entry->val);
wmap_inspect(VALUE self)
    VALUE str = rb_sprintf("-<%"PRIsVALUE":%p", c, (void *)self);
    wmap_foreach(w, wmap_inspect_i, (st_data_t)str);
    RSTRING_PTR(str)[0] = '#';
    wmap_foreach(w, wmap_each_i, (st_data_t)0);
wmap_each_key(VALUE self)
    wmap_foreach(w, wmap_each_key_i, (st_data_t)0);
wmap_each_value_i(struct weakmap_entry *entry, st_data_t _data)
wmap_each_value(VALUE self)
    wmap_foreach(w, wmap_each_value_i, (st_data_t)0);
    rb_ary_push(ary, entry->key);
    VALUE ary = rb_ary_new();
    wmap_foreach(w, wmap_keys_i, (st_data_t)ary);
    rb_ary_push(ary, entry->val);
wmap_values(VALUE self)
    VALUE ary = rb_ary_new();
    wmap_foreach(w, wmap_values_i, (st_data_t)ary);
nonspecial_obj_id(VALUE obj)
#if SIZEOF_LONG == SIZEOF_VOIDP
#elif SIZEOF_LONG_LONG == SIZEOF_VOIDP
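/*
 * A minimal sketch of the width dispatch the #if above selects between
 * (to_ruby_integer is hypothetical): a pointer-sized value becomes a Ruby
 * Integer via whichever conversion macro matches the platform's pointer width.
 */
#include "ruby/ruby.h"

static VALUE
to_ruby_integer(VALUE obj)
{
#if SIZEOF_LONG == SIZEOF_VOIDP
    return ULONG2NUM((unsigned long)obj);      /* long is pointer-sized (LP64) */
#elif SIZEOF_LONG_LONG == SIZEOF_VOIDP
    return ULL2NUM((unsigned long long)obj);   /* e.g. LLP64 platforms */
#else
# error "no integer type matches the pointer width"
#endif
}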
wmap_aset_replace(st_data_t *key, st_data_t *val, st_data_t new_key_ptr, int existing)
    VALUE new_val = *(((VALUE *)new_key_ptr) + 1);
    assert(*(VALUE *)*key == new_key);
    *key = (st_data_t)&entry->key;
    *val = (st_data_t)&entry->val;
    *(VALUE *)*key = new_key;
    *(VALUE *)*val = new_val;
    VALUE pair[2] = { key, val };
    st_update(w->table, (st_data_t)pair, wmap_aset_replace, (st_data_t)pair);
    return nonspecial_obj_id(val);
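/*
 * A minimal sketch of the st_update() pattern used above (count_up and record
 * are hypothetical): the callback receives pointers to the key and value
 * slots plus an `existing` flag, may rewrite them in place, and returns
 * ST_CONTINUE to keep (or newly insert) the entry, so "insert or modify"
 * costs a single hash lookup.
 */
#include "ruby/st.h"

static int
count_up(st_data_t *key, st_data_t *val, st_data_t arg, int existing)
{
    if (existing) {
        *val = (st_data_t)((long)*val + 1);    /* already present: bump the count */
    }
    else {
        *val = (st_data_t)1;                   /* first time: st_update inserts the key */
    }
    return ST_CONTINUE;
}

static void
record(st_table *counts, st_data_t key)
{
    st_update(counts, key, count_up, 0);
}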
    assert(wmap_live_p(key));
    if (!st_lookup(w->table, (st_data_t)&key, &data)) return Qundef;
    return *(VALUE *)data;
    VALUE obj = wmap_lookup(self, key);
    return !UNDEF_P(obj) ? obj : Qnil;
    VALUE orig_key = key;
    st_data_t orig_key_data = (st_data_t)&orig_key;
    st_data_t orig_val_data;
    if (st_delete(w->table, &orig_key_data, &orig_val_data)) {
        rb_gc_remove_weak(self, (VALUE *)orig_key_data);
        rb_gc_remove_weak(self, (VALUE *)orig_val_data);
    if (wmap_live_p(orig_val)) {
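/*
 * A minimal sketch of why rb_gc_remove_weak() appears above (drop_weak_slot is
 * hypothetical): a slot that was handed to rb_gc_mark_weak() during marking is
 * remembered by the GC, so an owner that discards the slot outside of GC must
 * unregister it first, or the GC would later write through a dangling pointer.
 * Both functions are internal GC APIs from internal/gc.h, not part of the
 * public C extension API.
 */
static void
drop_weak_slot(VALUE owner, VALUE *slot)
{
    rb_gc_remove_weak(owner, slot);            /* tell the GC to forget this weak reference */
    ruby_sized_xfree(slot, sizeof(VALUE));     /* now the slot itself can go away */
}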
    return RBOOL(!UNDEF_P(wmap_lookup(self, key)));
    st_index_t n = st_table_size(w->table);
#if SIZEOF_ST_INDEX_T <= SIZEOF_LONG
wkmap_mark_table_i(st_data_t key, st_data_t val_obj, st_data_t data)
    if (dead_entry != NULL) {
        ruby_sized_xfree(*dead_entry, sizeof(VALUE));
    if (wmap_live_p(*key_ptr)) {
        rb_gc_mark_weak(key_ptr);
        rb_gc_mark_movable((VALUE)val_obj);
    *dead_entry = key_ptr;
    VALUE *dead_entry = NULL;
    st_foreach(w->table, wkmap_mark_table_i, (st_data_t)&dead_entry);
    if (dead_entry != NULL) {
        ruby_sized_xfree(dead_entry, sizeof(VALUE));
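/*
 * A minimal sketch of the marking split above (pair_ref and pair_ref_mark are
 * hypothetical): the key slot is registered as a weak reference, so the GC may
 * collect the key and clear the slot, while the value is marked movable, i.e.
 * kept alive but allowed to be relocated by compaction.  Both calls belong in
 * a dmark callback.
 */
struct pair_ref {
    VALUE *key_slot;                  /* heap-allocated slot holding the key */
    VALUE val;
};

static void
pair_ref_mark(void *ptr)
{
    struct pair_ref *p = ptr;
    rb_gc_mark_weak(p->key_slot);     /* weak: cleared when the key is collected */
    rb_gc_mark_movable(p->val);       /* strong, but compaction may move it */
}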
wkmap_free_table_i(st_data_t key, st_data_t _val, st_data_t _arg)
    ruby_sized_xfree((VALUE *)key, sizeof(VALUE));
    st_foreach(w->table, wkmap_free_table_i, 0);
    st_free_table(w->table);
wkmap_memsize(const void *ptr)
    size += st_memsize(w->table);
    size += st_table_size(w->table) * sizeof(VALUE);
wkmap_compact_table_i(st_data_t key, st_data_t val_obj, st_data_t data, int _error)
    if (dead_entry != NULL) {
        ruby_sized_xfree(*dead_entry, sizeof(VALUE));
    if (wmap_live_p(*key_ptr)) {
        if (*key_ptr != rb_gc_location(*key_ptr) || val_obj != rb_gc_location(val_obj)) {
    *dead_entry = key_ptr;
wkmap_compact_table_replace(st_data_t *key_ptr, st_data_t *val_ptr, st_data_t _data, int existing)
    *(VALUE *)*key_ptr = rb_gc_location(*(VALUE *)*key_ptr);
    *val_ptr = (st_data_t)rb_gc_location((VALUE)*val_ptr);
wkmap_compact(void *ptr)
    VALUE *dead_entry = NULL;
    st_foreach_with_replace(w->table, wkmap_compact_table_i, wkmap_compact_table_replace, (st_data_t)&dead_entry);
    if (dead_entry != NULL) {
        ruby_sized_xfree(dead_entry, sizeof(VALUE));
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
wkmap_cmp(st_data_t x, st_data_t y)
    if (wmap_live_p(x_obj) && wmap_live_p(y_obj)) {
        return rb_any_cmp(x_obj, y_obj);
wkmap_hash(st_data_t n)
    assert(wmap_live_p(obj));
    return rb_any_hash(obj);
static const struct st_hash_type wkmap_hash_type = {
wkmap_allocate(VALUE klass)
    w->table = st_init_table(&wkmap_hash_type);
    if (!st_lookup(w->table, (st_data_t)&key, &data)) return Qundef;
    VALUE obj = wkmap_lookup(self, key);
wkmap_aset_replace(st_data_t *key, st_data_t *val, st_data_t data_args, int existing)
    *(VALUE *)*key = args->new_key;
    *val = (st_data_t)args->new_val;
    rb_raise(rb_eArgError, "WeakKeyMap keys must be garbage collectable");
    st_update(w->table, (st_data_t)&key, wkmap_aset_replace, (st_data_t)&args);
    VALUE orig_key = key;
    st_data_t orig_key_data = (st_data_t)&orig_key;
    st_data_t orig_val_data;
    if (st_delete(w->table, &orig_key_data, &orig_val_data)) {
        rb_gc_remove_weak(self, (VALUE *)orig_key_data);
        ruby_sized_xfree((VALUE *)orig_key_data, sizeof(VALUE));
    if (!st_get_key(w->table, (st_data_t)&key, &orig_key)) return Qnil;
    return *(VALUE *)orig_key;
    return RBOOL(wkmap_lookup(self, key) != Qundef);
wkmap_clear_i(st_data_t key, st_data_t val, st_data_t data)
    rb_gc_remove_weak(self, (VALUE *)key);
    return wkmap_free_table_i(key, val, 0);
wkmap_clear(VALUE self)
    st_foreach(w->table, wkmap_clear_i, (st_data_t)self);
wkmap_inspect(VALUE self)
    st_index_t n = st_table_size(w->table);
#if SIZEOF_ST_INDEX_T <= SIZEOF_LONG
    const char *format = "#<%"PRIsVALUE":%p size=%lu>";
    const char *format = "#<%"PRIsVALUE":%p size=%llu>";