#ifndef RBX_VM_FIBER_STACK_HPP
#define RBX_VM_FIBER_STACK_HPP

#include <stddef.h>  // size_t
#include <list>      // std::list, used by FiberStacks below

namespace rubinius {
  // Forward declarations; STATE is a macro supplied by other VM headers.
  class FiberData;
  class GarbageCollector;
  class SharedState;
  class VM;

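  // A FiberStack is a single reference-counted C stack for fibers to run
  // on. A stack can outlive any one fiber: user_ names the fiber whose
  // frames are currently live on it, while refs_ counts every fiber that
  // still needs it.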
  class FiberStack {
    void* address_;    // base address of the stack memory
    size_t size_;      // size of the stack in bytes
    int refs_;         // number of fibers referencing this stack
    FiberData* user_;  // fiber whose frames currently occupy the stack
#ifdef HAVE_VALGRIND_H
    unsigned valgrind_id_;  // id from VALGRIND_STACK_REGISTER, used to deregister
#endif

  public:
    FiberStack(size_t size);

    void* address() {
      return address_;
    }

    // One past the end of the stack memory; on platforms where the stack
    // grows downward, this is where a fresh stack pointer starts.
    void* top_address() {
      return (void*)((char*)address_ + size_);
    }

    size_t size() {
      return size_;
    }

    void inc_ref() {
      refs_++;
    }

    void dec_ref() {
      refs_--;
    }

    int refs() {
      return refs_;
    }

    bool unused_p() {
      return refs_ == 0;
    }

    bool shared_p() {
      return user_ && refs_ > 1;
    }

    FiberData* user() {
      return user_;
    }

    void set_user(FiberData* d) {
      user_ = d;
    }

    void allocate();  // map the stack memory
    void free();      // release the mapping
    void flush(STATE);  // save the current user's live stack contents
    void orphan(STATE, FiberData* user);  // detach `user` so the stack can be reused
  };
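
  // Illustrative sketch only (not part of this header): the interface
  // above implies a simple ownership protocol, assuming allocate() and
  // free() manage the underlying memory:
  //
  //   FiberStack stack(65536);  // requested stack size in bytes
  //   stack.allocate();         // map the memory
  //   stack.inc_ref();          // a fiber starts using the stack
  //   ...                       // the fiber runs
  //   stack.dec_ref();          // the fiber is done with it
  //   if(stack.unused_p()) stack.free();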

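  // FiberStacks is the per-thread pool: it owns the stacks and the
  // FiberData records for every fiber on one VM thread, plus a small
  // trampoline area used when launching fibers.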
  class FiberStacks {
  public:
    const static size_t cTrampolineSize = 4096;  // one 4 KB page

  private:
    typedef std::list<FiberStack> Stacks;
    typedef std::list<FiberData*> Datas;

    size_t max_stacks_;  // upper bound on pooled stacks
    size_t stack_size_;  // size of each stack in bytes

    VM* thread_;         // the thread this pool serves
    Stacks stacks_;      // pooled stacks, reusable across fibers
    Datas datas_;        // FiberData for every fiber on this thread
    void* trampoline_;   // trampoline area (see trampoline())

  public:
    FiberStacks(VM* thread, SharedState& shared);
    ~FiberStacks();

    FiberStack* allocate();  // hand out a stack, reusing a pooled one when possible

    // Drop a fiber's bookkeeping record from the pool.
    void remove_data(FiberData* data) {
      datas_.remove(data);
    }

    FiberData* new_data(bool root=false);  // `root` marks the thread's root fiber

    void* trampoline();  // the shared trampoline area, cTrampolineSize bytes

    void gc_scan(GarbageCollector* gc);  // let the GC walk all live fiber data
  };
}

#endif