43const RenderState *RenderState::_empty_state =
nullptr;
45size_t RenderState::_garbage_index = 0;
47PStatCollector RenderState::_cache_update_pcollector(
"*:State Cache:Update");
48PStatCollector RenderState::_garbage_collect_pcollector(
"*:State Cache:Garbage Collect");
49PStatCollector RenderState::_state_compose_pcollector(
"*:State Cache:Compose State");
50PStatCollector RenderState::_state_invert_pcollector(
"*:State Cache:Invert State");
53PStatCollector RenderState::_state_break_cycles_pcollector(
"*:State Cache:Break Cycles");
54PStatCollector RenderState::_state_validate_pcollector(
"*:State Cache:Validate");
71 if (_states ==
nullptr) {
76 _cache_stats.add_num_states(1);
77 _read_overrides =
nullptr;
78 _generated_shader =
nullptr;
90 _filled_slots(copy._filled_slots),
95 for (
int i = 0; i < RenderAttribRegistry::_max_slots; ++i) {
96 _attributes[i] = copy._attributes[i];
101 _cache_stats.add_num_states(1);
102 _read_overrides =
nullptr;
103 _generated_shader =
nullptr;
105#ifdef DO_MEMORY_USAGE
117 nassertv(!is_destructing());
123 nassertv(_saved_entry == -1);
124 nassertv(_composition_cache.
is_empty() && _invert_composition_cache.
is_empty());
142 SlotMask mask = _filled_slots | other._filled_slots;
145 int result = _attributes[slot].compare_to(other._attributes[slot]);
165 if (
this == &other) {
172 for (
int n = 0; n < num_sorted_slots; ++n) {
174 nassertr((_attributes[slot]._attrib !=
nullptr) == _filled_slots.
get_bit(slot), 0);
177 const RenderAttrib *b = other._attributes[slot]._attrib;
179 return a < b ? -1 : 1;
197 const RenderAttrib *b = other._attributes[slot]._attrib;
199 return a < b ? -1 : 1;
218 const Attribute &attrib = _attributes[slot];
219 nassertr(attrib._attrib !=
nullptr,
false);
220 if (!attrib._attrib->cull_callback(trav, data)) {
237 int slot = attrib->get_slot();
238 state->_attributes[slot].set(attrib,
override);
239 state->_filled_slots.
set_bit(slot);
240 return return_new(state);
250 state->_attributes[attrib1->get_slot()].set(attrib1,
override);
251 state->_attributes[attrib2->get_slot()].set(attrib2,
override);
252 state->_filled_slots.
set_bit(attrib1->get_slot());
253 state->_filled_slots.
set_bit(attrib2->get_slot());
254 return return_new(state);
265 state->_attributes[attrib1->get_slot()].set(attrib1,
override);
266 state->_attributes[attrib2->get_slot()].set(attrib2,
override);
267 state->_attributes[attrib3->get_slot()].set(attrib3,
override);
268 state->_filled_slots.
set_bit(attrib1->get_slot());
269 state->_filled_slots.
set_bit(attrib2->get_slot());
270 state->_filled_slots.
set_bit(attrib3->get_slot());
271 return return_new(state);
283 state->_attributes[attrib1->get_slot()].set(attrib1,
override);
284 state->_attributes[attrib2->get_slot()].set(attrib2,
override);
285 state->_attributes[attrib3->get_slot()].set(attrib3,
override);
286 state->_attributes[attrib4->get_slot()].set(attrib4,
override);
287 state->_filled_slots.
set_bit(attrib1->get_slot());
288 state->_filled_slots.
set_bit(attrib2->get_slot());
289 state->_filled_slots.
set_bit(attrib3->get_slot());
290 state->_filled_slots.
set_bit(attrib4->get_slot());
291 return return_new(state);
304 state->_attributes[attrib1->get_slot()].set(attrib1,
override);
305 state->_attributes[attrib2->get_slot()].set(attrib2,
override);
306 state->_attributes[attrib3->get_slot()].set(attrib3,
override);
307 state->_attributes[attrib4->get_slot()].set(attrib4,
override);
308 state->_attributes[attrib5->get_slot()].set(attrib5,
override);
309 state->_filled_slots.
set_bit(attrib1->get_slot());
310 state->_filled_slots.
set_bit(attrib2->get_slot());
311 state->_filled_slots.
set_bit(attrib3->get_slot());
312 state->_filled_slots.
set_bit(attrib4->get_slot());
313 state->_filled_slots.
set_bit(attrib5->get_slot());
314 return return_new(state);
321make(
const RenderAttrib *
const *attrib,
int num_attribs,
int override) {
322 if (num_attribs == 0) {
326 for (
int i = 0; i < num_attribs; i++) {
327 int slot = attrib[i]->get_slot();
328 state->_attributes[slot].set(attrib[i],
override);
329 state->_filled_slots.
set_bit(slot);
331 return return_new(state);
358 return do_compose(other);
364 int index = _composition_cache.
find(other);
366 Composition &comp = ((
RenderState *)
this)->_composition_cache.modify_data(index);
367 if (comp._result ==
nullptr) {
372 comp._result = result;
397 ((
RenderState *)
this)->_composition_cache[other]._result = result;
402 ((
RenderState *)other)->_composition_cache[
this]._result =
nullptr;
447 return do_invert_compose(other);
453 int index = _invert_composition_cache.
find(other);
455 Composition &comp = ((
RenderState *)
this)->_invert_composition_cache.modify_data(index);
456 if (comp._result ==
nullptr) {
460 CPT(
RenderState) result = do_invert_compose(other);
461 comp._result = result;
481 CPT(
RenderState) result = do_invert_compose(other);
485 ((
RenderState *)
this)->_invert_composition_cache[other]._result = result;
490 ((
RenderState *)other)->_invert_composition_cache[
this]._result =
nullptr;
513add_attrib(
const RenderAttrib *attrib,
int override)
const {
514 int slot = attrib->get_slot();
515 if (_filled_slots.
get_bit(slot) &&
516 _attributes[slot]._override >
override) {
523 new_state->_attributes[slot].set(attrib,
override);
524 new_state->_filled_slots.
set_bit(slot);
525 return return_new(new_state);
537 int slot = attrib->get_slot();
538 new_state->_attributes[slot]._attrib = attrib;
539 new_state->_filled_slots.
set_bit(slot);
540 return return_new(new_state);
550set_attrib(
const RenderAttrib *attrib,
int override)
const {
552 int slot = attrib->get_slot();
553 new_state->_attributes[slot].set(attrib,
override);
554 new_state->_filled_slots.
set_bit(slot);
555 return return_new(new_state);
563remove_attrib(
int slot)
const {
564 if (_attributes[slot]._attrib ==
nullptr) {
575 new_state->_attributes[slot].set(
nullptr, 0);
576 new_state->_filled_slots.
clear_bit(slot);
577 return return_new(new_state);
587adjust_all_priorities(
int adjustment)
const {
590 SlotMask mask = _filled_slots;
593 Attribute &attrib = new_state->_attributes[slot];
594 nassertr(attrib._attrib !=
nullptr,
this);
595 attrib._override = std::max(attrib._override + adjustment, 0);
597 mask.clear_bit(slot);
598 slot = mask.get_lowest_on_bit();
601 return return_new(new_state);
612 if (garbage_collect_states || !state_cache) {
628 if (auto_break_cycles && uniquify_states) {
656output(ostream &out)
const {
663 const char *sep =
"";
665 SlotMask mask = _filled_slots;
668 const Attribute &attrib = _attributes[slot];
669 nassertv(attrib._attrib !=
nullptr);
670 out << sep << attrib._attrib->get_type();
673 mask.clear_bit(slot);
674 slot = mask.get_lowest_on_bit();
684write(ostream &out,
int indent_level)
const {
690 SlotMask mask = _filled_slots;
693 const Attribute &attrib = _attributes[slot];
694 nassertv(attrib._attrib !=
nullptr);
695 attrib._attrib->write(out, indent_level);
697 mask.clear_bit(slot);
698 slot = mask.get_lowest_on_bit();
719 if (_states ==
nullptr) {
741 if (_states ==
nullptr) {
749 StateCount state_count;
752 for (
size_t si = 0; si < size; ++si) {
755 std::pair<StateCount::iterator, bool> ir =
756 state_count.insert(StateCount::value_type(state, 1));
760 (*(ir.first)).second++;
765 for (i = 0; i < cache_size; ++i) {
767 if (result !=
nullptr && result != state) {
769 std::pair<StateCount::iterator, bool> ir =
770 state_count.insert(StateCount::value_type(result, 1));
774 (*(ir.first)).second++;
779 for (i = 0; i < cache_size; ++i) {
781 if (result !=
nullptr && result != state) {
782 std::pair<StateCount::iterator, bool> ir =
783 state_count.insert(StateCount::value_type(result, 1));
785 (*(ir.first)).second++;
796 StateCount::iterator sci;
797 for (sci = state_count.begin(); sci != state_count.end(); ++sci) {
799 int count = (*sci).second;
805 if (pgraph_cat.is_debug()) {
807 <<
"Unused state: " << (
void *)state <<
":"
809 state->write(pgraph_cat.debug(
false), 2);
832 if (_states ==
nullptr) {
846 TempStates temp_states;
847 temp_states.reserve(orig_size);
850 for (
size_t si = 0; si < size; ++si) {
852 temp_states.push_back(state);
857 TempStates::iterator ti;
858 for (ti = temp_states.begin(); ti != temp_states.end(); ++ti) {
863 for (i = 0; i < cache_size; ++i) {
865 if (result !=
nullptr && result != state) {
866 result->cache_unref();
871 state->_composition_cache.
clear();
874 for (i = 0; i < cache_size; ++i) {
876 if (result !=
nullptr && result != state) {
877 result->cache_unref();
882 state->_invert_composition_cache.
clear();
891 return orig_size - new_size;
906 if (_states ==
nullptr || !garbage_collect_states) {
912 PStatTimer timer(_garbage_collect_pcollector);
916 size_t size = orig_size;
917 size_t num_this_pass = std::max(0,
int(size * garbage_collect_states_rate));
918 if (num_this_pass <= 0) {
922 bool break_and_uniquify = (auto_break_cycles && uniquify_transforms);
924 size_t si = _garbage_index;
929 num_this_pass = std::min(num_this_pass, size);
930 size_t stop_at_element = (si + num_this_pass) % size;
934 if (break_and_uniquify) {
941 state->detect_and_break_cycles();
953 state->release_new();
954 state->remove_cache_pointers();
955 state->cache_unref_only();
963 if (stop_at_element > 0) {
968 si = (si + 1) % size;
969 }
while (si != stop_at_element);
982 return (
int)orig_size - (int)size + num_attribs;
994 for (
size_t si = 0; si < size; ++si) {
996 state->_mungers.
clear();
997 state->_munged_states.
clear();
998 state->_last_mi = -1;
1017 if (_states ==
nullptr) {
1023 VisitedStates visited;
1027 for (
size_t si = 0; si < size; ++si) {
1030 bool inserted = visited.insert(state).second;
1032 ++_last_cycle_detect;
1033 if (r_detect_cycles(state, state, 1, _last_cycle_detect, &cycle_desc)) {
1035 CompositionCycleDesc::reverse_iterator csi;
1037 out <<
"\nCycle detected of length " << cycle_desc.size() + 1 <<
":\n"
1038 <<
"state " << (
void *)state <<
":" << state->
get_ref_count()
1040 state->write(out, 2);
1041 for (csi = cycle_desc.rbegin(); csi != cycle_desc.rend(); ++csi) {
1042 const CompositionCycleDescEntry &entry = (*csi);
1043 if (entry._inverted) {
1044 out <<
"invert composed with ";
1046 out <<
"composed with ";
1048 out << (
const void *)entry._obj <<
":" << entry._obj->
get_ref_count()
1049 <<
" " << *entry._obj <<
"\n"
1050 <<
"produces " << (
const void *)entry._result <<
":"
1052 entry._result->write(out, 2);
1053 visited.insert(entry._result);
1058 ++_last_cycle_detect;
1059 if (r_detect_reverse_cycles(state, state, 1, _last_cycle_detect, &cycle_desc)) {
1061 CompositionCycleDesc::iterator csi;
1063 out <<
"\nReverse cycle detected of length " << cycle_desc.size() + 1 <<
":\n"
1065 for (csi = cycle_desc.begin(); csi != cycle_desc.end(); ++csi) {
1066 const CompositionCycleDescEntry &entry = (*csi);
1067 out << (
const void *)entry._result <<
":"
1069 entry._result->write(out, 2);
1070 out << (
const void *)entry._obj <<
":"
1072 entry._obj->write(out, 2);
1073 visited.insert(entry._result);
1075 out << (
void *)state <<
":"
1077 state->write(out, 2);
1094 if (_states ==
nullptr) {
1095 out <<
"0 states:\n";
1101 out << size <<
" states:\n";
1102 for (
size_t si = 0; si < size; ++si) {
1104 state->write(out, 2);
1116 if (_states ==
nullptr) {
1120 PStatTimer timer(_state_validate_pcollector);
1129 <<
"RenderState::_states cache is invalid!\n";
1135 nassertr(si < size,
false);
1136 nassertr(_states->
get_key(si)->get_ref_count() >= 0,
false);
1139 while (snext < size) {
1140 nassertr(_states->
get_key(snext)->get_ref_count() >= 0,
false);
1145 if ((ci < 0) != (c > 0) ||
1146 (ci > 0) != (c < 0) ||
1147 (ci == 0) != (c == 0)) {
1149 <<
"RenderState::compare_to() not defined properly!\n";
1150 pgraph_cat.error(
false)
1151 <<
"(a, b): " << c <<
"\n";
1152 pgraph_cat.error(
false)
1153 <<
"(b, a): " << ci <<
"\n";
1154 ssi->write(pgraph_cat.error(
false), 2);
1155 ssnext->write(pgraph_cat.error(
false), 2);
1176 if (get_attrib(render_mode)) {
1179 if (get_attrib(tex_gen)) {
1182 if (get_attrib(tex_matrix)) {
1186 return geom_rendering;
1205validate_filled_slots()
const {
1209 int max_slots = reg->get_max_slots();
1210 for (
int slot = 1; slot < max_slots; ++slot) {
1211 const Attribute &attribute = _attributes[slot];
1212 if (attribute._attrib !=
nullptr) {
1217 return (mask == _filled_slots);
1227 SlotMask mask = _filled_slots;
1230 const Attribute &attrib = _attributes[slot];
1231 nassertv(attrib._attrib !=
nullptr);
1235 mask.clear_bit(slot);
1236 slot = mask.get_lowest_on_bit();
1239 _flags |= F_hash_known;
1251 nassertr(state !=
nullptr, state);
1256 if (state->_attributes[0]._attrib !=
nullptr) {
1257 const RenderAttrib *attrib = state->_attributes[0]._attrib;
1258 if (attrib->get_type() == TypeHandle::none()) {
1261 <<
"Uninitialized RenderAttrib type: " << attrib->get_type()
1266 if (already_reported.insert(attrib->get_type()).second) {
1268 << attrib->get_type() <<
" did not initialize its slot number.\n";
1273 state->_attributes[0]._attrib =
nullptr;
1277 nassertr(state->validate_filled_slots(), state);
1280 if (!uniquify_states && !state->
is_empty()) {
1284 return return_unique(state);
1297 nassertr(state !=
nullptr,
nullptr);
1304 if (paranoid_const) {
1311 if (state->_saved_entry != -1) {
1319 if (!uniquify_attribs && !state->
is_empty()) {
1320 SlotMask mask = state->_filled_slots;
1323 Attribute &attrib = state->_attributes[slot];
1324 nassertd(attrib._attrib !=
nullptr) continue;
1325 attrib._attrib = attrib._attrib->get_unique();
1326 mask.clear_bit(slot);
1331 int si = _states->find(state);
1336 if (state->get_ref_count() == 0) {
1343 if (garbage_collect_states) {
1349 si = _states->
store(state,
nullptr);
1352 state->_saved_entry = si;
1366 SlotMask mask = _filled_slots | other->_filled_slots;
1367 new_state->_filled_slots = mask;
1371 const Attribute &a = _attributes[slot];
1372 const Attribute &b = other->_attributes[slot];
1373 Attribute &result = new_state->_attributes[slot];
1375 if (a._attrib ==
nullptr) {
1376 nassertr(b._attrib !=
nullptr,
this);
1380 }
else if (b._attrib ==
nullptr) {
1384 }
else if (b._override < a._override) {
1388 }
else if (a._override < b._override &&
1389 a._attrib->lower_attrib_can_override()) {
1401 result.set(a._attrib->compose(b._attrib), b._override);
1404 mask.clear_bit(slot);
1405 slot = mask.get_lowest_on_bit();
1408 return return_new(new_state);
1415do_invert_compose(
const RenderState *other)
const {
1420 SlotMask mask = _filled_slots | other->_filled_slots;
1421 new_state->_filled_slots = mask;
1425 const Attribute &a = _attributes[slot];
1426 const Attribute &b = other->_attributes[slot];
1427 Attribute &result = new_state->_attributes[slot];
1429 if (a._attrib ==
nullptr) {
1430 nassertr(b._attrib !=
nullptr,
this);
1434 }
else if (b._attrib ==
nullptr) {
1441 result.set(a._attrib->invert_compose(b._attrib), 0);
1444 mask.clear_bit(slot);
1445 slot = mask.get_lowest_on_bit();
1447 return return_new(new_state);
1455detect_and_break_cycles() {
1456 PStatTimer timer(_state_break_cycles_pcollector);
1458 ++_last_cycle_detect;
1459 if (r_detect_cycles(
this,
this, 1, _last_cycle_detect,
nullptr)) {
1462 if (pgraph_cat.is_debug()) {
1464 <<
"Breaking cycle involving " << (*this) <<
"\n";
1469 ++_last_cycle_detect;
1470 if (r_detect_reverse_cycles(
this,
this, 1, _last_cycle_detect,
nullptr)) {
1471 if (pgraph_cat.is_debug()) {
1473 <<
"Breaking cycle involving " << (*this) <<
"\n";
1493 if (current_state->_cycle_detect == this_seq) {
1499 return (current_state == start_state && length > 2);
1501 ((
RenderState *)current_state)->_cycle_detect = this_seq;
1504 size_t cache_size = current_state->_composition_cache.
get_num_entries();
1505 for (i = 0; i < cache_size; ++i) {
1507 if (result !=
nullptr) {
1508 if (r_detect_cycles(start_state, result, length + 1,
1509 this_seq, cycle_desc)) {
1511 if (cycle_desc !=
nullptr) {
1513 CompositionCycleDescEntry entry(other, result,
false);
1514 cycle_desc->push_back(entry);
1521 cache_size = current_state->_invert_composition_cache.
get_num_entries();
1522 for (i = 0; i < cache_size; ++i) {
1524 if (result !=
nullptr) {
1525 if (r_detect_cycles(start_state, result, length + 1,
1526 this_seq, cycle_desc)) {
1528 if (cycle_desc !=
nullptr) {
1530 CompositionCycleDescEntry entry(other, result,
true);
1531 cycle_desc->push_back(entry);
1548r_detect_reverse_cycles(
const RenderState *start_state,
1552 if (current_state->_cycle_detect == this_seq) {
1558 return (current_state == start_state && length > 2);
1560 ((
RenderState *)current_state)->_cycle_detect = this_seq;
1563 size_t cache_size = current_state->_composition_cache.
get_num_entries();
1564 for (i = 0; i < cache_size; ++i) {
1566 if (other != current_state) {
1567 int oi = other->_composition_cache.
find(current_state);
1568 nassertr(oi != -1,
false);
1571 if (result !=
nullptr) {
1572 if (r_detect_reverse_cycles(start_state, result, length + 1,
1573 this_seq, cycle_desc)) {
1575 if (cycle_desc !=
nullptr) {
1577 CompositionCycleDescEntry entry(other, result,
false);
1578 cycle_desc->push_back(entry);
1586 cache_size = current_state->_invert_composition_cache.
get_num_entries();
1587 for (i = 0; i < cache_size; ++i) {
1589 if (other != current_state) {
1590 int oi = other->_invert_composition_cache.
find(current_state);
1591 nassertr(oi != -1,
false);
1594 if (result !=
nullptr) {
1595 if (r_detect_reverse_cycles(start_state, result, length + 1,
1596 this_seq, cycle_desc)) {
1598 if (cycle_desc !=
nullptr) {
1600 CompositionCycleDescEntry entry(other, result,
false);
1601 cycle_desc->push_back(entry);
1623 if (_saved_entry != -1) {
1625 nassertv_always(_states->
remove(
this));
1637remove_cache_pointers() {
1655 if (_composition_cache.
is_empty() && _invert_composition_cache.
is_empty()) {
1664 while (!_composition_cache.
is_empty()) {
1675 Composition comp = _composition_cache.
get_data(i);
1684 if (other !=
this) {
1685 int oi = other->_composition_cache.
find(
this);
1691 Composition ocomp = other->_composition_cache.
get_data(oi);
1700 if (ocomp._result !=
nullptr && ocomp._result != other) {
1708 if (comp._result !=
nullptr && comp._result !=
this) {
1715 while (!_invert_composition_cache.
is_empty()) {
1717 nassertv(other !=
this);
1718 Composition comp = _invert_composition_cache.
get_data(i);
1722 if (other !=
this) {
1723 int oi = other->_invert_composition_cache.
find(
this);
1725 Composition ocomp = other->_invert_composition_cache.
get_data(oi);
1729 if (ocomp._result !=
nullptr && ocomp._result != other) {
1734 if (comp._result !=
nullptr && comp._result !=
this) {
1744determine_bin_index() {
1746 if ((_flags & F_checked_bin_index) != 0) {
1751 std::string bin_name;
1755 if (get_attrib(bin)) {
1760 if (bin_name.empty()) {
1763 bin_name =
"opaque";
1766 if (get_attrib(transparency)) {
1767 switch (transparency->
get_mode()) {
1768 case TransparencyAttrib::M_alpha:
1769 case TransparencyAttrib::M_premultiplied_alpha:
1770 case TransparencyAttrib::M_dual:
1772 bin_name =
"transparent";
1782 _bin_index = bin_manager->
find_bin(bin_name);
1783 if (_bin_index == -1) {
1784 pgraph_cat.warning()
1785 <<
"No bin named " << bin_name <<
"; creating default bin.\n";
1786 _bin_index = bin_manager->
add_bin(bin_name, CullBinManager::BT_unsorted, 0);
1788 _flags |= F_checked_bin_index;
1795determine_cull_callback() {
1797 if ((_flags & F_checked_cull_callback) != 0) {
1802 SlotMask mask = _filled_slots;
1805 const Attribute &attrib = _attributes[slot];
1806 nassertv(attrib._attrib !=
nullptr);
1807 if (attrib._attrib->has_cull_callback()) {
1808 _flags |= F_has_cull_callback;
1812 mask.clear_bit(slot);
1813 slot = mask.get_lowest_on_bit();
1816 _flags |= F_checked_cull_callback;
1826 for (
int slot = 1; slot < num_slots; ++slot) {
1838update_pstats(
int old_referenced_bits,
int new_referenced_bits) {
1840 if ((old_referenced_bits & R_node) != 0) {
1841 _node_counter.sub_level(1);
1842 }
else if ((old_referenced_bits & R_cache) != 0) {
1843 _cache_counter.sub_level(1);
1845 if ((new_referenced_bits & R_node) != 0) {
1846 _node_counter.add_level(1);
1847 }
else if ((new_referenced_bits & R_cache) != 0) {
1848 _cache_counter.add_level(1);
1868 _states_lock =
new LightReMutex(
"RenderState::_states_lock");
1869 _cache_stats.
init();
1877 state->_saved_entry = _states->
store(state,
nullptr);
1878 _empty_state = state;
1898 nassertv(num_attribs == (
int)(uint16_t)num_attribs);
1906 const Attribute &attrib = _attributes[slot];
1907 nassertv(attrib._attrib !=
nullptr);
1924 int num_attribs = 0;
1927 for (
size_t i = 0; i < (*_read_overrides).size(); ++i) {
1928 int override = (*_read_overrides)[i];
1931 if (attrib !=
nullptr) {
1932 int slot = attrib->get_slot();
1933 if (slot > 0 && slot < reg->get_max_slots()) {
1934 _attributes[slot].set(attrib,
override);
1941 delete _read_overrides;
1942 _read_overrides =
nullptr;
1965 if (pointer == state) {
2005 state->fillin(scan, manager);
2020 _read_overrides =
new vector_int;
2021 (*_read_overrides).reserve(num_attribs);
2023 for (
int i = 0; i < num_attribs; ++i) {
2026 (*_read_overrides).push_back(
override);
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
void parse_params(const FactoryParams &params, DatagramIterator &scan, BamReader *&manager)
Takes in a FactoryParams, passed from a WritableFactory into any TypedWritable's make function,...
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
void cache_unref_delete(RefCountType *ptr)
This global helper function will unref the given ReferenceCount object, and if the reference count re...
This is the fundamental interface for extracting binary objects from a Bam file, as generated by a Ba...
void register_finalize(TypedWritable *whom)
Should be called by an object reading itself from the Bam file to indicate that this particular objec...
void register_change_this(ChangeThisFunc func, TypedWritable *whom)
Called by an object reading itself from the bam file to indicate that the object pointer that will be...
bool read_pointer(DatagramIterator &scan)
The interface for reading a pointer to another object from a Bam file.
static WritableFactory * get_factory()
Returns the global WritableFactory for generating TypedWritable objects.
This is the fundamental interface for writing binary objects to a Bam file, to be extracted later by ...
void write_pointer(Datagram &packet, const TypedWritable *dest)
The interface for writing a pointer to another object to a Bam file.
int get_lowest_on_bit() const
Returns the index of the lowest 1 bit in the mask.
void set_bit(int index)
Sets the nth bit on.
bool get_bit(int index) const
Returns true if the nth bit is set, false if it is cleared.
void clear_bit(int index)
Sets the nth bit off.
int get_num_on_bits() const
Returns the number of bits that are set to 1 in the mask.
This is used to track the utilization of the TransformState and RenderState caches,...
void inc_adds(bool is_new)
Increments by 1 the count of elements added to the cache.
void add_total_size(int count)
Adds the indicated count (positive or negative) to the total number of entries for the cache (net occ...
void add_num_states(int count)
Adds the indicated count (positive or negative) to the total count of individual RenderState or Trans...
void maybe_report(const char *name)
Outputs a report if enough time has elapsed.
void inc_dels()
Increments by 1 the count of elements removed from the cache.
void init()
Initializes the CacheStats for the first time.
void inc_hits()
Increments by 1 the count of cache hits.
void inc_misses()
Increments by 1 the count of cache misses.
get_cache_ref_count
Returns the current reference count.
Assigns geometry to a particular bin by name.
get_draw_order
Returns the draw order this attribute specifies.
get_bin_name
Returns the name of the bin this attribute specifies.
This is a global object that maintains the collection of named CullBins in the world.
static CullBinManager * get_global_ptr()
Returns the pointer to the global CullBinManager object.
int add_bin(const std::string &name, BinType type, int sort)
Defines a new bin with the indicated name, and returns the new bin_index.
int find_bin(const std::string &name) const
Returns the bin_index associated with the bin of the given name, or -1 if no bin has that name.
This collects together the pieces of data that are accumulated for each node while walking the scene ...
This object performs a depth-first traversal of the scene graph, with optional view-frustum culling,...
A class to retrieve the individual data elements previously stored in a Datagram.
uint16_t get_uint16()
Extracts an unsigned 16-bit integer.
int32_t get_int32()
Extracts a signed 32-bit integer.
An ordered list of data elements, formatted in memory for transmission over a socket or writing to a ...
void add_int32(int32_t value)
Adds a signed 32-bit integer to the datagram.
void add_uint16(uint16_t value)
Adds an unsigned 16-bit integer to the datagram.
An instance of this class is passed to the Factory when requesting it to do its business and construc...
void register_factory(TypeHandle handle, CreateFunc *func, void *user_data=nullptr)
Registers a new kind of thing the Factory will be able to create.
Similar to MutexHolder, but for a light mutex.
bool debug_is_locked() const
Returns true if the current thread has locked the LightReMutex, false otherwise.
Similar to MutexHolder, but for a light reentrant mutex.
A lightweight reentrant mutex.
static void update_type(ReferenceCount *ptr, TypeHandle type)
Associates the indicated type with the given pointer.
A lightweight class that represents a single element that may be timed and/or counted via stats.
A lightweight class that can be used to automatically start and stop a PStatCollector around a sectio...
void local_object()
This function should be called, once, immediately after creating a new instance of some ReferenceCoun...
bool unref_if_one() const
Atomically decreases the reference count of this object if it is one.
get_ref_count
Returns the current reference count.
virtual bool unref() const
Explicitly decrements the reference count.
This class is used to associate each RenderAttrib with a different slot index at runtime,...
const RenderAttrib * get_slot_default(int slot) const
Returns the default RenderAttrib object associated with slot n.
static RenderAttribRegistry * quick_get_global_ptr()
Returns the global_ptr without first ensuring it has been initialized.
int get_sorted_slot(int n) const
Returns the nth slot in sorted order.
int get_num_slots() const
Returns the number of RenderAttrib slots that have been allocated.
int get_num_sorted_slots() const
Returns the number of entries in the sorted_slots list.
This is the base class for a number of render attributes (other than transform) that may be set on sc...
static int garbage_collect()
Performs a garbage-collection cycle.
Specifies how polygons are to be drawn.
int get_geom_rendering(int geom_rendering) const
Returns the union of the Geom::GeomRendering bits that will be required once this RenderModeAttrib is...
This represents a unique collection of RenderAttrib objects that correspond to a particular renderabl...
int compare_to(const RenderState &other) const
Provides an arbitrary ordering among all unique RenderStates, so we can store the essentially differe...
int compare_mask(const RenderState &other, SlotMask compare_mask) const
This version of compare_to takes a slot mask that indicates which attributes to include in the compar...
static void register_with_read_factory()
Tells the BamReader how to create objects of type RenderState.
static int get_num_states()
Returns the total number of unique RenderState objects allocated in the world.
static void list_states(std::ostream &out)
Lists all of the RenderStates in the cache to the output stream, one per line.
void cache_ref_only() const
Overrides this method to update PStats appropriately.
bool is_empty() const
Returns true if the state is empty, false otherwise.
virtual void write_datagram(BamWriter *manager, Datagram &dg)
Writes the contents of this object to the datagram for shipping out to a Bam file.
virtual int complete_pointers(TypedWritable **plist, BamReader *manager)
Receives an array of pointers, one for each time manager->read_pointer() was called in fillin().
static int garbage_collect()
Performs a garbage-collection cycle.
bool cull_callback(CullTraverser *trav, const CullTraverserData &data) const
Calls cull_callback() on each attrib.
static void bin_removed(int bin_index)
Intended to be called by CullBinManager::remove_bin(), this informs all the RenderStates in the world...
int compare_sort(const RenderState &other) const
Returns -1, 0, or 1 according to the relative sorting of these two RenderStates, with regards to rend...
virtual bool unref() const
Explicitly decrements the reference count.
static void init_states()
Make sure the global _states map is allocated.
static void clear_munger_cache()
Completely empties the cache of state + gsg -> munger, for all states and all gsg's.
static bool validate_states()
Ensures that the cache is still stored in sorted order, and that none of the cache elements have been...
static TypedWritable * change_this(TypedWritable *old_ptr, BamReader *manager)
Called immediately after complete_pointers(), this gives the object a chance to adjust its own pointe...
static void list_cycles(std::ostream &out)
Detects all of the reference-count cycles in the cache and reports them to standard output.
static int clear_cache()
Empties the cache of composed RenderStates.
virtual void finalize(BamReader *manager)
Called by the BamReader to perform any final actions needed for setting up the object after all objec...
virtual ~RenderState()
The destructor is responsible for removing the RenderState from the global set if it is there.
int get_geom_rendering(int geom_rendering) const
Returns the union of the Geom::GeomRendering bits that will be required once this RenderState is appl...
static int get_num_unused_states()
Returns the total number of RenderState objects that have been allocated but have no references outsi...
This template class implements an unordered map of keys to data, implemented as a hashtable.
const Key & get_key(size_t n) const
Returns the key in the nth entry of the table.
int store(const Key &key, const Value &data)
Records the indicated key/data pair in the map.
bool validate() const
Returns true if the internal table appears to be consistent, false if there are some internal errors.
void clear()
Completely empties the table.
const Value & get_data(size_t n) const
Returns the data in the nth entry of the table.
int find(const Key &key) const
Searches for the indicated key in the table.
bool remove(const Key &key)
Removes the indicated key and its associated data from the table.
bool consider_shrink_table()
Shrinks the table if the allocated storage is significantly larger than the number of elements in it.
void remove_element(size_t n)
Removes the nth entry from the table.
bool is_empty() const
Returns true if the table is empty; i.e.
size_t get_num_entries() const
Returns the number of active entries in the table.
Computes texture coordinates for geometry automatically based on vertex position and/or normal.
int get_geom_rendering(int geom_rendering) const
Returns the union of the Geom::GeomRendering bits that will be required once this TexGenAttrib is app...
Applies a transform matrix to UV's before they are rendered.
int get_geom_rendering(int geom_rendering) const
Returns the union of the Geom::GeomRendering bits that will be required once this TexMatrixAttrib is ...
get_main_thread
Returns a pointer to the "main" Thread object–this is the Thread that started the whole process.
get_current_thread
Returns a pointer to the currently-executing Thread object.
This controls the enabling of transparency.
get_mode
Returns the transparency mode.
TypeHandle is the identifier used to differentiate C++ class types.
Base class for objects that can be written to and read from Bam files.
virtual void fillin(DatagramIterator &scan, BamReader *manager)
This internal function is intended to be called by each class's make_from_bam() method to read in all...
virtual void write_datagram(BamWriter *manager, Datagram &dg)
Writes the contents of this object to the datagram for shipping out to a Bam file.
virtual int complete_pointers(TypedWritable **p_list, BamReader *manager)
Receives an array of pointers, one for each time manager->read_pointer() was called in fillin().
This is a sequence number that increments monotonically.
static size_t add_hash(size_t start, const Key &key)
Adds the indicated key into a running hash.
This is our own Panda specialization on the default STL map.
static size_t add_hash(size_t start, const void *key)
Adds the indicated key into a running hash.
This is our own Panda specialization on the default STL set.
This is our own Panda specialization on the default STL vector.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
std::ostream & indent(std::ostream &out, int indent_level)
A handy function for doing text formatting.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
int get_lowest_on_bit(unsigned short x)
Returns the index of the lowest 1 bit in the word.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.
PANDA 3D SOFTWARE Copyright (c) Carnegie Mellon University.