【Data Structures】How can a multidimensional array allocate memory on demand and grow automatically? How can floating-point numbers be stored compressed as fixed-point values, with high…

Declaring std::unique_ptr<Block> m_data[B1][B1]; as a member makes Grid a very large object (2048 x 2048 unique_ptrs, roughly 32 MB on a 64-bit build), and MSVC runs out of memory while compiling it (gcc does not seem to have this problem). So starting from course-master\10\03\00.cpp, std::unique_ptr<Block> m_data[B1][B1]; has to be changed to

using Matrix = std::unique_ptr<Block>[B1][B1];
std::unique_ptr<Matrix> m_data;

so that Grid itself only stores a single pointer and the big B1 x B1 table of block pointers is allocated on the heap in the constructor instead. This solves the problem completely.
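For reference, here is the change in isolation (a sketch; the name GridSketch is made up, while B1, Block, Matrix and m_data match the full listing below). Note that new applied to an array type such as Matrix returns a pointer to its first element, of type std::unique_ptr<Block>(*)[B1], which is why the constructor needs the cast to Matrix*:

#include <memory>

struct GridSketch {
    constexpr static int B1 = 1 << 11;
    struct Block { char m_block[256][256]; };

    // before (this is the member that made MSVC run out of memory at compile time):
    // std::unique_ptr<Block> m_data[B1][B1];

    // after: only a single pointer is stored in the object itself
    using Matrix = std::unique_ptr<Block>[B1][B1];
    std::unique_ptr<Matrix> m_data;

    GridSketch() : m_data((Matrix*)new Matrix) {}  // new Matrix yields std::unique_ptr<Block>(*)[B1], hence the cast
};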

Here is the full modified code:
#include <iostream>
#include <memory>   // std::unique_ptr, std::make_unique
#include <cmath>    // std::floor
#include <cstdio>   // printf
#include "bate.h"

#define N (512*512)

struct Grid {
    constexpr static int Bshift = 8;          // shift amount for dividing by the block size
    constexpr static int B = 1 << Bshift;     // block edge length, used for indexing
    constexpr static int Bmask = B - 1;       // mask for taking the remainder
    constexpr static int B1shift = 11;
    constexpr static int B1 = 1 << B1shift;   // number of blocks per axis
    constexpr static int B1mask = B1 - 1;

    struct Block {
        char m_block[B][B];
    };

    // Instead of a huge std::unique_ptr<Block> m_data[B1][B1] member, Grid holds a
    // single pointer to a heap-allocated B1 x B1 table of block pointers.
    using Matrix = std::unique_ptr<Block>[B1][B1];
    std::unique_ptr<Matrix> m_data;

    Grid() : m_data((Matrix*)new Matrix) {}
    char read(int x, int y) const {
        // High bits select the block, low bits select the cell; the masks also make
        // negative coordinates wrap around. read() is const, so the reference to the
        // member data has to be taken as const as well.
        const std::unique_ptr<Block>& block =
            (*m_data)[(x >> Bshift) & B1mask][(y >> Bshift) & B1mask];
        if (!block) return 0;   // block was never allocated: treat it as all zeros
        return block->m_block[x & Bmask][y & Bmask];
    }

    void write(int x, int y, char value) {
        std::unique_ptr<Block>& block =
            (*m_data)[(x >> Bshift) & B1mask][(y >> Bshift) & B1mask];
        if (!block) block = std::make_unique<Block>();   // allocate the block on first write
        block->m_block[x & Bmask][y & Bmask] = value;
    }
    template <class Func>
    void foreach(const Func& func) {
        for (int x1 = 0; x1 < B1; x1++) {       // visit every block pointer in the table
            for (int y1 = 0; y1 < B1; y1++) {
                const std::unique_ptr<Block>& block = (*m_data)[x1 & B1mask][y1 & B1mask];
                if (!block) continue;           // skip blocks that were never written to
                int xb = x1 << Bshift, yb = y1 << Bshift;
                for (int dx = 0; dx < B; dx++) {
                    for (int dy = 0; dy < B; dy++) {
                        func(xb | dx, yb | dy, block->m_block[dx][dy]);
                    }
                }
            }
        }
    }
};
int main() {
    bate::timing("main");
    Grid* a = new Grid{};   // Grid itself is now just one pointer; the block table is allocated in its constructor
    float px = -100.f, py = 100.f;
    float vx = 0.2f, vy = -0.6f;
    for (int step = 0; step < N; step++) {
        px += vx;
        py += vy;
        int x = (int)std::floor(px);
        int y = (int)std::floor(py);
        a->write(x, y, 1);   // mark the cell the particle passes through
    }
    int count = 0;
    a->foreach([&](int x, int y, char& value) {
        if (value != 0) ++count;   // count all cells that were ever marked
    });
    bate::timing("main");
    printf("Count=%d\n", count);
    return 0;
}
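To make the two-level indexing concrete: the high bits of a coordinate select the block pointer, the low bits select the cell inside that 256 x 256 block, and the masks make negative coordinates wrap around to the top of the 2048 x 2048 block table (relying on the usual arithmetic right shift of negative values, which the course code also depends on since px starts at -100). A small standalone check of the index arithmetic used by read/write, with the same constants as the listing above:

#include <cassert>

int main() {
    constexpr int Bshift = 8,   Bmask  = (1 << Bshift) - 1;   // 256-wide blocks
    constexpr int B1shift = 11, B1mask = (1 << B1shift) - 1;  // 2048 x 2048 block table

    // x = 1000 lands in block column 3, cell column 232 inside that block
    assert(((1000 >> Bshift) & B1mask) == 3 && (1000 & Bmask) == 232);

    // y = -100 wraps around: block row 2047, cell row 156
    assert(((-100 >> Bshift) & B1mask) == 2047 && (-100 & Bmask) == 156);
    return 0;
}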
