benchmark/inference/README.md
# Install the OGB (Open Graph Benchmark) package, required for the
# ogbn-products dataset used by the benchmark commands below.
pip install ogb
Autoconf is required for the jemalloc setup:
# autoconf provides the tooling jemalloc's autogen.sh needs to
# generate its configure script.
sudo apt-get install autoconf
Build jemalloc for the performance benchmark:
# Build jemalloc 5.2.1 from source and install it into
# ${workspace}/jemalloc-bin (no system-wide install needed).
# Quote ${workspace} so paths containing spaces work, and guard the
# `cd` calls so a failure does not leave later commands running in
# the wrong directory.
cd "${workspace}" || exit 1
git clone https://github.com/jemalloc/jemalloc.git
cd jemalloc || exit 1
git checkout 5.2.1
./autogen.sh
./configure --prefix="${workspace}/jemalloc-bin"
make
make install
# Activate the conda environment used for benchmarking.
# NOTE(review): `source activate` is the legacy conda form; newer conda
# versions use `conda activate env_name`.
source activate env_name

# oneDNN: allow up to 1024 cached primitives, avoiding re-creation cost
# across repeated inference iterations.
export DNNL_PRIMITIVE_CACHE_CAPACITY=1024
# OpenMP (Intel runtime): threads spin-wait 1 ms after completing work
# before sleeping — low value suits back-to-back parallel regions.
export KMP_BLOCKTIME=1
# Pin OpenMP threads to cores for stable, reproducible measurements.
export KMP_AFFINITY=granularity=fine,compact,1,0

# Preload the jemalloc library built above so it replaces the default
# allocator for the benchmark process. Quoted: ${workspace} may contain
# spaces.
jemalloc_lib="${workspace}/jemalloc-bin/lib/libjemalloc.so"
export LD_PRELOAD="$jemalloc_lib"
# jemalloc tuning: background purging thread, transparent huge pages for
# metadata, and very long dirty/muzzy decay so freed pages are retained
# and reused instead of being returned to the OS between iterations.
export MALLOC_CONF="oversize_threshold:1,background_thread:true,metadata_thp:auto,dirty_decay_ms:9000000000,muzzy_decay_ms:9000000000"
# Launch template — replace CMD...... with one of the python commands
# below. ${CORES} is the number of cores to use for OpenMP;
# ${LAST_CORE} is CORES - 1 (cores are numbered from 0). numactl binds
# the process to cores 0..LAST_CORE and to the memory of NUMA node 0.
OMP_NUM_THREADS=${CORES} numactl -C 0-${LAST_CORE} -m 0 CMD......
# Example benchmark runs (substitute one of these for CMD...... above).
# Each pair runs the same model/dataset with and without the
# --use-sparse-tensor flag.
# GCN on Reddit:
python -u inference_benchmark.py --datasets=Reddit --models=gcn --eval-batch-sizes=512 --num-layers=2 --num-hidden-channels=64
python -u inference_benchmark.py --datasets=Reddit --models=gcn --eval-batch-sizes=512 --num-layers=2 --num-hidden-channels=64 --use-sparse-tensor
# GraphSAGE on ogbn-products (requires the ogb package installed above):
python -u inference_benchmark.py --datasets=ogbn-products --models=sage --eval-batch-sizes=512 --num-layers=2 --num-hidden-channels=64
python -u inference_benchmark.py --datasets=ogbn-products --models=sage --eval-batch-sizes=512 --num-layers=2 --num-hidden-channels=64 --use-sparse-tensor