Home > Workload Solutions > Oracle > Guides > Design Guide — Oracle Big Data SQL on Dell EMC PowerFlex > Installing and loading TPCH data using DBGEN
This section describes how we installed and loaded TPCH data using the DBGEN tool.
[root@hadoop-namenode hadoop] # unzip 05bc7108-2175-4a4b-8e50-b9443ab82e22-tpc-h-tool.zip
[root@hadoop-namenode hadoop] # ls
05bc7108-2175-4a4b-8e50-b9443ab82e22-tpc-h-tool.zip 2.18.0_rc2 lost+found
[root@hadoop-namenode dbgen] # vi makefile.suite
################
## CHANGE NAME OF ANSI COMPILER HERE
################
CC = gcc
# Current values for DATABASE are: INFORMIX, DB2, TDAT (Teradata)
# SQLSERVER, SYBASE, ORACLE
# Current values for MACHINE are: ATT, DOS, HP, IBM, ICL, MVS,
# SGI, SUN, U2200, VMS, LINUX, WIN32
# Current values for WORKLOAD are: TPCH
DATABASE = ORACLE
MACHINE = LINUX
WORKLOAD = TPCH
[root@hadoop-namenode dbgen] # make -f makefile.suite
chmod 755 update_release.sh
./update_release.sh 2 18 0
gcc -g -DDBNAME=\"dss\" -DLINUX -DORACLE -DTPCH -DRNG_TEST -D_FILE_OFFSET_BITS=64 -c -o build.o build.c
gcc -g -DDBNAME=\"dss\" -DLINUX -DORACLE -DTPCH -DRNG_TEST -D_FILE_OFFSET_BITS=64 -c -o driver.o driver.c
gcc -g -DDBNAME=\"dss\" -DLINUX -DORACLE -DTPCH -DRNG_TEST -D_FILE_OFFSET_BITS=64 -c -o bm_utils.o bm_utils.c
gcc -g -DDBNAME=\"dss\" -DLINUX -DORACLE -DTPCH -DRNG_TEST -D_FILE_OFFSET_BITS=64 -c -o rnd.o rnd.c
gcc -g -DDBNAME=\"dss\" -DLINUX -DORACLE -DTPCH -DRNG_TEST -D_FILE_OFFSET_BITS=64 -c -o print.o print.c
gcc -g -DDBNAME=\"dss\" -DLINUX -DORACLE -DTPCH -DRNG_TEST -D_FILE_OFFSET_BITS=64 -c -o load_stub.o load_stub.c
gcc -g -DDBNAME=\"dss\" -DLINUX -DORACLE -DTPCH -DRNG_TEST -D_FILE_OFFSET_BITS=64 -c -o bcd2.o bcd2.c
gcc -g -DDBNAME=\"dss\" -DLINUX -DORACLE -DTPCH -DRNG_TEST -D_FILE_OFFSET_BITS=64 -c -o speed_seed.o speed_seed.c
[root@hadoop-namenode dbgen] # ./dbgen -s 3000 -S 1 -C 1 -v
TPC-H Population Generator (Version 2.12.0)
Copyright Transaction Processing Performance Council 1994 – 2010
Starting to load stage 1 of 8 for suppliers table…
Preloading text … 100%
done.
Starting to load stage 1 of 8 for customers table…done.
Starting to load stage 1 of 8 for orders/lineitem tables…done.
Starting to load stage 1 of 8 for part/partsupplier tables…done.
Generating data for nation tabledone.
Generating data for region tabledone.
[root@hadoop-namenode dbgen] # ./dbgen -s 3000 -S 3 -C 3 -v