Preparation of input data
Shotgun metagenome
# Fetch the HUMAnN utility-mapping database (full build) into the
# chosen local directory.
humann_databases \
  --download utility_mapping full path/to/directory
# Remove reads that map to the human genome (host decontamination):
# align read pairs against the hg38 Bowtie2 index and keep only pairs
# that fail to align concordantly (--un-conc-gz); alignments themselves
# are discarded (-S /dev/null).
# Fix: quote "$OUTPUT_DIR" so paths containing spaces do not word-split.
bowtie2 -x path/to/bowtie2/hg38_bowtie2/hg38 -p 10 \
  -1 path/to/FASTQ -2 path/to/FASTQ \
  --un-conc-gz "$OUTPUT_DIR/unmap_R%.fq.gz" --fr -I 0 -X 500 -S /dev/null
# Run HUMAnN 3 for functional profiling of the host-depleted reads.
# NOTE(fix): --protein-database must point to the UniRef90 translated-search
# (DIAMOND) database directory, not to map_ko_uniref90.txt.gz — that file is
# a utility mapping applied afterwards with humann_regroup_table.
humann -i path/to/FASTQ -o path/to/output --threads 10 \
  --nucleotide-database path/to/chocophlan \
  --protein-database path/to/uniref
# Regroup UniRef90 gene families to KEGG Orthology using the utility
# mapping downloaded above.
humann_regroup_table -i path/to/output/xx_genefamilies.tsv \
  -c path/to/map_ko_uniref90.txt.gz -o path/to/output/xx_ko.tsv
# Integrate the per-sample results into merged tables.
# Merge MetaPhlAn taxonomic profiles (replace the placeholder with the
# actual per-sample MetaPhlAn output file names or a glob).
merge_metaphlan_tables.py INPUTDIR/xxmetaphlanxx.tsv > Metaphlan_merge.txt
# NOTE(fix): humann_join_tables takes a *directory* as -i and selects the
# files inside it via --file_name; passing a shell glob here would expand
# to multiple arguments and break the invocation.
humann_join_tables -i INPUTDIR \
  -o genefamilies_name.tsv --file_name genefamilies
16S rRNA gene
# Import paired-end demultiplexed FASTQ data (described by a manifest
# file) into a QIIME 2 artifact.
# NOTE(fix): the output was previously named single-end-demux.qza, which is
# misleading for paired-end data and does not match the demux.qza artifact
# consumed by the DADA2 denoising step below.
qiime tools import \
  --type 'SampleData[PairedEndSequencesWithQuality]' \
  --input-path dataset1/mani_file.csv \
  --output-path demux.qza \
  --input-format PairedEndFastqManifestPhred33V2
# Denoise the paired-end reads with DADA2: trim the first 22 bp of each
# read (primer removal), truncate forward reads at 250 bp and reverse
# reads at 240 bp, and emit the feature table, representative sequences,
# and per-sample denoising statistics.
qiime dada2 denoise-paired \
  --i-demultiplexed-seqs demux.qza \
  --p-trim-left-f 22 \
  --p-trim-left-r 22 \
  --p-trunc-len-f 250 \
  --p-trunc-len-r 240 \
  --p-n-threads 10 \
  --verbose \
  --o-table table.qza \
  --o-representative-sequences rep-seqs.qza \
  --o-denoising-stats stats-dada2.qza