forked from LSSTScienceCollaborations/StackClub
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathRe-RunHSC.sh
More file actions
178 lines (130 loc) · 6.98 KB
/
Re-RunHSC.sh
File metadata and controls
178 lines (130 loc) · 6.98 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
# The ":" builtin ignores its arguments, so the single-quoted text below is a
# common shell idiom for a multi-line header comment: it is never executed,
# only skipped over. Do not edit it into separate commands.
: 'HSC Re-Run: Making Forced Photometry Light Curves from Scratch
Owner: **Justin Myles** (@jtmyles)
Last Verified to Run: **2018-09-13**
Verified Stack Release: **16.0**
This project addresses issue #63: HSC Re-run
This shell script runs the command-line tasks from the tutorial at pipelines.lsst.io for analysis
from raw images through source detection and forced photometry measurements. It is an intermediate
step toward the end-goal of making a forced photometry lightcurve in the notebook at
StackClub/ImageProcessing/Re-RunHSC.ipynb
Running this script may take several hours on lsst-lspdev.
Recommended to run with
$ bash Re-RunHSC.sh > output.txt
'
# Setup the LSST Stack environment (eups + lsst_distrib).
source /opt/lsst/software/stack/loadLSST.bash
eups list lsst_distrib
setup lsst_distrib

# I. Setting up the Butler data repository
date
echo "Re-RunHSC INFO: set up the Butler"
# eups-setup the ci_hsc test dataset; presumably this defines $CI_HSC_DIR,
# which is used below — TODO confirm on lsst-lspdev.
setup -j -r /project/shared/data/ci_hsc
DATADIR="/home/$USER/DATA"
mkdir -p "$DATADIR"
# A Butler needs a *mapper* file "to find and organize data in a format specific to each camera."
# We write this file to the data repository so that any instantiated Butler object knows which mapper to use.
echo lsst.obs.hsc.HscMapper > "$DATADIR/_mapper"
# The ingest script creates links in the instantiated butler repository to the original data files
date
echo "Re-RunHSC INFO: ingest images with ingestImages.py"
# The raw/*.fits glob is intentionally unquoted so it expands to every file.
ingestImages.py "$DATADIR" "$CI_HSC_DIR"/raw/*.fits --mode=link

# Grab calibration files
date
echo "Re-RunHSC INFO: obtain calibration files with installTransmissionCurves.py"
installTransmissionCurves.py "$DATADIR"
# -sfn replaces an existing symlink, so the script can safely be re-run
# (plain "ln -s" would fail the second time).
ln -sfn "$CI_HSC_DIR/CALIB/" "$DATADIR/CALIB"
mkdir -p "$DATADIR/ref_cats"
ln -sfn "$CI_HSC_DIR/ps1_pv3_3pi_20170110" "$DATADIR/ref_cats/ps1_pv3_3pi_20170110"
# II. Calibrate a single frame with processCcd.py
date
echo "Re-RunHSC INFO: process raw exposures with processCcd.py"
# Process the ingested raw exposures, writing calibrated outputs to the
# "processCcdOutputs" rerun. An empty --id matches every data id in the
# repository, so all raw frames are processed.
# NOTE(review): per the pipelines.lsst.io tutorial this performs instrument
# signature removal and photometric/astrometric calibration — confirm there.
processCcd.py "$DATADIR" --rerun processCcdOutputs --id
# III. (omitted) Visualize images.

# IV. Make coadds
# IV. A. Make skymap
# A sky map is a tiling of the celestial sphere. It is composed of one or more tracts.
# A tract is composed of one or more overlapping patches. Each tract has a WCS.
# We define a skymap so that we can warp all of the exposures to fit on a single coordinate system
# This is a necessary step for making coadds
date
echo "Re-RunHSC INFO: make skymap with makeDiscreteSkyMap.py"
# Chains processCcdOutputs into a new "coadd" rerun; empty --id uses all data.
makeDiscreteSkyMap.py "$DATADIR" --id --rerun processCcdOutputs:coadd --config skyMap.projection="TAN"

# IV. B. Warp images onto skymap
date
echo "Re-RunHSC INFO: warp images with makeCoaddTempExp.py"
# The same nine patches are warped for each band, so loop over the filters
# instead of duplicating the command.
PATCHES="0,0^0,1^0,2^1,0^1,1^1,2^2,0^2,1^2,2"
for FILTER in HSC-R HSC-I; do
    makeCoaddTempExp.py "$DATADIR" --rerun coadd \
        --selectId filter="$FILTER" \
        --id filter="$FILTER" tract=0 patch="$PATCHES" \
        --config doApplyUberCal=False doApplySkyCorr=False
done
# IV. C. Coadd warped images
# Now that we have warped images, we can perform coaddition to get deeper images
# The motivation for this is to have the deepest image possible for source detection
date
echo "Re-RunHSC INFO: coadd warped images with assembleCoadd.py"
# Identical invocation per band except for the filter name, so loop.
for FILTER in HSC-R HSC-I; do
    assembleCoadd.py "$DATADIR" --rerun coadd \
        --selectId filter="$FILTER" \
        --id filter="$FILTER" tract=0 patch="0,0^0,1^0,2^1,0^1,1^1,2^2,0^2,1^2,2"
done
# V. Measuring Sources
# V. A. Source detection
# As noted above, we do source detection on the deepest image possible.
date
echo "Re-RunHSC INFO: detect objects in the coadd images with detectCoaddSources.py"
# The first call chains the "coadd" rerun into a new "coaddPhot" rerun;
# the second call then writes directly into the already-chained coaddPhot.
detectCoaddSources.py "$DATADIR" --rerun coadd:coaddPhot \
    --id filter=HSC-R tract=0 patch=0,0^0,1^0,2^1,0^1,1^1,2^2,0^2,1^2,2
detectCoaddSources.py "$DATADIR" --rerun coaddPhot \
    --id filter=HSC-I tract=0 patch=0,0^0,1^0,2^1,0^1,1^1,2^2,0^2,1^2,2
# V. B. Merge multi-band detection catalogs
# Ultimately, for photometry, we will need to deblend objects.
# In order to do this, we first merge the detected source catalogs.
date
echo "Re-RunHSC INFO: merge detection catalogs with mergeCoaddDetections.py"
# "^" separates multiple values for one data-id key (both bands at once).
mergeCoaddDetections.py "$DATADIR" --rerun coaddPhot --id filter=HSC-R^HSC-I
# V. C. Measure objects in coadds
# Given a full coaddSource catalog, we can do regular photometry with implicit deblending.
date
echo "Re-RunHSC INFO: measure objects in coadds with measureCoaddSources.py"
# Identical invocation per band except for the filter name, so loop.
for FILTER in HSC-R HSC-I; do
    measureCoaddSources.py "$DATADIR" --rerun coaddPhot --id filter="$FILTER"
done
# V. D. Merge multi-band catalogs from coadds
date
echo "Re-RunHSC INFO: merge measurements from coadds with mergeCoaddMeasurements.py"
# Merge both bands' measurement catalogs into a single reference catalog.
mergeCoaddMeasurements.py "$DATADIR" --rerun coaddPhot --id filter=HSC-R^HSC-I
# V. E. Run forced photometry on coadds
# Given a full source catalog, we can do forced photometry with implicit deblending.
date
echo "Re-RunHSC INFO: perform forced photometry on coadds with forcedPhotCoadd.py"
# The first call chains coaddPhot into a new "coaddForcedPhot" rerun;
# the second writes into the already-chained rerun.
forcedPhotCoadd.py "$DATADIR" --rerun coaddPhot:coaddForcedPhot --id filter=HSC-R
forcedPhotCoadd.py "$DATADIR" --rerun coaddForcedPhot --id filter=HSC-I
# V. F. Run forced photometry on individual exposures
# Given a full source catalog, we can do forced photometry on the individual exposures.
# Note that as of 2018_08_23, the forcedPhotCcd.py task doesn't do deblending,
# which could lead to bad photometry for blended sources.
# This task requires a coadd tract stored in the Butler to grab the appropriate
# coadd catalogs to use as references for forced photometry.
# It has access to this tract because we chain the output from the coaddPhot subdirectory
date
echo "Re-RunHSC INFO: perform forced photometry on individual exposures with forcedPhotCcd.py"
# Both bands use the same config file; hoist the path. Per-band logs
# (stdout+stderr via bash's &>) are parsed below to build data_ids.txt.
CONFIGFILE="/project/shared/data/ci_hsc/forcedPhotCcdConfig.py"
forcedPhotCcd.py "$DATADIR" --rerun coaddPhot:ccdForcedPhot --id filter=HSC-R \
    --clobber-config --configfile="$CONFIGFILE" &> ccd_r.txt
forcedPhotCcd.py "$DATADIR" --rerun ccdForcedPhot --id filter=HSC-I \
    --clobber-config --configfile="$CONFIGFILE" &> ccd_i.txt
# VI. Multi-band catalog analysis
# For analysis of the catalog, see part VI of StackClub/ImageProcessing/Re-RunHSC.ipynb
date
echo "Re-RunHSC INFO: parse output of forcedPhotCcd.py"

#######################################
# Extract the DataIds that have measured forced photometry from one or
# more forcedPhotCcd log files, writing one cleaned id per line (e.g.
# "visit:903334,ccd:16") for use in Re-RunHSC.ipynb.
# Arguments:
#   $1   - output file (e.g. data_ids.txt)
#   $2.. - input log files (e.g. ccd_r.txt ccd_i.txt)
#######################################
parse_data_ids() {
    local out=$1
    shift
    # -H forces the "file:" prefix even for a single input file, so the
    # first sed expression always has a prefix to strip.
    # (The original sed used the class [i,r], which also matched a literal
    # comma, and an unescaped "." — fixed by stripping any "file:" prefix.)
    grep -H 'forcedPhotCcd INFO: Performing forced measurement on DataId' -- "$@" > "$out"
    # Strip the "file:" prefix and the DataId(initialdata={...}, tag=set())
    # wrapper, then drop quotes and spaces, leaving bare key:value pairs.
    sed -i \
        -e 's/^[^:]*:forcedPhotCcd INFO: Performing forced measurement on DataId(initialdata={//' \
        -e 's/}, tag=set())//' \
        -e "s/'//g" \
        -e 's/ //g' "$out"
}

parse_data_ids data_ids.txt ccd_r.txt ccd_i.txt