Commit 6b0705a

Merge pull request #512 from atsampson/explainntsc
Add some explanation to the NTSC chroma filter code
2 parents e0ba866 + 32da313

tools/ld-chroma-decoder/comb.cpp

Lines changed: 89 additions & 64 deletions
@@ -81,7 +81,7 @@ RGBFrame Comb::decodeFrame(const SourceField &firstField, const SourceField &sec
     // Allocate RGB output buffer
     RGBFrame rgbOutputBuffer;

-    // Interlace the input fields and place in the frame[0]'s raw buffer
+    // Interlace the input fields and place in the frame buffer
     qint32 fieldLine = 0;
     currentFrameBuffer.rawbuffer.clear();
     for (qint32 frameLine = 0; frameLine < frameHeight; frameLine += 2) {
@@ -94,40 +94,15 @@ RGBFrame Comb::decodeFrame(const SourceField &firstField, const SourceField &sec
     currentFrameBuffer.firstFieldPhaseID = firstField.field.fieldPhaseID;
     currentFrameBuffer.secondFieldPhaseID = secondField.field.fieldPhaseID;

-    // 2D or 3D comb filter processing?
-    if (!configuration.use3D) {
-        // 2D comb filter processing
+    // Extract chroma using 1D filter
+    split1D(&currentFrameBuffer);

-        // Perform 1D processing
-        split1D(&currentFrameBuffer);
+    // Extract chroma using 2D filter
+    split2D(&currentFrameBuffer);

-        // Perform 2D processing
-        split2D(&currentFrameBuffer);
-
-        // Split the IQ values
-        splitIQ(&currentFrameBuffer);
-
-        // Copy the current frame to a temporary buffer, so operations on the frame do not
-        // alter the original data
-        tempYiqBuffer = currentFrameBuffer.yiqBuffer;
-
-        // Process the copy of the current frame
-        adjustY(&currentFrameBuffer, tempYiqBuffer);
-        if (configuration.colorlpf) filterIQ(currentFrameBuffer.yiqBuffer);
-        doYNR(tempYiqBuffer);
-        doCNR(tempYiqBuffer);
-
-        // Convert the YIQ result to RGB
-        rgbOutputBuffer = yiqToRgbFrame(tempYiqBuffer);
-    } else {
+    if (configuration.use3D) {
         // 3D comb filter processing

-        // Perform 1D processing
-        split1D(&currentFrameBuffer);
-
-        // Perform 2D processing
-        split2D(&currentFrameBuffer);
-
 #if 1
         // XXX - At present we don't have an implementation of motion detection,
         // which makes this a non-adaptive 3D decoder: it'll give good results
@@ -139,43 +114,55 @@ RGBFrame Comb::decodeFrame(const SourceField &firstField, const SourceField &sec
 #else
         // With motion detection, it would look like this...

-        // Split the IQ values (populates Y)
+        // Demodulate chroma giving I/Q
         splitIQ(&currentFrameBuffer);

+        // Copy the current frame to a temporary buffer, so operations on the frame do not
+        // alter the original data
         tempYiqBuffer = currentFrameBuffer.yiqBuffer;

-        // Process the copy of the current frame (needed for the Y image used by the optical flow)
+        // Extract Y from baseband and I/Q
         adjustY(&currentFrameBuffer, tempYiqBuffer);
+
+        // Post-filter I/Q
         if (configuration.colorlpf) filterIQ(currentFrameBuffer.yiqBuffer);
+
+        // Apply noise reduction
         doYNR(tempYiqBuffer);
         doCNR(tempYiqBuffer);

         opticalFlow.denseOpticalFlow(currentFrameBuffer.yiqBuffer, currentFrameBuffer.kValues);
 #endif

-        // Perform 3D processing
+        // Extract chroma using 3D filter
         split3D(&currentFrameBuffer, &previousFrameBuffer);

-        // Split the IQ values
-        splitIQ(&currentFrameBuffer);
+        // Save the current frame for next time
+        previousFrameBuffer = currentFrameBuffer;
+    }

-        tempYiqBuffer = currentFrameBuffer.yiqBuffer;
+    // Demodulate chroma giving I/Q
+    splitIQ(&currentFrameBuffer);

-        // Process the copy of the current frame (for final output now flow detection has been performed)
-        adjustY(&currentFrameBuffer, tempYiqBuffer);
-        if (configuration.colorlpf) filterIQ(currentFrameBuffer.yiqBuffer);
-        doYNR(tempYiqBuffer);
-        doCNR(tempYiqBuffer);
+    // Copy the current frame to a temporary buffer, so operations on the frame do not
+    // alter the original data
+    tempYiqBuffer = currentFrameBuffer.yiqBuffer;

-        // Convert the YIQ result to RGB
-        rgbOutputBuffer = yiqToRgbFrame(tempYiqBuffer);
+    // Extract Y from baseband and I/Q
+    adjustY(&currentFrameBuffer, tempYiqBuffer);

-        // Overlay the optical flow map if required
-        if (configuration.showOpticalFlowMap) overlayOpticalFlowMap(currentFrameBuffer, rgbOutputBuffer);
+    // Post-filter I/Q
+    if (configuration.colorlpf) filterIQ(currentFrameBuffer.yiqBuffer);

-        // Store the current frame
-        previousFrameBuffer = currentFrameBuffer;
-    }
+    // Apply noise reduction
+    doYNR(tempYiqBuffer);
+    doCNR(tempYiqBuffer);
+
+    // Convert the YIQ result to RGB
+    rgbOutputBuffer = yiqToRgbFrame(tempYiqBuffer);
+
+    // Overlay the optical flow map if required
+    if (configuration.showOpticalFlowMap) overlayOpticalFlowMap(currentFrameBuffer, rgbOutputBuffer);

     // Return the output frame
     return rgbOutputBuffer;
@@ -215,6 +202,11 @@ inline bool Comb::GetLinePhase(FrameBuffer *frameBuffer, qint32 lineNumber)
     return isEvenLine ? isPositivePhaseOnEvenLines : !isPositivePhaseOnEvenLines;
 }

+// Extract chroma into clpbuffer[0] using a 1D bandpass filter.
+//
+// The filter is [0.5, 0, -1.0, 0, 0.5], a gentle bandpass centred on fSC, with
+// a gain of 2. So the output will contain all of the chroma signal, but also
+// whatever luma components ended up in the same frequency range.
 void Comb::split1D(FrameBuffer *frameBuffer)
 {
     for (qint32 lineNumber = videoParameters.firstActiveFrameLine; lineNumber < videoParameters.lastActiveFrameLine; lineNumber++) {
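
To make the new comment concrete, here is a minimal, self-contained C++ sketch of that [0.5, 0, -1.0, 0, 0.5] filter. It is not the ld-decode implementation; the bandpass1D name and the test signal are invented for illustration, and it assumes (as the 910-sample line width used elsewhere in this file suggests) that the composite signal is sampled at four times fSC, so the subcarrier repeats every four samples.

#include <cmath>
#include <cstdio>
#include <vector>

// out[i] = 0.5*in[i-2] - in[i] + 0.5*in[i+2], i.e. the [0.5, 0, -1.0, 0, 0.5]
// bandpass described in the comment above.
static std::vector<double> bandpass1D(const std::vector<double> &in)
{
    std::vector<double> out(in.size(), 0.0);
    for (size_t i = 2; i + 2 < in.size(); i++) {
        out[i] = 0.5 * in[i - 2] - in[i] + 0.5 * in[i + 2];
    }
    return out;
}

int main()
{
    const double pi = 3.14159265358979323846;

    // Flat luma (DC) plus a unit-amplitude subcarrier with a 4-sample period
    std::vector<double> line(64);
    for (size_t i = 0; i < line.size(); i++) {
        line[i] = 0.3 + std::cos(pi * i / 2.0);
    }

    std::vector<double> chroma = bandpass1D(line);

    // The flat luma cancels exactly; the subcarrier comes out with gain 2
    // (and inverted), so |chroma[i]| peaks at 2.0 rather than 1.0.
    std::printf("in[32] = %.3f, out[32] = %.3f\n", line[32], chroma[32]);
    return 0;
}

Low-frequency luma is rejected because the three non-zero taps sum to zero; luma energy near fSC still leaks through, which is exactly the caveat the comment makes.
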
@@ -230,14 +222,26 @@ void Comb::split1D(FrameBuffer *frameBuffer)
     }
 }

-// This could do with an explaination of what it is doing...
+// Extract chroma into clpbuffer[1] using a 2D 3-line adaptive filter.
+//
+// Because the phase of the chroma signal changes by 180 degrees from line to
+// line, subtracting two adjacent lines that contain the same information will
+// give you just the chroma signal. But real images don't necessarily contain
+// the same information on every line.
+//
+// The "3-line adaptive" part means that we look at both surrounding lines to
+// estimate how similar they are to this one. We can then compute the 2D chroma
+// value as a blend of the two differences, weighted by similarity.
+//
+// We could do this using the input signal directly, but in fact we use the
+// output of split1D, which has already had most of the luma signal removed.
 void Comb::split2D(FrameBuffer *frameBuffer)
 {
-    // Dummy black line.
+    // Dummy black line
     static constexpr qreal blackLine[911] = {0};

     for (qint32 lineNumber = videoParameters.firstActiveFrameLine; lineNumber < videoParameters.lastActiveFrameLine; lineNumber++) {
-        // Get pointers to the surrounding lines.
+        // Get pointers to the surrounding lines of 1D chroma.
         // If a line we need is outside the active area, use blackLine instead.
         const qreal *previousLine = blackLine;
         if (lineNumber - 2 >= videoParameters.firstActiveFrameLine) {
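
The line-to-line phase flip that the new split2D comment relies on can be checked with a toy example. The sketch below is not ld-decode code; the two synthetic lines and their values are invented. It builds two lines carrying the same picture content with the subcarrier 180 degrees apart, and shows that half their difference is exactly the chroma, which is the ideal case the adaptive weighting then has to approximate on real images.

#include <cmath>
#include <cstdio>
#include <vector>

int main()
{
    const double pi = 3.14159265358979323846;
    const int width = 16;

    std::vector<double> lineA(width), lineB(width);
    for (int h = 0; h < width; h++) {
        const double luma = 0.5;                           // identical picture content
        const double chroma = 0.25 * std::cos(pi * h / 2.0);
        lineA[h] = luma + chroma;                          // subcarrier phase 0
        lineB[h] = luma - chroma;                          // 180 degrees on the next line
    }

    // Half the difference of the two lines: the shared luma cancels and only
    // the chroma of lineA remains.
    for (int h = 0; h < width; h++) {
        const double recovered = (lineA[h] - lineB[h]) / 2.0;
        std::printf("h=%2d  chroma=%6.3f  recovered=%6.3f\n",
                    h, 0.25 * std::cos(pi * h / 2.0), recovered);
    }
    return 0;
}
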
@@ -249,16 +253,15 @@ void Comb::split2D(FrameBuffer *frameBuffer)
             nextLine = frameBuffer->clpbuffer[0].pixel[lineNumber + 2];
         }

-        // 2D filtering.
         for (qint32 h = videoParameters.activeVideoStart; h < videoParameters.activeVideoEnd; h++) {
-            qreal tc1;
-
             qreal kp, kn;

-            kp = fabs(fabs(currentLine[h]) - fabs(previousLine[h])); // - fabs(c1line[h] * .20);
+            // Estimate similarity to the previous and next lines
+            // (with a penalty if this is also a horizontal transition)
+            kp = fabs(fabs(currentLine[h]) - fabs(previousLine[h]));
             kp += fabs(fabs(currentLine[h - 1]) - fabs(previousLine[h - 1]));
             kp -= (fabs(currentLine[h]) + fabs(currentLine[h - 1])) * .10;
-            kn = fabs(fabs(currentLine[h]) - fabs(nextLine[h])); // - fabs(c1line[h] * .20);
+            kn = fabs(fabs(currentLine[h]) - fabs(nextLine[h]));
             kn += fabs(fabs(currentLine[h - 1]) - fabs(nextLine[h - 1]));
             kn -= (fabs(currentLine[h]) + fabs(nextLine[h - 1])) * .10;

@@ -272,29 +275,49 @@ void Comb::split2D(FrameBuffer *frameBuffer)
             qreal sc = 1.0;

             if ((kn > 0) || (kp > 0)) {
+                // At least one of the next/previous lines is pretty similar to this one.
+
+                // If one of them is much better than the other, just use that one
                 if (kn > (3 * kp)) kp = 0;
                 else if (kp > (3 * kn)) kn = 0;

-                sc = (2.0 / (kn + kp));// * max(kn * kn, kp * kp);
+                sc = (2.0 / (kn + kp));
                 if (sc < 1.0) sc = 1.0;
             } else {
+                // Both the next/previous lines are different.
+
+                // But are they similar to each other? If so, we can use both of them!
                 if ((fabs(fabs(previousLine[h]) - fabs(nextLine[h])) - fabs((nextLine[h] + previousLine[h]) * .2)) <= 0) {
                     kn = kp = 1;
                 }
+
+                // Else kn = kp = 0, so we won't extract any chroma for this sample.
+                // (Some NTSC decoders fall back to the 1D chroma in this situation.)
             }

-            tc1 = ((frameBuffer->clpbuffer[0].pixel[lineNumber][h] - previousLine[h]) * kp * sc);
-            tc1 += ((frameBuffer->clpbuffer[0].pixel[lineNumber][h] - nextLine[h]) * kn * sc);
-            tc1 /= 8; //(2 * 2);
+            // Compute the weighted sum of differences, giving the 2D chroma value
+            qreal tc1;
+            tc1 = ((currentLine[h] - previousLine[h]) * kp * sc);
+            tc1 += ((currentLine[h] - nextLine[h]) * kn * sc);
+            tc1 /= 8;

-            // Record the 2D C value
             frameBuffer->clpbuffer[1].pixel[lineNumber][h] = tc1;
         }
     }
 }

-// This could do with an explaination of what it is doing...
-// Only apply 3D processing to stationary pixels
+// Extract chroma into clpbuffer[2] using a 3D filter.
+//
+// This is like the 2D filtering above, except now we're looking at the
+// same sample in the previous *frame* -- and since there are an odd number of
+// lines in an NTSC frame, the subcarrier phase is also 180 degrees different
+// from the current sample. So if the previous frame carried the same
+// information in this sample, subtracting the two samples will give us just
+// the chroma again.
+//
+// And as with 2D filtering, real video can have differences between frames, so
+// we need to make an adaptive choice whether to use this or drop back to the
+// 2D result (which is done in splitIQ below).
 void Comb::split3D(FrameBuffer *currentFrame, FrameBuffer *previousFrame)
 {
     // If there is no previous frame data (i.e. this is the first frame), use the current frame.
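
The frame-to-frame phase inversion that the new split3D comment describes falls out of the NTSC line and subcarrier counts, and is easy to verify numerically. The sketch below is not ld-decode code; the variable names and sample values are invented. It shows that 227.5 subcarrier cycles per line times 525 lines per frame leaves a half-cycle offset, so for a static sample the picture content cancels between frames while the chroma survives.

#include <cmath>
#include <cstdio>

int main()
{
    const double cyclesPerLine = 227.5;   // NTSC subcarrier cycles per line
    const int linesPerFrame = 525;

    const double cyclesPerFrame = cyclesPerLine * linesPerFrame;           // 119437.5
    const double fraction = cyclesPerFrame - std::floor(cyclesPerFrame);   // 0.5

    std::printf("subcarrier cycles per frame = %.1f\n", cyclesPerFrame);
    std::printf("frame-to-frame phase offset = %.0f degrees\n", fraction * 360.0);

    // Consequence for a static sample: subtracting the previous frame cancels
    // the picture content and leaves (twice) the chroma.
    const double luma = 0.5, chroma = 0.2;
    const double thisFrame = luma + chroma;
    const double prevFrame = luma - chroma;   // same content, subcarrier inverted
    std::printf("(thisFrame - prevFrame) / 2 = %.2f\n", (thisFrame - prevFrame) / 2.0);
    return 0;
}
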
@@ -331,6 +354,8 @@ void Comb::splitIQ(FrameBuffer *frameBuffer)
             qreal cavg = frameBuffer->clpbuffer[1].pixel[lineNumber][h]; // 2D C average

             if (configuration.use3D && frameBuffer->kValues.size() != 0) {
+                // 3D mode -- compute a weighted sum of the 2D and 3D chroma values
+
                 // The motionK map returns K (0 for stationary pixels to 1 for moving pixels)
                 cavg = frameBuffer->clpbuffer[1].pixel[lineNumber][h] * frameBuffer->kValues[(lineNumber * 910) + h]; // 2D mix
                 cavg += frameBuffer->clpbuffer[2].pixel[lineNumber][h] * (1 - frameBuffer->kValues[(lineNumber * 910) + h]); // 3D mix
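
The two context lines above are the motion-weighted mix that the new comment names. A stripped-down sketch of the same blend follows (not ld-decode code: the mixChroma helper and the sample values are hypothetical). K comes from the optical-flow map, 0 for a stationary pixel and 1 for a moving one, so stationary pixels take the 3D (frame-difference) estimate and moving pixels fall back to the 2D (line-difference) one.

#include <cstdio>

// cavg = chroma2D * k + chroma3D * (1 - k), as in the two context lines above.
static double mixChroma(double chroma2D, double chroma3D, double k)
{
    return chroma2D * k + chroma3D * (1.0 - k);
}

int main()
{
    // Hypothetical chroma estimates for one sample
    const double chroma2D = 0.18;   // from the 2D (line-difference) filter
    const double chroma3D = 0.21;   // from the 3D (frame-difference) filter

    std::printf("stationary (k=0.0): %.3f\n", mixChroma(chroma2D, chroma3D, 0.0));
    std::printf("moving     (k=1.0): %.3f\n", mixChroma(chroma2D, chroma3D, 1.0));
    std::printf("in between (k=0.4): %.3f\n", mixChroma(chroma2D, chroma3D, 0.4));
    return 0;
}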
