/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */
#include "modules/video_coding/main/source/receiver.h"

#include <assert.h>

#include "modules/video_coding/main/interface/video_coding.h"
#include "modules/video_coding/main/source/encoded_frame.h"
#include "modules/video_coding/main/source/internal_defines.h"
#include "modules/video_coding/main/source/media_opt_util.h"
#include "modules/video_coding/main/source/tick_time_base.h"
#include "system_wrappers/interface/trace.h"
namespace webrtc {
// Constructs a receiver in the passive state.
// |timing| and |clock| are borrowed, not owned. |master| selects the
// primary receiver; a non-master instance acts as the dual (recovery)
// receiver and stays passive until activated.
VCMReceiver::VCMReceiver(VCMTiming& timing,
                         TickTimeBase* clock,
                         WebRtc_Word32 vcmId,
                         WebRtc_Word32 receiverId,
                         bool master)
    : _critSect(CriticalSectionWrapper::CreateCriticalSection()),
      _vcmId(vcmId),
      _clock(clock),
      _receiverId(receiverId),
      _master(master),
      _jitterBuffer(_clock, vcmId, receiverId, master),
      _timing(timing),
      // NOTE(review): reference member bound to a heap allocation; the
      // destructor releases it via `delete &_renderWaitEvent`.
      _renderWaitEvent(*new VCMEvent()),
      _state(kPassive) {}
niklase@google.com470e71d2011-07-07 08:21:25 +000038
VCMReceiver::~VCMReceiver()
{
    // Wake any thread blocked in FrameForRendering() before tearing down.
    _renderWaitEvent.Set();
    // _renderWaitEvent is a reference to a heap-allocated VCMEvent (see the
    // constructor), so it must be deleted through its address.
    delete &_renderWaitEvent;
    delete _critSect;
}
45
henrik.lundin@webrtc.orgbaf6db52011-11-02 18:58:39 +000046void
47VCMReceiver::Reset()
niklase@google.com470e71d2011-07-07 08:21:25 +000048{
49 CriticalSectionScoped cs(_critSect);
50 if (!_jitterBuffer.Running())
51 {
52 _jitterBuffer.Start();
53 }
54 else
55 {
56 _jitterBuffer.Flush();
57 }
58 _renderWaitEvent.Reset();
59 if (_master)
60 {
61 _state = kReceiving;
62 }
63 else
64 {
65 _state = kPassive;
henrik.lundin@webrtc.orgbaf6db52011-11-02 18:58:39 +000066 }
67}
68
// Resets the receiver to a clean state. The dual (non-master) receiver
// additionally has NACK disabled by default. Always returns VCM_OK.
WebRtc_Word32
VCMReceiver::Initialize()
{
    CriticalSectionScoped cs(_critSect);
    Reset();
    if (!_master)
    {
        SetNackMode(kNoNack);
    }
    return VCM_OK;
}
80
// Forwards the round-trip-time estimate (in milliseconds) to the jitter
// buffer, which uses it for its NACK decisions.
void VCMReceiver::UpdateRtt(WebRtc_UWord32 rtt)
{
    _jitterBuffer.UpdateRtt(rtt);
}
85
86WebRtc_Word32
87VCMReceiver::InsertPacket(const VCMPacket& packet,
88 WebRtc_UWord16 frameWidth,
89 WebRtc_UWord16 frameHeight)
90{
91 // Find an empty frame
92 VCMEncodedFrame *buffer = NULL;
93 const WebRtc_Word32 error = _jitterBuffer.GetFrame(packet, buffer);
94 if (error == VCM_OLD_PACKET_ERROR)
95 {
96 return VCM_OK;
97 }
stefan@webrtc.org91c63082012-01-31 10:49:08 +000098 else if (error != VCM_OK)
niklase@google.com470e71d2011-07-07 08:21:25 +000099 {
100 return error;
101 }
stefan@webrtc.org91c63082012-01-31 10:49:08 +0000102 assert(buffer);
niklase@google.com470e71d2011-07-07 08:21:25 +0000103 {
104 CriticalSectionScoped cs(_critSect);
105
106 if (frameWidth && frameHeight)
107 {
108 buffer->SetEncodedSize(static_cast<WebRtc_UWord32>(frameWidth),
109 static_cast<WebRtc_UWord32>(frameHeight));
110 }
111
112 if (_master)
113 {
114 // Only trace the primary receiver to make it possible
115 // to parse and plot the trace file.
mikhal@webrtc.orgf13388f2011-11-22 22:57:51 +0000116 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
117 VCMId(_vcmId, _receiverId),
118 "Packet seqNo %u of frame %u at %u",
119 packet.seqNum, packet.timestamp,
henrik.lundin@webrtc.org7d8c72e2011-12-21 15:24:01 +0000120 MaskWord64ToUWord32(_clock->MillisecondTimestamp()));
niklase@google.com470e71d2011-07-07 08:21:25 +0000121 }
122
henrik.lundin@webrtc.org7d8c72e2011-12-21 15:24:01 +0000123 const WebRtc_Word64 nowMs = _clock->MillisecondTimestamp();
niklase@google.com470e71d2011-07-07 08:21:25 +0000124
125 WebRtc_Word64 renderTimeMs = _timing.RenderTimeMs(packet.timestamp, nowMs);
126
127 if (renderTimeMs < 0)
128 {
129 // Render time error. Assume that this is due to some change in
130 // the incoming video stream and reset the JB and the timing.
131 _jitterBuffer.Flush();
henrik.lundin@webrtc.org7d8c72e2011-12-21 15:24:01 +0000132 _timing.Reset(_clock->MillisecondTimestamp());
mikhal@webrtc.orgf13388f2011-11-22 22:57:51 +0000133 return VCM_FLUSH_INDICATOR;
niklase@google.com470e71d2011-07-07 08:21:25 +0000134 }
135 else if (renderTimeMs < nowMs - kMaxVideoDelayMs)
136 {
137 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
138 "This frame should have been rendered more than %u ms ago."
139 "Flushing jitter buffer and resetting timing.", kMaxVideoDelayMs);
140 _jitterBuffer.Flush();
henrik.lundin@webrtc.org7d8c72e2011-12-21 15:24:01 +0000141 _timing.Reset(_clock->MillisecondTimestamp());
mikhal@webrtc.orgf13388f2011-11-22 22:57:51 +0000142 return VCM_FLUSH_INDICATOR;
niklase@google.com470e71d2011-07-07 08:21:25 +0000143 }
144 else if (_timing.TargetVideoDelay() > kMaxVideoDelayMs)
145 {
146 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
147 "More than %u ms target delay. Flushing jitter buffer and resetting timing.",
148 kMaxVideoDelayMs);
149 _jitterBuffer.Flush();
henrik.lundin@webrtc.org7d8c72e2011-12-21 15:24:01 +0000150 _timing.Reset(_clock->MillisecondTimestamp());
mikhal@webrtc.orgf13388f2011-11-22 22:57:51 +0000151 return VCM_FLUSH_INDICATOR;
niklase@google.com470e71d2011-07-07 08:21:25 +0000152 }
153
154 // First packet received belonging to this frame.
155 if (buffer->Length() == 0)
156 {
henrik.lundin@webrtc.org7d8c72e2011-12-21 15:24:01 +0000157 const WebRtc_Word64 nowMs = _clock->MillisecondTimestamp();
niklase@google.com470e71d2011-07-07 08:21:25 +0000158 if (_master)
159 {
160 // Only trace the primary receiver to make it possible to parse and plot the trace file.
161 WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
162 "First packet of frame %u at %u", packet.timestamp,
163 MaskWord64ToUWord32(nowMs));
164 }
165 renderTimeMs = _timing.RenderTimeMs(packet.timestamp, nowMs);
166 if (renderTimeMs >= 0)
167 {
168 buffer->SetRenderTime(renderTimeMs);
169 }
170 else
171 {
172 buffer->SetRenderTime(nowMs);
173 }
174 }
175
mikhal@webrtc.orgf13388f2011-11-22 22:57:51 +0000176 // Insert packet into the jitter buffer
niklase@google.com470e71d2011-07-07 08:21:25 +0000177 // both media and empty packets
mikhal@webrtc.orgf13388f2011-11-22 22:57:51 +0000178 const VCMFrameBufferEnum
179 ret = _jitterBuffer.InsertPacket(buffer, packet);
180 if (ret == kFlushIndicator) {
181 return VCM_FLUSH_INDICATOR;
182 } else if (ret < 0) {
183 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding,
184 VCMId(_vcmId, _receiverId),
niklase@google.com470e71d2011-07-07 08:21:25 +0000185 "Error inserting packet seqNo=%u, timeStamp=%u",
186 packet.seqNum, packet.timestamp);
mikhal@webrtc.orgf13388f2011-11-22 22:57:51 +0000187 return VCM_JITTER_BUFFER_ERROR;
niklase@google.com470e71d2011-07-07 08:21:25 +0000188 }
189 }
190 return VCM_OK;
191}
192
stefan@webrtc.org91c63082012-01-31 10:49:08 +0000193VCMEncodedFrame* VCMReceiver::FrameForDecoding(WebRtc_UWord16 maxWaitTimeMs,
194 WebRtc_Word64& nextRenderTimeMs,
195 bool renderTiming,
196 VCMReceiver* dualReceiver)
niklase@google.com470e71d2011-07-07 08:21:25 +0000197{
198 // No need to enter the critical section here since the jitter buffer
199 // is thread-safe.
200 FrameType incomingFrameType = kVideoFrameDelta;
201 nextRenderTimeMs = -1;
henrik.lundin@webrtc.org7d8c72e2011-12-21 15:24:01 +0000202 const WebRtc_Word64 startTimeMs = _clock->MillisecondTimestamp();
stefan@webrtc.org912981f2012-10-12 07:04:52 +0000203 WebRtc_Word64 ret = _jitterBuffer.NextTimestamp(maxWaitTimeMs,
204 &incomingFrameType,
205 &nextRenderTimeMs);
niklase@google.com470e71d2011-07-07 08:21:25 +0000206 if (ret < 0)
207 {
208 // No timestamp in jitter buffer at the moment
209 return NULL;
210 }
211 const WebRtc_UWord32 timeStamp = static_cast<WebRtc_UWord32>(ret);
212
213 // Update the timing
stefan@webrtc.org912981f2012-10-12 07:04:52 +0000214 _timing.SetRequiredDelay(_jitterBuffer.EstimatedJitterMs());
niklase@google.com470e71d2011-07-07 08:21:25 +0000215 _timing.UpdateCurrentDelay(timeStamp);
216
217 const WebRtc_Word32 tempWaitTime = maxWaitTimeMs -
henrik.lundin@webrtc.org7d8c72e2011-12-21 15:24:01 +0000218 static_cast<WebRtc_Word32>(_clock->MillisecondTimestamp() - startTimeMs);
niklase@google.com470e71d2011-07-07 08:21:25 +0000219 WebRtc_UWord16 newMaxWaitTime = static_cast<WebRtc_UWord16>(VCM_MAX(tempWaitTime, 0));
220
221 VCMEncodedFrame* frame = NULL;
222
223 if (renderTiming)
224 {
225 frame = FrameForDecoding(newMaxWaitTime, nextRenderTimeMs, dualReceiver);
226 }
227 else
228 {
229 frame = FrameForRendering(newMaxWaitTime, nextRenderTimeMs, dualReceiver);
230 }
231
232 if (frame != NULL)
233 {
234 bool retransmitted = false;
235 const WebRtc_Word64 lastPacketTimeMs =
stefan@webrtc.org912981f2012-10-12 07:04:52 +0000236 _jitterBuffer.LastPacketTime(frame, &retransmitted);
niklase@google.com470e71d2011-07-07 08:21:25 +0000237 if (lastPacketTimeMs >= 0 && !retransmitted)
238 {
239 // We don't want to include timestamps which have suffered from retransmission
240 // here, since we compensate with extra retransmission delay within
241 // the jitter estimate.
242 _timing.IncomingTimestamp(timeStamp, lastPacketTimeMs);
243 }
244 if (dualReceiver != NULL)
245 {
246 dualReceiver->UpdateState(*frame);
247 }
248 }
249 return frame;
250}
251
// Decode-timed frame fetch: tries hard to return a complete frame within
// |maxWaitTimeMs|, falling back to an incomplete frame only when the
// timing module says decoding cannot wait any longer. When an incomplete
// frame may corrupt jitter buffer state, the state is first copied to the
// passive |dualReceiver| (if enabled) so it can recover the stream.
// Returns NULL when it is not yet time to decode.
VCMEncodedFrame*
VCMReceiver::FrameForDecoding(WebRtc_UWord16 maxWaitTimeMs,
                              WebRtc_Word64 nextRenderTimeMs,
                              VCMReceiver* dualReceiver)
{
    // How long can we wait until we must decode the next frame.
    WebRtc_UWord32 waitTimeMs = _timing.MaxWaitingTime(nextRenderTimeMs,
                                                       _clock->MillisecondTimestamp());

    // Try to get a complete frame from the jitter buffer (non-blocking).
    VCMEncodedFrame* frame = _jitterBuffer.GetCompleteFrameForDecoding(0);

    if (frame == NULL && maxWaitTimeMs == 0 && waitTimeMs > 0)
    {
        // If we're not allowed to wait for frames to get complete we must
        // calculate if it's time to decode, and if it's not we will just return
        // for now.
        return NULL;
    }

    if (frame == NULL && VCM_MIN(waitTimeMs, maxWaitTimeMs) == 0)
    {
        // No time to wait for a complete frame,
        // check if we have an incomplete one instead.
        const bool dualReceiverEnabledAndPassive = (dualReceiver != NULL &&
            dualReceiver->State() == kPassive &&
            dualReceiver->NackMode() == kNackInfinite);
        if (dualReceiverEnabledAndPassive &&
            !_jitterBuffer.CompleteSequenceWithNextFrame())
        {
            // Jitter buffer state might get corrupt with this frame.
            // Hand the current state to the dual receiver first.
            dualReceiver->CopyJitterBufferStateFromReceiver(*this);
            frame = _jitterBuffer.GetFrameForDecoding();
            assert(frame);
        } else {
            frame = _jitterBuffer.GetFrameForDecoding();
        }
    }
    if (frame == NULL)
    {
        // Wait for a complete frame (blocks up to maxWaitTimeMs).
        frame = _jitterBuffer.GetCompleteFrameForDecoding(maxWaitTimeMs);
    }
    if (frame == NULL)
    {
        // Get an incomplete frame.
        if (_timing.MaxWaitingTime(nextRenderTimeMs,
                                   _clock->MillisecondTimestamp()) > 0)
        {
            // Still time to wait for a complete frame.
            return NULL;
        }

        // No time left to wait, we must decode this frame now.
        const bool dualReceiverEnabledAndPassive = (dualReceiver != NULL &&
            dualReceiver->State() == kPassive &&
            dualReceiver->NackMode() == kNackInfinite);
        if (dualReceiverEnabledAndPassive &&
            !_jitterBuffer.CompleteSequenceWithNextFrame())
        {
            // Jitter buffer state might get corrupt with this frame.
            dualReceiver->CopyJitterBufferStateFromReceiver(*this);
        }

        frame = _jitterBuffer.GetFrameForDecoding();
    }
    return frame;
}
320
// Render-timed frame fetch: used when the renderer schedules frames at a
// specific time, so the frame is held back as long as possible (decoding
// happens just-in-time for rendering). Blocks on |_renderWaitEvent| for
// the computed wait; the destructor sets the event to unblock shutdown.
// Returns NULL if |maxWaitTimeMs| is too short to cover the wait.
VCMEncodedFrame*
VCMReceiver::FrameForRendering(WebRtc_UWord16 maxWaitTimeMs,
                               WebRtc_Word64 nextRenderTimeMs,
                               VCMReceiver* dualReceiver)
{
    // How long MUST we wait until we must decode the next frame. This is different for the case
    // where we have a renderer which can render at a specified time. Here we must wait as long
    // as possible before giving the frame to the decoder, which will render the frame as soon
    // as it has been decoded.
    WebRtc_UWord32 waitTimeMs = _timing.MaxWaitingTime(nextRenderTimeMs,
                                                       _clock->MillisecondTimestamp());
    if (maxWaitTimeMs < waitTimeMs)
    {
        // If we're not allowed to wait until the frame is supposed to be rendered
        // we will have to return NULL for now.
        return NULL;
    }
    // Wait until it's time to render.
    _renderWaitEvent.Wait(waitTimeMs);

    // Get a complete frame if possible.
    VCMEncodedFrame* frame = _jitterBuffer.GetCompleteFrameForDecoding(0);

    if (frame == NULL)
    {
        // Get an incomplete frame.
        const bool dualReceiverEnabledAndPassive = dualReceiver != NULL &&
            dualReceiver->State() == kPassive &&
            dualReceiver->NackMode() == kNackInfinite;
        if (dualReceiverEnabledAndPassive && !_jitterBuffer.CompleteSequenceWithNextFrame())
        {
            // Jitter buffer state might get corrupt with this frame.
            // Hand the current state to the dual receiver first.
            dualReceiver->CopyJitterBufferStateFromReceiver(*this);
        }

        frame = _jitterBuffer.GetFrameForDecoding();
    }
    return frame;
}
360
// Returns a frame obtained from FrameForDecoding() back to the jitter
// buffer so its slot can be reused.
void
VCMReceiver::ReleaseFrame(VCMEncodedFrame* frame)
{
    _jitterBuffer.ReleaseFrame(frame);
}
366
// Reports the incoming bit rate (kbps) and frame rate (fps) as measured
// by the jitter buffer. Always returns 0.
WebRtc_Word32
VCMReceiver::ReceiveStatistics(WebRtc_UWord32& bitRate, WebRtc_UWord32& frameRate)
{
    _jitterBuffer.IncomingRateStatistics(&frameRate, &bitRate);
    bitRate /= 1000; // Should be in kbps
    return 0;
}
374
// Fills |frameCount| with the number of delta and key frames received,
// as counted by the jitter buffer. Always returns 0.
WebRtc_Word32
VCMReceiver::ReceivedFrameCount(VCMFrameCount& frameCount) const
{
    _jitterBuffer.FrameStatistics(&frameCount.numDeltaFrames,
                                  &frameCount.numKeyFrames);
    return 0;
}
382
stefan@webrtc.org791eec72011-10-11 07:53:43 +0000383WebRtc_UWord32 VCMReceiver::DiscardedPackets() const {
stefan@webrtc.org912981f2012-10-12 07:04:52 +0000384 return _jitterBuffer.num_discarded_packets();
stefan@webrtc.org791eec72011-10-11 07:53:43 +0000385}
386
niklase@google.com470e71d2011-07-07 08:21:25 +0000387void
388VCMReceiver::SetNackMode(VCMNackMode nackMode)
389{
390 CriticalSectionScoped cs(_critSect);
stefan@webrtc.org932ab182011-11-29 11:33:31 +0000391 // Default to always having NACK enabled in hybrid mode.
392 _jitterBuffer.SetNackMode(nackMode, kLowRttNackMs, -1);
niklase@google.com470e71d2011-07-07 08:21:25 +0000393 if (!_master)
394 {
395 _state = kPassive; // The dual decoder defaults to passive
396 }
397}
398
// Returns the NACK mode currently configured in the jitter buffer.
VCMNackMode
VCMReceiver::NackMode() const
{
    CriticalSectionScoped cs(_critSect);
    return _jitterBuffer.nack_mode();
}
405
406VCMNackStatus
407VCMReceiver::NackList(WebRtc_UWord16* nackList, WebRtc_UWord16& size)
408{
409 bool extended = false;
410 WebRtc_UWord16 nackListSize = 0;
stefan@webrtc.org912981f2012-10-12 07:04:52 +0000411 WebRtc_UWord16* internalNackList = _jitterBuffer.CreateNackList(
412 &nackListSize, &extended);
niklase@google.com470e71d2011-07-07 08:21:25 +0000413 if (internalNackList == NULL && nackListSize == 0xffff)
414 {
415 // This combination is used to trigger key frame requests.
416 size = 0;
417 return kNackKeyFrameRequest;
418 }
419 if (nackListSize > size)
420 {
421 size = nackListSize;
422 return kNackNeedMoreMemory;
423 }
stefan@webrtc.org8e506932012-01-19 12:30:21 +0000424 if (internalNackList != NULL && nackListSize > 0) {
425 memcpy(nackList, internalNackList, nackListSize * sizeof(WebRtc_UWord16));
426 }
niklase@google.com470e71d2011-07-07 08:21:25 +0000427 size = nackListSize;
428 return kNackOk;
429}
430
431// Decide whether we should change decoder state. This should be done if the dual decoder
432// has caught up with the decoder decoding with packet losses.
433bool
434VCMReceiver::DualDecoderCaughtUp(VCMEncodedFrame* dualFrame, VCMReceiver& dualReceiver) const
435{
436 if (dualFrame == NULL)
437 {
438 return false;
439 }
440 if (_jitterBuffer.LastDecodedTimestamp() == dualFrame->TimeStamp())
441 {
442 dualReceiver.UpdateState(kWaitForPrimaryDecode);
443 return true;
444 }
445 return false;
446}
447
// Copies |receiver|'s jitter buffer state into this receiver's jitter
// buffer, used when handing the stream over to the dual receiver.
void
VCMReceiver::CopyJitterBufferStateFromReceiver(const VCMReceiver& receiver)
{
    _jitterBuffer.CopyFrom(receiver._jitterBuffer);
}
453
// Returns the receiver's current state under the critical section.
VCMReceiverState
VCMReceiver::State() const
{
    CriticalSectionScoped cs(_critSect);
    return _state;
}
460
// Transitions the receiver to |newState|. A passive receiver may not go
// directly to kWaitForPrimaryDecode (asserted below).
void
VCMReceiver::UpdateState(VCMReceiverState newState)
{
    CriticalSectionScoped cs(_critSect);
    assert(!(_state == kPassive && newState == kWaitForPrimaryDecode));
    // Deliberately disabled check (kReceiving -> kPassive is allowed):
//    assert(!(_state == kReceiving && newState == kPassive));
    _state = newState;
}
469
470void
471VCMReceiver::UpdateState(VCMEncodedFrame& frame)
472{
stefan@webrtc.org912981f2012-10-12 07:04:52 +0000473 if (_jitterBuffer.nack_mode() == kNoNack)
niklase@google.com470e71d2011-07-07 08:21:25 +0000474 {
475 // Dual decoder mode has not been enabled.
476 return;
477 }
478 // Update the dual receiver state
479 if (frame.Complete() && frame.FrameType() == kVideoFrameKey)
480 {
481 UpdateState(kPassive);
482 }
483 if (State() == kWaitForPrimaryDecode &&
484 frame.Complete() && !frame.MissingFrame())
485 {
486 UpdateState(kPassive);
487 }
488 if (frame.MissingFrame() || !frame.Complete())
489 {
490 // State was corrupted, enable dual receiver.
491 UpdateState(kReceiving);
492 }
493}
494
}  // namespace webrtc