/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
**********/
// Copyright (c) 1996-2016, Live Networks, Inc.  All rights reserved
// Copyright (c) 2016, Dario Casalinuovo. All rights reserved.


#include "rtsp.h"

#include <AdapterIO.h>

#include "RTSPMediaIO.h"


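// REQUEST_STREAMING_OVER_TCP is passed to sendSetupCommand() below as the
// "streamUsingTCP" argument: False requests plain RTP over UDP, True would
// ask for RTP interleaved over the RTSP TCP connection. RECEIVE_BUFFER_SIZE
// is the size, in bytes, of the frame buffer each sink hands to live555.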
#define REQUEST_STREAMING_OVER_TCP False
#define RECEIVE_BUFFER_SIZE 100000


UsageEnvironment& operator<<(UsageEnvironment& env,
	const RTSPClient& rtspClient)
{
	return env << "[URL:\"" << rtspClient.url() << "\"]: ";
}


UsageEnvironment& operator<<(UsageEnvironment& env,
	const MediaSubsession& subsession)
{
	return env << subsession.mediumName() << "/" << subsession.codecName();
}


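// AdapterSink is the MediaSink that receives each frame live555 delivers
// for a subsession and writes it, unmodified, into the BInputAdapter
// supplied by the HaikuRTSPClient.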
class AdapterSink : public MediaSink
{
public:
			static				AdapterSink* createNew(UsageEnvironment& env,
									MediaSubsession& subsession,
									BInputAdapter* inputAdapter,
									char const* streamId = NULL);

private:
								AdapterSink(UsageEnvironment& env,
									MediaSubsession& subsession,
									char const* streamId,
									BInputAdapter* inputAdapter);

	virtual						~AdapterSink();

			static void			afterGettingFrame(void* clientData,
									unsigned frameSize,
									unsigned numTruncatedBytes,
									struct timeval presentationTime,
									unsigned durationInMicroseconds);

			void				afterGettingFrame(unsigned frameSize,
									unsigned numTruncatedBytes,
									struct timeval presentationTime,
									unsigned durationInMicroseconds);

private:
	// redefined virtual functions:
	virtual Boolean				continuePlaying();

private:
			BInputAdapter*		fInputAdapter;
			u_int8_t*			fReceiveBuffer;
			MediaSubsession&	fSubsession;
			char*				fStreamId;
};

// Implementation of the RTSP 'response handlers':

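// The response handlers below form a chain: continueAfterDESCRIBE() parses
// the SDP description into a MediaSession, setupNextSubsession() initiates
// each subsession and sends a "SETUP" for it, followed by a "PLAY" for the
// whole session, and continueAfterPLAY() finally reports success back to
// the HaikuRTSPClient. Any unrecoverable error ends up in shutdownStream().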
void continueAfterDESCRIBE(RTSPClient* rtspClient,
	int resultCode, char* resultString)
{
	UsageEnvironment& env = rtspClient->envir();
	HaikuRTSPClient* client = (HaikuRTSPClient*) rtspClient;
	do {
		if (resultCode != 0) {
			env << *rtspClient << "Failed to get a SDP description: "
				<< resultString << "\n";
			delete[] resultString;

			break;
		}

		char* const sdpDescription = resultString;
		env << *rtspClient << "Got a SDP description:\n"
			<< sdpDescription << "\n";

		// Create a media session object from this SDP description:
		client->session = MediaSession::createNew(env, sdpDescription);
		delete[] sdpDescription; // because we don't need it anymore
		if (client->session == NULL) {
			env << *rtspClient
				<< "Failed to create a MediaSession object "
					"from the SDP description: "
				<< env.getResultMsg() << "\n";

			break;
		} else if (!client->session->hasSubsessions()) {
			env << *rtspClient << "This session has no media subsessions"
				" (i.e., no \"m=\" lines)\n";

			break;
		}

		// Then, create and set up our data source objects for the session.
		// We do this by iterating over the session's 'subsessions',
		// calling "MediaSubsession::initiate()",
		// and then sending a RTSP "SETUP" command, on each one.
		// (Each 'subsession' will have its own data source.)
		client->iter = new MediaSubsessionIterator(*client->session);
		setupNextSubsession(rtspClient);
		return;
	} while (0);

	// An unrecoverable error occurred with this stream.
	shutdownStream(rtspClient);
}


void setupNextSubsession(RTSPClient* rtspClient)
{
	UsageEnvironment& env = rtspClient->envir();
	HaikuRTSPClient* client = (HaikuRTSPClient*) rtspClient;

	client->subsession = client->iter->next();
	if (client->subsession != NULL) {
		if (!client->subsession->initiate()) {
			env << *rtspClient << "Failed to initiate the \""
				<< *client->subsession << "\" subsession: "
				<< env.getResultMsg() << "\n";

			// give up on this subsession; go to the next one
			setupNextSubsession(rtspClient);
		} else {
			env << *rtspClient << "Initiated the \""
				<< *client->subsession << "\" subsession (";

			if (client->subsession->rtcpIsMuxed()) {
				env << "client port " << client->subsession->clientPortNum();
			} else {
				env << "client ports " << client->subsession->clientPortNum()
					<< "-" << client->subsession->clientPortNum() + 1;
			}
			env << ")\n";

			// Continue setting up this subsession,
			// by sending a RTSP "SETUP" command:
			rtspClient->sendSetupCommand(*client->subsession,
				continueAfterSETUP, False, REQUEST_STREAMING_OVER_TCP);
		}
		return;
	}

	// We've finished setting up all of the subsessions.
	// Now, send a RTSP "PLAY" command to start the streaming:
	if (client->session->absStartTime() != NULL) {
		// Special case: The stream is indexed by 'absolute' time,
		// so send an appropriate "PLAY" command:
		rtspClient->sendPlayCommand(*client->session, continueAfterPLAY,
			client->session->absStartTime(), client->session->absEndTime());
	} else {
		client->duration = client->session->playEndTime()
			- client->session->playStartTime();
		rtspClient->sendPlayCommand(*client->session, continueAfterPLAY);
	}
}


void continueAfterSETUP(RTSPClient* rtspClient,
	int resultCode, char* resultString)
{
	do {
		UsageEnvironment& env = rtspClient->envir();
		HaikuRTSPClient* client = (HaikuRTSPClient*) rtspClient;

		if (resultCode != 0) {
			env << *rtspClient << "Failed to set up the \""
				<< *client->subsession << "\" subsession: "
				<< resultString << "\n";
			break;
		}

		env << *rtspClient << "Set up the \""
			<< *client->subsession << "\" subsession (";
		if (client->subsession->rtcpIsMuxed()) {
			env << "client port " << client->subsession->clientPortNum();
		} else {
			env << "client ports " << client->subsession->clientPortNum()
				<< "-" << client->subsession->clientPortNum() + 1;
		}
		env << ")\n";

		// Having successfully set up the subsession, create a data sink
		// for it, and call "startPlaying()" on it.
		// (This will prepare the data sink to receive data; the actual
		// flow of data from the client won't start happening until later,
		// after we've sent a RTSP "PLAY" command.)

		client->subsession->sink = AdapterSink::createNew(env,
			*client->subsession, client->GetInputAdapter(), rtspClient->url());
		// perhaps use your own custom "MediaSink" subclass instead
		if (client->subsession->sink == NULL) {
			env << *rtspClient << "Failed to create a data sink for the \""
				<< *client->subsession << "\" subsession: "
				<< env.getResultMsg() << "\n";
			break;
		}

		env << *rtspClient << "Created a data sink for the \""
			<< *client->subsession << "\" subsession\n";
		// a hack to let subsession handler functions
		// get the "RTSPClient" from the subsession
		client->subsession->miscPtr = rtspClient;
		client->subsession->sink
			->startPlaying(*(client->subsession->readSource()),
				subsessionAfterPlaying, client->subsession);
		// Also set a handler to be called if a RTCP "BYE"
		// arrives for this subsession:
		if (client->subsession->rtcpInstance() != NULL) {
			client->subsession->rtcpInstance()->setByeHandler(
				subsessionByeHandler,
				client->subsession);
		}
	} while (0);
	delete[] resultString;

	// Set up the next subsession, if any:
	setupNextSubsession(rtspClient);
}


void continueAfterPLAY(RTSPClient* rtspClient,
	int resultCode, char* resultString)
{
	Boolean success = False;
	UsageEnvironment& env = rtspClient->envir();
	HaikuRTSPClient* client = (HaikuRTSPClient*) rtspClient;

	do {
		if (resultCode != 0) {
			env << *rtspClient << "Failed to start playing session: "
				<< resultString << "\n";
			break;
		}

		// Set a timer to be handled at the end of the stream's
		// expected duration (if the stream does not already signal its end
		// using a RTCP "BYE").  This is optional.  If, instead, you want
		// to keep the stream active - e.g., so you can later
		// 'seek' back within it and do another RTSP "PLAY"
		// - then you can omit this code.
		// (Alternatively, if you don't want to receive the entire stream,
		// you could set this timer for some shorter value.)
		if (client->duration > 0) {
			// number of seconds extra to delay,
			// after the stream's expected duration.  (This is optional.)
			unsigned const delaySlop = 2;
			client->duration += delaySlop;
			unsigned uSecsToDelay = (unsigned)(client->duration * 1000000);
			client->streamTimerTask
				= env.taskScheduler().scheduleDelayedTask(uSecsToDelay,
					(TaskFunc*)streamTimerHandler, rtspClient);
		}

		env << *rtspClient << "Started playing session";
		if (client->duration > 0) {
			env << " (for up to " << client->duration << " seconds)";
		}
		env << "...\n";

		success = True;
	} while (0);
	delete[] resultString;

	if (!success) {
		// An unrecoverable error occurred with this stream.
		shutdownStream(rtspClient);
	} else
		client->NotifySucces();
}

// Implementation of the other event handlers:

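// These handlers are invoked from the live555 event loop: when a
// subsession's stream ends, when the server sends an RTCP "BYE", or when
// the stream's expected duration has elapsed. All of them end up calling
// shutdownStream() once nothing is left to play.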
void subsessionAfterPlaying(void* clientData)
{
	MediaSubsession* subsession = (MediaSubsession*)clientData;
	RTSPClient* rtspClient = (RTSPClient*)(subsession->miscPtr);

	// Begin by closing this subsession's stream:
	Medium::close(subsession->sink);
	subsession->sink = NULL;

	// Next, check whether *all* subsessions' streams have now been closed:
	MediaSession& session = subsession->parentSession();
	MediaSubsessionIterator iter(session);
	while ((subsession = iter.next()) != NULL) {
		if (subsession->sink != NULL)
			return; // this subsession is still active
	}

	// All subsessions' streams have now been closed, so shut down the client:
	shutdownStream(rtspClient);
}


void subsessionByeHandler(void* clientData)
{
	MediaSubsession* subsession = (MediaSubsession*)clientData;
	RTSPClient* rtspClient = (RTSPClient*)subsession->miscPtr;
	UsageEnvironment& env = rtspClient->envir();

	env << *rtspClient << "Received RTCP \"BYE\" on \""
		<< *subsession << "\" subsession\n";

	// Now act as if the subsession had closed:
	subsessionAfterPlaying(subsession);
}


void streamTimerHandler(void* clientData)
{
	HaikuRTSPClient* client = (HaikuRTSPClient*)clientData;

	client->streamTimerTask = NULL;

	// Shut down the stream:
	shutdownStream(client);
}


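// Closes every still-active subsession, sends a "TEARDOWN" to the server if
// anything was playing, then closes the RTSPClient itself and notifies the
// HaikuRTSPClient through NotifyError().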
void shutdownStream(RTSPClient* rtspClient, int exitCode)
{
	UsageEnvironment& env = rtspClient->envir();
	HaikuRTSPClient* client = (HaikuRTSPClient*) rtspClient;

	// First, check whether any subsessions have still to be closed:
	if (client->session != NULL) {
		Boolean someSubsessionsWereActive = False;
		MediaSubsessionIterator iter(*client->session);
		MediaSubsession* subsession;

		while ((subsession = iter.next()) != NULL) {
			if (subsession->sink != NULL) {
				Medium::close(subsession->sink);
				subsession->sink = NULL;

				if (subsession->rtcpInstance() != NULL) {
					// in case the server sends a RTCP "BYE"
					// while handling "TEARDOWN"
					subsession->rtcpInstance()->setByeHandler(NULL, NULL);
				}

				someSubsessionsWereActive = True;
			}
		}

		if (someSubsessionsWereActive) {
			// Send a RTSP "TEARDOWN" command,
			// to tell the server to shut down the stream.
			// Don't bother handling the response to the "TEARDOWN".
			rtspClient->sendTeardownCommand(*client->session, NULL);
		}
	}

	env << *rtspClient << "Closing the stream.\n";

	// Notify the listener before closing: Medium::close() reclaims this
	// RTSPClient (and with it this stream's "StreamClientState"), so the
	// client object must not be touched afterwards.
	client->NotifyError();
	Medium::close(rtspClient);
}


AdapterSink* AdapterSink::createNew(UsageEnvironment& env,
	MediaSubsession& subsession, BInputAdapter* inputAdapter,
	char const* streamId)
{
	return new AdapterSink(env, subsession, streamId, inputAdapter);
}


AdapterSink::AdapterSink(UsageEnvironment& env, MediaSubsession& subsession,
	char const* streamId, BInputAdapter* inputAdapter)
	:
	MediaSink(env),
	fInputAdapter(inputAdapter),
	fSubsession(subsession)
{
	fStreamId = strDup(streamId);
	fReceiveBuffer = new u_int8_t[RECEIVE_BUFFER_SIZE];
}


AdapterSink::~AdapterSink()
{
	delete[] fReceiveBuffer;
	delete[] fStreamId;
}


void AdapterSink::afterGettingFrame(void* clientData, unsigned frameSize,
	unsigned numTruncatedBytes, struct timeval presentationTime,
	unsigned durationInMicroseconds)
{
	AdapterSink* sink = (AdapterSink*)clientData;
	sink->afterGettingFrame(frameSize, numTruncatedBytes,
		presentationTime, durationInMicroseconds);
}


void
AdapterSink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
	struct timeval presentationTime, unsigned /*durationInMicroseconds*/)
{
	// Forward the received frame to the input adapter and ask the source
	// for the next one. Frames larger than RECEIVE_BUFFER_SIZE arrive
	// truncated; the dropped bytes (numTruncatedBytes) and the presentation
	// time are currently ignored.
	fInputAdapter->Write(fReceiveBuffer, frameSize);
	continuePlaying();
}


Boolean
AdapterSink::continuePlaying()
{
	if (fSource == NULL)
		return False;

	fSource->getNextFrame(fReceiveBuffer, RECEIVE_BUFFER_SIZE,
		afterGettingFrame, this,
		onSourceClosure, this);
	return True;
}
445