Revision 3b371676

View differences:

ffserver.c
@@ -2043,152 +2043,152 @@
         break;
     case HTTPSTATE_SEND_DATA:
         /* find a new packet */
-            /* read a packet from the input stream */
-            if (c->stream->feed)
-                ffm_set_write_index(c->fmt_in,
-                                    c->stream->feed->feed_write_index,
-                                    c->stream->feed->feed_size);
-
-            if (c->stream->max_time &&
-                c->stream->max_time + c->start_time - cur_time < 0)
-                /* We have timed out */
-                c->state = HTTPSTATE_SEND_DATA_TRAILER;
-            else {
-                AVPacket pkt;
-            redo:
-                if (av_read_frame(c->fmt_in, &pkt) < 0) {
-                    if (c->stream->feed && c->stream->feed->feed_opened) {
-                        /* if coming from feed, it means we reached the end of the
-                           ffm file, so must wait for more data */
-                        c->state = HTTPSTATE_WAIT_FEED;
-                        return 1; /* state changed */
-                    } else {
-                        if (c->stream->loop) {
-                            av_close_input_file(c->fmt_in);
-                            c->fmt_in = NULL;
-                            if (open_input_stream(c, "") < 0)
-                                goto no_loop;
-                            goto redo;
-                        } else {
-                        no_loop:
-                            /* must send trailer now because eof or error */
-                            c->state = HTTPSTATE_SEND_DATA_TRAILER;
-                        }
-                    }
+        /* read a packet from the input stream */
+        if (c->stream->feed)
+            ffm_set_write_index(c->fmt_in,
+                                c->stream->feed->feed_write_index,
+                                c->stream->feed->feed_size);
+
+        if (c->stream->max_time &&
+            c->stream->max_time + c->start_time - cur_time < 0)
+            /* We have timed out */
+            c->state = HTTPSTATE_SEND_DATA_TRAILER;
+        else {
+            AVPacket pkt;
+        redo:
+            if (av_read_frame(c->fmt_in, &pkt) < 0) {
+                if (c->stream->feed && c->stream->feed->feed_opened) {
+                    /* if coming from feed, it means we reached the end of the
+                       ffm file, so must wait for more data */
+                    c->state = HTTPSTATE_WAIT_FEED;
+                    return 1; /* state changed */
                 } else {
-                    /* update first pts if needed */
-                    if (c->first_pts == AV_NOPTS_VALUE) {
-                        c->first_pts = av_rescale_q(pkt.dts, c->fmt_in->streams[pkt.stream_index]->time_base, AV_TIME_BASE_Q);
-                        c->start_time = cur_time;
+                    if (c->stream->loop) {
+                        av_close_input_file(c->fmt_in);
+                        c->fmt_in = NULL;
+                        if (open_input_stream(c, "") < 0)
+                            goto no_loop;
+                        goto redo;
+                    } else {
+                    no_loop:
+                        /* must send trailer now because eof or error */
+                        c->state = HTTPSTATE_SEND_DATA_TRAILER;
                     }
-                    /* send it to the appropriate stream */
-                    if (c->stream->feed) {
-                        /* if coming from a feed, select the right stream */
-                        if (c->switch_pending) {
-                            c->switch_pending = 0;
-                            for(i=0;i<c->stream->nb_streams;i++) {
-                                if (c->switch_feed_streams[i] == pkt.stream_index)
-                                    if (pkt.flags & PKT_FLAG_KEY)
-                                        do_switch_stream(c, i);
-                                if (c->switch_feed_streams[i] >= 0)
-                                    c->switch_pending = 1;
-                            }
-                        }
+                }
+            } else {
+                /* update first pts if needed */
+                if (c->first_pts == AV_NOPTS_VALUE) {
+                    c->first_pts = av_rescale_q(pkt.dts, c->fmt_in->streams[pkt.stream_index]->time_base, AV_TIME_BASE_Q);
+                    c->start_time = cur_time;
+                }
+                /* send it to the appropriate stream */
+                if (c->stream->feed) {
+                    /* if coming from a feed, select the right stream */
+                    if (c->switch_pending) {
+                        c->switch_pending = 0;
                         for(i=0;i<c->stream->nb_streams;i++) {
-                            if (c->feed_streams[i] == pkt.stream_index) {
-                                pkt.stream_index = i;
+                            if (c->switch_feed_streams[i] == pkt.stream_index)
                                 if (pkt.flags & PKT_FLAG_KEY)
-                                    c->got_key_frame |= 1 << i;
-                                /* See if we have all the key frames, then
-                                 * we start to send. This logic is not quite
-                                 * right, but it works for the case of a
-                                 * single video stream with one or more
-                                 * audio streams (for which every frame is
-                                 * typically a key frame).
-                                 */
-                                if (!c->stream->send_on_key ||
-                                    ((c->got_key_frame + 1) >> c->stream->nb_streams))
-                                    goto send_it;
-                            }
+                                    do_switch_stream(c, i);
+                            if (c->switch_feed_streams[i] >= 0)
+                                c->switch_pending = 1;
                         }
-                    } else {
-                        AVCodecContext *codec;
-
-                    send_it:
-                        /* specific handling for RTP: we use several
-                           output stream (one for each RTP
-                           connection). XXX: need more abstract handling */
-                        if (c->is_packetized) {
-                            AVStream *st;
-                            /* compute send time and duration */
-                            st = c->fmt_in->streams[pkt.stream_index];
-                            c->cur_pts = av_rescale_q(pkt.dts, st->time_base, AV_TIME_BASE_Q);
-                            if (st->start_time != AV_NOPTS_VALUE)
-                                c->cur_pts -= av_rescale_q(st->start_time, st->time_base, AV_TIME_BASE_Q);
-                            c->cur_frame_duration = av_rescale_q(pkt.duration, st->time_base, AV_TIME_BASE_Q);
+                    }
+                    for(i=0;i<c->stream->nb_streams;i++) {
+                        if (c->feed_streams[i] == pkt.stream_index) {
+                            pkt.stream_index = i;
+                            if (pkt.flags & PKT_FLAG_KEY)
+                                c->got_key_frame |= 1 << i;
+                            /* See if we have all the key frames, then
+                             * we start to send. This logic is not quite
+                             * right, but it works for the case of a
+                             * single video stream with one or more
+                             * audio streams (for which every frame is
+                             * typically a key frame).
+                             */
+                            if (!c->stream->send_on_key ||
+                                ((c->got_key_frame + 1) >> c->stream->nb_streams))
+                                goto send_it;
+                        }
+                    }
+                } else {
+                    AVCodecContext *codec;
+
+                send_it:
+                    /* specific handling for RTP: we use several
+                       output stream (one for each RTP
+                       connection). XXX: need more abstract handling */
+                    if (c->is_packetized) {
+                        AVStream *st;
+                        /* compute send time and duration */
+                        st = c->fmt_in->streams[pkt.stream_index];
+                        c->cur_pts = av_rescale_q(pkt.dts, st->time_base, AV_TIME_BASE_Q);
+                        if (st->start_time != AV_NOPTS_VALUE)
+                            c->cur_pts -= av_rescale_q(st->start_time, st->time_base, AV_TIME_BASE_Q);
+                        c->cur_frame_duration = av_rescale_q(pkt.duration, st->time_base, AV_TIME_BASE_Q);
 #if 0
-                            printf("index=%d pts=%0.3f duration=%0.6f\n",
-                                   pkt.stream_index,
-                                   (double)c->cur_pts /
-                                   AV_TIME_BASE,
-                                   (double)c->cur_frame_duration /
-                                   AV_TIME_BASE);
+                        printf("index=%d pts=%0.3f duration=%0.6f\n",
+                               pkt.stream_index,
+                               (double)c->cur_pts /
+                               AV_TIME_BASE,
+                               (double)c->cur_frame_duration /
+                               AV_TIME_BASE);
 #endif
-                            /* find RTP context */
-                            c->packet_stream_index = pkt.stream_index;
-                            ctx = c->rtp_ctx[c->packet_stream_index];
-                            if(!ctx) {
-                              av_free_packet(&pkt);
-                              break;
-                            }
-                            codec = ctx->streams[0]->codec;
-                            /* only one stream per RTP connection */
-                            pkt.stream_index = 0;
-                        } else {
-                            ctx = &c->fmt_ctx;
-                            /* Fudge here */
-                            codec = ctx->streams[pkt.stream_index]->codec;
-                        }
-
-                        if (c->is_packetized) {
-                            int max_packet_size;
-                            if (c->rtp_protocol == RTSP_PROTOCOL_RTP_TCP)
-                                max_packet_size = RTSP_TCP_MAX_PACKET_SIZE;
-                            else
-                                max_packet_size = url_get_max_packet_size(c->rtp_handles[c->packet_stream_index]);
-                            ret = url_open_dyn_packet_buf(&ctx->pb, max_packet_size);
-                        } else {
-                            ret = url_open_dyn_buf(&ctx->pb);
-                        }
-                        if (ret < 0) {
-                            /* XXX: potential leak */
-                            return -1;
-                        }
-                        if (pkt.dts != AV_NOPTS_VALUE)
-                            pkt.dts = av_rescale_q(pkt.dts,
-                                c->fmt_in->streams[pkt.stream_index]->time_base,
-                                ctx->streams[pkt.stream_index]->time_base);
-                        if (pkt.pts != AV_NOPTS_VALUE)
-                            pkt.pts = av_rescale_q(pkt.pts,
-                                c->fmt_in->streams[pkt.stream_index]->time_base,
-                                ctx->streams[pkt.stream_index]->time_base);
-                        if (av_write_frame(ctx, &pkt))
-                            c->state = HTTPSTATE_SEND_DATA_TRAILER;
-
-                        len = url_close_dyn_buf(ctx->pb, &c->pb_buffer);
-                        c->cur_frame_bytes = len;
-                        c->buffer_ptr = c->pb_buffer;
-                        c->buffer_end = c->pb_buffer + len;
-
-                        codec->frame_number++;
-                        if (len == 0) {
+                        /* find RTP context */
+                        c->packet_stream_index = pkt.stream_index;
+                        ctx = c->rtp_ctx[c->packet_stream_index];
+                        if(!ctx) {
                             av_free_packet(&pkt);
-                            goto redo;
+                            break;
                         }
+                        codec = ctx->streams[0]->codec;
+                        /* only one stream per RTP connection */
+                        pkt.stream_index = 0;
+                    } else {
+                        ctx = &c->fmt_ctx;
+                        /* Fudge here */
+                        codec = ctx->streams[pkt.stream_index]->codec;
+                    }
+
+                    if (c->is_packetized) {
+                        int max_packet_size;
+                        if (c->rtp_protocol == RTSP_PROTOCOL_RTP_TCP)
+                            max_packet_size = RTSP_TCP_MAX_PACKET_SIZE;
+                        else
+                            max_packet_size = url_get_max_packet_size(c->rtp_handles[c->packet_stream_index]);
+                        ret = url_open_dyn_packet_buf(&ctx->pb, max_packet_size);
+                    } else {
+                        ret = url_open_dyn_buf(&ctx->pb);
+                    }
+                    if (ret < 0) {
+                        /* XXX: potential leak */
+                        return -1;
+                    }
+                    if (pkt.dts != AV_NOPTS_VALUE)
+                        pkt.dts = av_rescale_q(pkt.dts,
+                                               c->fmt_in->streams[pkt.stream_index]->time_base,
+                                               ctx->streams[pkt.stream_index]->time_base);
+                    if (pkt.pts != AV_NOPTS_VALUE)
+                        pkt.pts = av_rescale_q(pkt.pts,
+                                               c->fmt_in->streams[pkt.stream_index]->time_base,
+                                               ctx->streams[pkt.stream_index]->time_base);
+                    if (av_write_frame(ctx, &pkt))
+                        c->state = HTTPSTATE_SEND_DATA_TRAILER;
+
+                    len = url_close_dyn_buf(ctx->pb, &c->pb_buffer);
+                    c->cur_frame_bytes = len;
+                    c->buffer_ptr = c->pb_buffer;
+                    c->buffer_end = c->pb_buffer + len;
+
+                    codec->frame_number++;
+                    if (len == 0) {
+                        av_free_packet(&pkt);
+                        goto redo;
                     }
-                    av_free_packet(&pkt);
                 }
+                av_free_packet(&pkt);
             }
+        }
         break;
     default:
     case HTTPSTATE_SEND_DATA_TRAILER:
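
For reference, the timestamp handling in the hunk above is built on av_rescale_q(), which rebases a value between two time bases (input stream time base, output stream time base, or AV_TIME_BASE_Q microseconds). The stand-alone sketch below is not part of this revision; it only illustrates the same call pattern used for pkt.dts/pkt.pts and c->first_pts/c->cur_pts, with made-up time bases and a made-up sample value.

    /* Illustrative sketch only: rebase one timestamp the way the code above
     * does.  The time bases and the input value are hypothetical. */
    #include <stdio.h>
    #include <libavutil/avutil.h>        /* AV_TIME_BASE_Q */
    #include <libavutil/mathematics.h>   /* av_rescale_q() */

    int main(void)
    {
        AVRational in_tb  = { 1, 90000 };  /* hypothetical 90 kHz input time base */
        AVRational out_tb = { 1, 1000 };   /* hypothetical millisecond output time base */
        int64_t dts_in    = 450000;        /* 5 seconds expressed in the input base */

        /* Same pattern as the pkt.dts/pkt.pts rescaling above:
         * av_rescale_q(value, source_time_base, destination_time_base) */
        int64_t dts_out = av_rescale_q(dts_in, in_tb, out_tb);        /* -> 5000 */
        /* Same pattern as c->first_pts/c->cur_pts: rebase to microseconds */
        int64_t dts_us  = av_rescale_q(dts_in, in_tb, AV_TIME_BASE_Q); /* -> 5000000 */

        printf("dts: %lld (in) -> %lld (out tb) / %lld us\n",
               (long long)dts_in, (long long)dts_out, (long long)dts_us);
        return 0;
    }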
