From 550a2af0447bcf7e7be5d5d88f7d258cf9654ec9 Mon Sep 17 00:00:00 2001 From: Wim Taymans Date: Thu, 31 Jan 2019 11:58:35 +0100 Subject: [PATCH] examples: improve examples --- src/examples/audio-src.c | 48 ++++++++++++++++++------ src/examples/export-source.c | 1 - src/examples/sdl.h | 9 +++++ src/examples/video-play.c | 73 +++++++++++++++++++++++++++++------- 4 files changed, 106 insertions(+), 25 deletions(-) diff --git a/src/examples/audio-src.c b/src/examples/audio-src.c index ef8918e7a..86f4f295c 100644 --- a/src/examples/audio-src.c +++ b/src/examples/audio-src.c @@ -41,22 +41,17 @@ struct data { struct pw_main_loop *loop; - - struct pw_core *core; - struct pw_remote *remote; - struct pw_stream *stream; double accumulator; }; -static void fill_f32(struct data *d, void *dest, int avail) +static void fill_f32(struct data *d, void *dest, int n_frames) { float *dst = dest, val; - int n_samples = avail / (sizeof(float) * DEFAULT_CHANNELS); int i, c; - for (i = 0; i < n_samples; i++) { + for (i = 0; i < n_frames; i++) { d->accumulator += M_PI_M2 * 440 / DEFAULT_RATE; if (d->accumulator >= M_PI_M2) d->accumulator -= M_PI_M2; @@ -67,11 +62,21 @@ static void fill_f32(struct data *d, void *dest, int avail) } } +/* our data processing function is in general: + * + * struct pw_buffer *b; + * b = pw_stream_dequeue_buffer(stream); + * + * .. generate stuff in the buffer ... 
+ * + * pw_stream_queue_buffer(stream, b); + */ static void on_process(void *userdata) { struct data *data = userdata; struct pw_buffer *b; struct spa_buffer *buf; + int n_frames, stride; uint8_t *p; if ((b = pw_stream_dequeue_buffer(data->stream)) == NULL) @@ -81,9 +86,14 @@ static void on_process(void *userdata) if ((p = buf->datas[0].data) == NULL) return; - fill_f32(data, p, buf->datas[0].maxsize); + stride = sizeof(float) * DEFAULT_CHANNELS; + n_frames = buf->datas[0].maxsize / stride; - buf->datas[0].chunk->size = buf->datas[0].maxsize; + fill_f32(data, p, n_frames); + + buf->datas[0].chunk->offset = 0; + buf->datas[0].chunk->stride = stride; + buf->datas[0].chunk->size = n_frames * stride; pw_stream_queue_buffer(data->stream, b); } @@ -102,8 +112,21 @@ int main(int argc, char *argv[]) pw_init(&argc, &argv); + /* make a main loop. If you already have another main loop, you can add + * the fd of this pipewire mainloop to it. */ data.loop = pw_main_loop_new(NULL); + /* create a simple stream, the simple stream manages the core and remote + * objects for you if you don't need to deal with them + + * If you plan to autoconnect your stream, you need to provide at least + * media, category and role properties + * + * Pass your events and a user_data pointer as the last arguments. This + * will inform you about the stream state. The most important event + * you need to listen to is the process event where you need to produce + * the data. + */ data.stream = pw_stream_new_simple( pw_main_loop_get_loop(data.loop), "audio-src", @@ -115,14 +138,16 @@ int main(int argc, char *argv[]) &stream_events, &data); - data.remote = pw_stream_get_remote(data.stream); - + /* make one parameter with the supported formats. The SPA_PARAM_EnumFormat + * id means that this is a format enumeration.
*/ params[0] = spa_format_audio_raw_build(&b, SPA_PARAM_EnumFormat, &SPA_AUDIO_INFO_RAW_INIT( .format = SPA_AUDIO_FORMAT_F32, .channels = DEFAULT_CHANNELS, .rate = DEFAULT_RATE )); + /* now connect this stream. We ask that our process function is + * called in a realtime thread. */ pw_stream_connect(data.stream, PW_DIRECTION_OUTPUT, argc > 1 ? (uint32_t)atoi(argv[1]) : SPA_ID_INVALID, @@ -131,6 +156,7 @@ int main(int argc, char *argv[]) PW_STREAM_FLAG_RT_PROCESS, params, 1); + /* and wait */ pw_main_loop_run(data.loop); pw_stream_destroy(data.stream); diff --git a/src/examples/export-source.c b/src/examples/export-source.c index ffdc4ac64..a48d6a79d 100644 --- a/src/examples/export-source.c +++ b/src/examples/export-source.c @@ -56,7 +56,6 @@ struct data { struct pw_remote *remote; struct spa_hook remote_listener; - struct pw_node *node; struct spa_port_info port_info; struct spa_dict port_props; struct spa_dict_item port_items[1]; diff --git a/src/examples/sdl.h b/src/examples/sdl.h index f940eca84..39c0e7f1f 100644 --- a/src/examples/sdl.h +++ b/src/examples/sdl.h @@ -103,14 +103,21 @@ static struct spa_pod *sdl_build_formats(SDL_RendererInfo *info, struct spa_pod_ uint32_t i, c; struct spa_pod_frame f[2]; + /* make an object of type SPA_TYPE_OBJECT_Format and id SPA_PARAM_EnumFormat. + * The object type is important because it defines the properties that are + * acceptable. The id gives more context about what the object is meant to + * contain. In this case we enumerate supported formats.
*/ spa_pod_builder_push_object(b, &f[0], SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat); + /* add media type and media subtype properties */ spa_pod_builder_prop(b, SPA_FORMAT_mediaType, 0); spa_pod_builder_id(b, SPA_MEDIA_TYPE_video); spa_pod_builder_prop(b, SPA_FORMAT_mediaSubtype, 0); spa_pod_builder_id(b, SPA_MEDIA_SUBTYPE_raw); + /* build an enumeration of formats */ spa_pod_builder_prop(b, SPA_FORMAT_VIDEO_format, 0); spa_pod_builder_push_choice(b, &f[1], SPA_CHOICE_Enum, 0); + /* first the formats supported by the textures */ for (i = 0, c = 0; i < info->num_texture_formats; i++) { uint32_t id = sdl_format_to_id(info->texture_formats[i]); if (id == 0) @@ -119,12 +126,14 @@ static struct spa_pod *sdl_build_formats(SDL_RendererInfo *info, struct spa_pod_ spa_pod_builder_id(b, id); spa_pod_builder_id(b, id); } + /* then all the other ones SDL can convert from/to */ for (i = 0; i < SPA_N_ELEMENTS(sdl_video_formats); i++) { uint32_t id = sdl_video_formats[i].id; if (id != SPA_VIDEO_FORMAT_UNKNOWN) spa_pod_builder_id(b, id); } spa_pod_builder_pop(b, &f[1]); + /* add size and framerate ranges */ spa_pod_builder_add(b, SPA_FORMAT_VIDEO_size, SPA_POD_CHOICE_RANGE_Rectangle( &SPA_RECTANGLE(WIDTH, HEIGHT), diff --git a/src/examples/video-play.c b/src/examples/video-play.c index 2352dc51f..d918f6f52 100644 --- a/src/examples/video-play.c +++ b/src/examples/video-play.c @@ -50,10 +50,6 @@ struct data { struct pw_main_loop *loop; - struct pw_core *core; - struct pw_remote *remote; - struct spa_hook remote_listener; - struct pw_stream *stream; struct spa_hook stream_listener; @@ -77,6 +73,15 @@ static void handle_events(struct data *data) } } +/* our data processing function is in general: + * + * struct pw_buffer *b; + * b = pw_stream_dequeue_buffer(stream); + * + * .. do stuff with buffer ... 
+ * + * pw_stream_queue_buffer(stream, b); + */ static void on_process(void *_data) { @@ -103,13 +108,14 @@ on_process(void *_data) handle_events(data); if ((sdata = buf->datas[0].data) == NULL) - return; + goto done; if (SDL_LockTexture(data->texture, NULL, &ddata, &dstride) < 0) { fprintf(stderr, "Couldn't lock texture: %s\n", SDL_GetError()); - return; + goto done; } + /* get the videocrop metadata if any */ if ((mc = spa_buffer_find_meta_data(buf, SPA_META_VideoCrop, sizeof(*mc))) && spa_meta_region_is_valid(mc)) { data->rect.x = mc->region.position.x; @@ -117,6 +123,7 @@ on_process(void *_data) data->rect.w = mc->region.size.width; data->rect.h = mc->region.size.height; } + /* get cursor metadata */ if ((mcs = spa_buffer_find_meta_data(buf, SPA_META_Cursor, sizeof(*mcs))) && spa_meta_cursor_is_valid(mcs)) { struct spa_meta_bitmap *mb; @@ -144,6 +151,7 @@ on_process(void *_data) goto done; } + /* copy the cursor bitmap into the texture */ src = SPA_MEMBER(mb, mb->offset, uint8_t); dst = cdata; ostride = SPA_MIN(cstride, mb->stride); @@ -158,6 +166,7 @@ on_process(void *_data) render_cursor = true; } + /* copy video image in texture */ sstride = buf->datas[0].chunk->stride; ostride = SPA_MIN(sstride, dstride); @@ -171,6 +180,7 @@ on_process(void *_data) SDL_UnlockTexture(data->texture); SDL_RenderClear(data->renderer); + /* now render the video and then the cursor if any */ SDL_RenderCopy(data->renderer, data->texture, &data->rect, NULL); if (render_cursor) { SDL_RenderCopy(data->renderer, data->cursor, NULL, &data->cursor_rect); @@ -191,6 +201,7 @@ static void on_stream_state_changed(void *_data, enum pw_stream_state old, pw_main_loop_quit(data->loop); break; case PW_STREAM_STATE_CONFIGURE: + /* because we started inactive, activate ourselves now */ pw_stream_set_active(data->stream, true); break; default: @@ -198,6 +209,16 @@ static void on_stream_state_changed(void *_data, enum pw_stream_state old, } } +/* Be notified when the stream format changes. 
+ * + * We are now supposed to call pw_stream_finish_format() with success or + * failure, depending on if we can support the format. Because we gave + * a list of supported formats, this should be ok. + * + * As part of pw_stream_finish_format() we can provide parameters that + * will control the buffer memory allocation. This includes the metadata + * that we would like on our buffer, the size, alignment, etc. + */ static void on_stream_format_changed(void *_data, const struct spa_pod *format) { @@ -209,6 +230,7 @@ on_stream_format_changed(void *_data, const struct spa_pod *format) Uint32 sdl_format; void *d; + /* NULL means to clear the format */ if (format == NULL) { pw_stream_finish_format(stream, 0, NULL, 0); return; @@ -217,6 +239,7 @@ on_stream_format_changed(void *_data, const struct spa_pod *format) fprintf(stderr, "got format:\n"); spa_debug_format(2, NULL, format); + /* call a helper function to parse the format for us. */ spa_format_video_raw_parse(format, &data->format); sdl_format = id_to_sdl_format(data->format.format); @@ -238,6 +261,8 @@ on_stream_format_changed(void *_data, const struct spa_pod *format) data->rect.w = data->format.size.width; data->rect.h = data->format.size.height; + /* a SPA_TYPE_OBJECT_ParamBuffers object defines the acceptable size, + * number, stride etc of the buffers */ params[0] = spa_pod_builder_add_object(&b, SPA_TYPE_OBJECT_ParamBuffers, SPA_PARAM_Buffers, SPA_PARAM_BUFFERS_buffers, SPA_POD_CHOICE_RANGE_Int(8, 2, MAX_BUFFERS), @@ -246,16 +271,19 @@ on_stream_format_changed(void *_data, const struct spa_pod *format) SPA_PARAM_BUFFERS_stride, SPA_POD_Int(data->stride), SPA_PARAM_BUFFERS_align, SPA_POD_Int(16)); + /* a header metadata with timing information */ params[1] = spa_pod_builder_add_object(&b, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Header), SPA_PARAM_META_size, SPA_POD_Int(sizeof(struct spa_meta_header))); + /* video cropping information */ params[2] = 
spa_pod_builder_add_object(&b, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, SPA_POD_Id(SPA_META_VideoCrop), SPA_PARAM_META_size, SPA_POD_Int(sizeof(struct spa_meta_region))); #define CURSOR_META_SIZE(w,h) (sizeof(struct spa_meta_cursor) + \ sizeof(struct spa_meta_bitmap) + w * h * 4) + /* cursor information */ params[3] = spa_pod_builder_add_object(&b, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type, SPA_POD_Id(SPA_META_Cursor), @@ -264,9 +292,11 @@ on_stream_format_changed(void *_data, const struct spa_pod *format) CURSOR_META_SIZE(1,1), CURSOR_META_SIZE(256,256))); + /* we are done */ pw_stream_finish_format(stream, 0, params, 4); } +/* these are the stream events we listen for */ static const struct pw_stream_events stream_events = { PW_VERSION_STREAM_EVENTS, .state_changed = on_stream_state_changed, @@ -296,8 +326,20 @@ int main(int argc, char *argv[]) pw_init(&argc, &argv); + /* create a main loop */ data.loop = pw_main_loop_new(NULL); + /* create a simple stream, the simple stream manages the core and remote + * objects for you if you don't need to deal with them + * + * If you plan to autoconnect your stream, you need to provide at least + * media, category and role properties + * + * Pass your events and a user_data pointer as the last arguments. This + * will inform you about the stream state. The most important event + * you need to listen to is the process event where you need to consume + * the data provided to you. + */ data.stream = pw_stream_new_simple( pw_main_loop_get_loop(data.loop), "video-play", @@ -309,8 +351,6 @@ int main(int argc, char *argv[]) &stream_events, &data); - data.remote = pw_stream_get_remote(data.stream); - data.core = pw_remote_get_core(data.remote); data.path = argc > 1 ? argv[1] : NULL; if (SDL_Init(SDL_INIT_VIDEO) < 0) { @@ -324,17 +364,24 @@ int main(int argc, char *argv[]) return -1; } + /* build the extra parameters to connect with. To connect, we can provide + * a list of supported formats.
We use a builder that writes the param + * object to the stack. */ build_format(&data, &b, params); + /* now connect the stream, we need a direction (input/output), + * an optional target node to connect to, some flags and parameters + */ pw_stream_connect(data.stream, PW_DIRECTION_INPUT, data.path ? (uint32_t)atoi(data.path) : SPA_ID_INVALID, - PW_STREAM_FLAG_AUTOCONNECT | - PW_STREAM_FLAG_INACTIVE | - PW_STREAM_FLAG_EXCLUSIVE | - PW_STREAM_FLAG_MAP_BUFFERS, - params, 1); + PW_STREAM_FLAG_AUTOCONNECT | /* try to automatically connect this stream */ + PW_STREAM_FLAG_INACTIVE | /* we will activate ourselves */ + PW_STREAM_FLAG_EXCLUSIVE | /* require exclusive access */ + PW_STREAM_FLAG_MAP_BUFFERS, /* mmap the buffer data for us */ + params, 1); /* extra parameters, see above */ + /* do things until we quit the mainloop */ pw_main_loop_run(data.loop); pw_stream_destroy(data.stream);