Smaws_Client_Kinesis
Kinesis client library built on EIO.
module Types : sig ... end
val make_stream_mode_details :
stream_mode:Types.stream_mode ->
unit ->
Types.stream_mode_details
val make_update_stream_mode_input :
stream_mode_details:Types.stream_mode_details ->
stream_ar_n:Types.stream_ar_n ->
unit ->
Types.update_stream_mode_input
val make_update_shard_count_output :
?stream_ar_n:Types.stream_ar_n ->
?target_shard_count:Types.positive_integer_object ->
?current_shard_count:Types.positive_integer_object ->
?stream_name:Types.stream_name ->
unit ->
Types.update_shard_count_output
val make_update_shard_count_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
scaling_type:Types.scaling_type ->
target_shard_count:Types.positive_integer_object ->
unit ->
Types.update_shard_count_input
val make_untag_resource_input :
resource_ar_n:Types.resource_ar_n ->
tag_keys:Types.tag_key_list ->
unit ->
Types.untag_resource_input
val make_tag_resource_input :
resource_ar_n:Types.resource_ar_n ->
tags:Types.tag_map ->
unit ->
Types.tag_resource_input
val make_tag : ?value:Types.tag_value -> key:Types.tag_key -> unit -> Types.tag
val make_record :
?encryption_type:Types.encryption_type ->
?approximate_arrival_timestamp:Types.timestamp ->
partition_key:Types.partition_key ->
data:Types.data ->
sequence_number:Types.sequence_number ->
unit ->
Types.record
val make_hash_key_range :
ending_hash_key:Types.hash_key ->
starting_hash_key:Types.hash_key ->
unit ->
Types.hash_key_range
val make_child_shard :
hash_key_range:Types.hash_key_range ->
parent_shards:Types.shard_id_list ->
shard_id:Types.shard_id ->
unit ->
Types.child_shard
val make_subscribe_to_shard_event :
?child_shards:Types.child_shard_list ->
millis_behind_latest:Types.millis_behind_latest ->
continuation_sequence_number:Types.sequence_number ->
records:Types.record_list ->
unit ->
Types.subscribe_to_shard_event
val make_subscribe_to_shard_output :
event_stream:Types.subscribe_to_shard_event_stream ->
unit ->
Types.subscribe_to_shard_output
val make_starting_position :
?timestamp:Types.timestamp ->
?sequence_number:Types.sequence_number ->
type_:Types.shard_iterator_type ->
unit ->
Types.starting_position
val make_subscribe_to_shard_input :
starting_position:Types.starting_position ->
shard_id:Types.shard_id ->
consumer_ar_n:Types.consumer_ar_n ->
unit ->
Types.subscribe_to_shard_input
val make_stream_summary :
?stream_creation_timestamp:Types.timestamp ->
?stream_mode_details:Types.stream_mode_details ->
stream_status:Types.stream_status ->
stream_ar_n:Types.stream_ar_n ->
stream_name:Types.stream_name ->
unit ->
Types.stream_summary
val make_enhanced_metrics :
?shard_level_metrics:Types.metrics_name_list ->
unit ->
Types.enhanced_metrics
val make_stream_description_summary :
?consumer_count:Types.consumer_count_object ->
?key_id:Types.key_id ->
?encryption_type:Types.encryption_type ->
?stream_mode_details:Types.stream_mode_details ->
open_shard_count:Types.shard_count_object ->
enhanced_monitoring:Types.enhanced_monitoring_list ->
stream_creation_timestamp:Types.timestamp ->
retention_period_hours:Types.retention_period_hours ->
stream_status:Types.stream_status ->
stream_ar_n:Types.stream_ar_n ->
stream_name:Types.stream_name ->
unit ->
Types.stream_description_summary
val make_sequence_number_range :
?ending_sequence_number:Types.sequence_number ->
starting_sequence_number:Types.sequence_number ->
unit ->
Types.sequence_number_range
val make_shard :
?adjacent_parent_shard_id:Types.shard_id ->
?parent_shard_id:Types.shard_id ->
sequence_number_range:Types.sequence_number_range ->
hash_key_range:Types.hash_key_range ->
shard_id:Types.shard_id ->
unit ->
Types.shard
val make_stream_description :
?key_id:Types.key_id ->
?encryption_type:Types.encryption_type ->
?stream_mode_details:Types.stream_mode_details ->
enhanced_monitoring:Types.enhanced_monitoring_list ->
stream_creation_timestamp:Types.timestamp ->
retention_period_hours:Types.retention_period_hours ->
has_more_shards:Types.boolean_object ->
shards:Types.shard_list ->
stream_status:Types.stream_status ->
stream_ar_n:Types.stream_ar_n ->
stream_name:Types.stream_name ->
unit ->
Types.stream_description
val make_stop_stream_encryption_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
key_id:Types.key_id ->
encryption_type:Types.encryption_type ->
unit ->
Types.stop_stream_encryption_input
val make_start_stream_encryption_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
key_id:Types.key_id ->
encryption_type:Types.encryption_type ->
unit ->
Types.start_stream_encryption_input
val make_split_shard_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
new_starting_hash_key:Types.hash_key ->
shard_to_split:Types.shard_id ->
unit ->
Types.split_shard_input
val make_shard_filter :
?timestamp:Types.timestamp ->
?shard_id:Types.shard_id ->
type_:Types.shard_filter_type ->
unit ->
Types.shard_filter
val make_remove_tags_from_stream_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
tag_keys:Types.tag_key_list ->
unit ->
Types.remove_tags_from_stream_input
val make_consumer :
consumer_creation_timestamp:Types.timestamp ->
consumer_status:Types.consumer_status ->
consumer_ar_n:Types.consumer_ar_n ->
consumer_name:Types.consumer_name ->
unit ->
Types.consumer
val make_register_stream_consumer_output :
consumer:Types.consumer ->
unit ->
Types.register_stream_consumer_output
val make_register_stream_consumer_input :
?tags:Types.tag_map ->
consumer_name:Types.consumer_name ->
stream_ar_n:Types.stream_ar_n ->
unit ->
Types.register_stream_consumer_input
val make_put_resource_policy_input :
policy:Types.policy ->
resource_ar_n:Types.resource_ar_n ->
unit ->
Types.put_resource_policy_input
val make_put_records_result_entry :
?error_message:Types.error_message ->
?error_code:Types.error_code ->
?shard_id:Types.shard_id ->
?sequence_number:Types.sequence_number ->
unit ->
Types.put_records_result_entry
val make_put_records_request_entry :
?explicit_hash_key:Types.hash_key ->
partition_key:Types.partition_key ->
data:Types.data ->
unit ->
Types.put_records_request_entry
val make_put_records_output :
?encryption_type:Types.encryption_type ->
?failed_record_count:Types.positive_integer_object ->
records:Types.put_records_result_entry_list ->
unit ->
Types.put_records_output
val make_put_records_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
records:Types.put_records_request_entry_list ->
unit ->
Types.put_records_input
val make_put_record_output :
?encryption_type:Types.encryption_type ->
sequence_number:Types.sequence_number ->
shard_id:Types.shard_id ->
unit ->
Types.put_record_output
val make_put_record_input :
?stream_ar_n:Types.stream_ar_n ->
?sequence_number_for_ordering:Types.sequence_number ->
?explicit_hash_key:Types.hash_key ->
?stream_name:Types.stream_name ->
partition_key:Types.partition_key ->
data:Types.data ->
unit ->
Types.put_record_input
val make_merge_shards_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
adjacent_shard_to_merge:Types.shard_id ->
shard_to_merge:Types.shard_id ->
unit ->
Types.merge_shards_input
val make_list_tags_for_stream_output :
has_more_tags:Types.boolean_object ->
tags:Types.tag_list ->
unit ->
Types.list_tags_for_stream_output
val make_list_tags_for_stream_input :
?stream_ar_n:Types.stream_ar_n ->
?limit:Types.list_tags_for_stream_input_limit ->
?exclusive_start_tag_key:Types.tag_key ->
?stream_name:Types.stream_name ->
unit ->
Types.list_tags_for_stream_input
val make_list_tags_for_resource_output :
?tags:Types.tag_list ->
unit ->
Types.list_tags_for_resource_output
val make_list_tags_for_resource_input :
resource_ar_n:Types.resource_ar_n ->
unit ->
Types.list_tags_for_resource_input
val make_list_streams_output :
?stream_summaries:Types.stream_summary_list ->
?next_token:Types.next_token ->
has_more_streams:Types.boolean_object ->
stream_names:Types.stream_name_list ->
unit ->
Types.list_streams_output
val make_list_streams_input :
?next_token:Types.next_token ->
?exclusive_start_stream_name:Types.stream_name ->
?limit:Types.list_streams_input_limit ->
unit ->
Types.list_streams_input
val make_list_stream_consumers_output :
?next_token:Types.next_token ->
?consumers:Types.consumer_list ->
unit ->
Types.list_stream_consumers_output
val make_list_stream_consumers_input :
?stream_creation_timestamp:Types.timestamp ->
?max_results:Types.list_stream_consumers_input_limit ->
?next_token:Types.next_token ->
stream_ar_n:Types.stream_ar_n ->
unit ->
Types.list_stream_consumers_input
val make_list_shards_output :
?next_token:Types.next_token ->
?shards:Types.shard_list ->
unit ->
Types.list_shards_output
val make_list_shards_input :
?stream_ar_n:Types.stream_ar_n ->
?shard_filter:Types.shard_filter ->
?stream_creation_timestamp:Types.timestamp ->
?max_results:Types.list_shards_input_limit ->
?exclusive_start_shard_id:Types.shard_id ->
?next_token:Types.next_token ->
?stream_name:Types.stream_name ->
unit ->
Types.list_shards_input
val make_increase_stream_retention_period_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
retention_period_hours:Types.retention_period_hours ->
unit ->
Types.increase_stream_retention_period_input
val make_get_shard_iterator_output :
?shard_iterator:Types.shard_iterator ->
unit ->
Types.get_shard_iterator_output
val make_get_shard_iterator_input :
?stream_ar_n:Types.stream_ar_n ->
?timestamp:Types.timestamp ->
?starting_sequence_number:Types.sequence_number ->
?stream_name:Types.stream_name ->
shard_iterator_type:Types.shard_iterator_type ->
shard_id:Types.shard_id ->
unit ->
Types.get_shard_iterator_input
val make_get_resource_policy_output :
policy:Types.policy ->
unit ->
Types.get_resource_policy_output
val make_get_resource_policy_input :
resource_ar_n:Types.resource_ar_n ->
unit ->
Types.get_resource_policy_input
val make_get_records_output :
?child_shards:Types.child_shard_list ->
?millis_behind_latest:Types.millis_behind_latest ->
?next_shard_iterator:Types.shard_iterator ->
records:Types.record_list ->
unit ->
Types.get_records_output
val make_get_records_input :
?stream_ar_n:Types.stream_ar_n ->
?limit:Types.get_records_input_limit ->
shard_iterator:Types.shard_iterator ->
unit ->
Types.get_records_input
val make_enhanced_monitoring_output :
?stream_ar_n:Types.stream_ar_n ->
?desired_shard_level_metrics:Types.metrics_name_list ->
?current_shard_level_metrics:Types.metrics_name_list ->
?stream_name:Types.stream_name ->
unit ->
Types.enhanced_monitoring_output
val make_enable_enhanced_monitoring_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
shard_level_metrics:Types.metrics_name_list ->
unit ->
Types.enable_enhanced_monitoring_input
val make_disable_enhanced_monitoring_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
shard_level_metrics:Types.metrics_name_list ->
unit ->
Types.disable_enhanced_monitoring_input
val make_describe_stream_summary_output :
stream_description_summary:Types.stream_description_summary ->
unit ->
Types.describe_stream_summary_output
val make_describe_stream_summary_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
unit ->
Types.describe_stream_summary_input
val make_consumer_description :
stream_ar_n:Types.stream_ar_n ->
consumer_creation_timestamp:Types.timestamp ->
consumer_status:Types.consumer_status ->
consumer_ar_n:Types.consumer_ar_n ->
consumer_name:Types.consumer_name ->
unit ->
Types.consumer_description
val make_describe_stream_consumer_output :
consumer_description:Types.consumer_description ->
unit ->
Types.describe_stream_consumer_output
val make_describe_stream_consumer_input :
?consumer_ar_n:Types.consumer_ar_n ->
?consumer_name:Types.consumer_name ->
?stream_ar_n:Types.stream_ar_n ->
unit ->
Types.describe_stream_consumer_input
val make_describe_stream_output :
stream_description:Types.stream_description ->
unit ->
Types.describe_stream_output
val make_describe_stream_input :
?stream_ar_n:Types.stream_ar_n ->
?exclusive_start_shard_id:Types.shard_id ->
?limit:Types.describe_stream_input_limit ->
?stream_name:Types.stream_name ->
unit ->
Types.describe_stream_input
val make_describe_limits_output :
on_demand_stream_count_limit:Types.on_demand_stream_count_limit_object ->
on_demand_stream_count:Types.on_demand_stream_count_object ->
open_shard_count:Types.shard_count_object ->
shard_limit:Types.shard_count_object ->
unit ->
Types.describe_limits_output
val make_deregister_stream_consumer_input :
?consumer_ar_n:Types.consumer_ar_n ->
?consumer_name:Types.consumer_name ->
?stream_ar_n:Types.stream_ar_n ->
unit ->
Types.deregister_stream_consumer_input
val make_delete_stream_input :
?stream_ar_n:Types.stream_ar_n ->
?enforce_consumer_deletion:Types.boolean_object ->
?stream_name:Types.stream_name ->
unit ->
Types.delete_stream_input
val make_delete_resource_policy_input :
resource_ar_n:Types.resource_ar_n ->
unit ->
Types.delete_resource_policy_input
val make_decrease_stream_retention_period_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
retention_period_hours:Types.retention_period_hours ->
unit ->
Types.decrease_stream_retention_period_input
val make_create_stream_input :
?tags:Types.tag_map ->
?stream_mode_details:Types.stream_mode_details ->
?shard_count:Types.positive_integer_object ->
stream_name:Types.stream_name ->
unit ->
Types.create_stream_input
val make_add_tags_to_stream_input :
?stream_ar_n:Types.stream_ar_n ->
?stream_name:Types.stream_name ->
tags:Types.tag_map ->
unit ->
Types.add_tags_to_stream_input

Operations
module AddTagsToStream : sig ... end
Adds or updates tags for the specified Kinesis data stream. You can assign up to 50 tags to a data stream.
module CreateStream : sig ... end
Creates a Kinesis data stream. A stream captures and transports data records that are continuously emitted from different data sources or producers. Scale-out within a stream is explicitly supported by means of shards, which are uniquely identified groups of data records in a stream.
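For illustration, a minimal sketch of building this operation's input with the make_create_stream_input constructor above. It assumes Types.stream_name is a string alias and Types.positive_integer_object an int alias; the stream name is a placeholder, so check the Types module for the actual definitions.

(* Sketch: a provisioned stream with two shards. Types.stream_name
   and Types.positive_integer_object are assumed to be string and
   int aliases respectively. *)
let create_input =
  Smaws_Client_Kinesis.make_create_stream_input
    ~shard_count:2
    ~stream_name:"example-stream"
    ()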
module DecreaseStreamRetentionPeriod : sig ... end
Decreases the Kinesis data stream's retention period, which is the length of time data records are accessible after they are added to the stream. The minimum value of a stream's retention period is 24 hours.
module DeleteResourcePolicy : sig ... end
Deletes a policy for the specified data stream or consumer. Request patterns can be one of the following:
module DeleteStream : sig ... end
Deletes a Kinesis data stream and all its shards and data. You must shut down any applications that are operating on the stream before you delete the stream. If an application attempts to operate on a deleted stream, it receives the exception ResourceNotFoundException.
module DeregisterStreamConsumer : sig ... end
To deregister a consumer, provide its ARN. Alternatively, you can provide the ARN of the data stream and the name you gave the consumer when you registered it. You may also provide all three parameters, as long as they don't conflict with each other. If you don't know the name or ARN of the consumer that you want to deregister, you can use the ListStreamConsumers operation to get a list of the descriptions of all the consumers that are currently registered with a given data stream. The description of a consumer contains its name and ARN.
module DescribeLimits : sig ... end
Describes the shard limits and usage for the account.
module DescribeStream : sig ... end
Describes the specified Kinesis data stream.
module DescribeStreamConsumer : sig ... end
To get the description of a registered consumer, provide the ARN of the consumer. Alternatively, you can provide the ARN of the data stream and the name you gave the consumer when you registered it. You may also provide all three parameters, as long as they don't conflict with each other. If you don't know the name or ARN of the consumer that you want to describe, you can use the ListStreamConsumers operation to get a list of the descriptions of all the consumers that are currently registered with a given data stream.
module DescribeStreamSummary : sig ... end
Provides a summarized description of the specified Kinesis data stream without the shard list.
module DisableEnhancedMonitoring : sig ... end
Disables enhanced monitoring.
module EnableEnhancedMonitoring : sig ... end
Enables enhanced Kinesis data stream monitoring for shard-level metrics.
module GetRecords : sig ... end
Gets data records from a Kinesis data stream's shard.
module GetResourcePolicy : sig ... end
Returns a policy attached to the specified data stream or consumer. Request patterns can be one of the following:
module GetShardIterator : sig ... end
Gets an Amazon Kinesis shard iterator. A shard iterator expires 5 minutes after it is returned to the requester.
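A hedged sketch of constructing the input with make_get_shard_iterator_input. It assumes Types.shard_iterator_type is a variant with a TRIM_HORIZON constructor mirroring the AWS enum value, which this page does not confirm, and that the id aliases are plain strings.

(* Sketch: request an iterator at the oldest available record in the
   shard. TRIM_HORIZON is an assumed constructor of
   Types.shard_iterator_type; the shard id is illustrative. *)
let iterator_input =
  Smaws_Client_Kinesis.make_get_shard_iterator_input
    ~stream_name:"example-stream"
    ~shard_iterator_type:Smaws_Client_Kinesis.Types.TRIM_HORIZON
    ~shard_id:"shardId-000000000000"
    ()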
module IncreaseStreamRetentionPeriod : sig ... end
Increases the Kinesis data stream's retention period, which is the length of time data records are accessible after they are added to the stream. The maximum value of a stream's retention period is 8760 hours (365 days).
module ListShards : sig ... end
Lists the shards in a stream and provides information about each shard. This operation has a limit of 1000 transactions per second per data stream.
module ListStreamConsumers : sig ... end
Lists the consumers registered to receive data from a stream using enhanced fan-out, and provides information about each consumer.
module ListStreams : sig ... end
Lists your Kinesis data streams.
module ListTagsForResource : sig ... end
Lists all tags added to the specified Kinesis resource. Each tag is a label consisting of a user-defined key and value. Tags can help you manage, identify, organize, search for, and filter resources.
module ListTagsForStream : sig ... end
Lists the tags for the specified Kinesis data stream. This operation has a limit of five transactions per second per account.
module MergeShards : sig ... end
Merges two adjacent shards in a Kinesis data stream and combines them into a single shard to reduce the stream's capacity to ingest and transport data. This API is only supported for the data streams with the provisioned capacity mode. Two shards are considered adjacent if the union of the hash key ranges for the two shards forms a contiguous set with no gaps. For example, if you have two shards, one with a hash key range of 276...381 and the other with a hash key range of 382...454, then you could merge these two shards into a single shard that would have a hash key range of 276...454. After the merge, the single child shard receives data for all hash key values covered by the two parent shards.
module PutRecord : sig ... end
Writes a single data record into an Amazon Kinesis data stream. Call PutRecord to send data into the stream for real-time ingestion and subsequent processing, one record at a time. Each shard can support writes up to 1,000 records per second, up to a maximum data write total of 1 MiB per second.
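As a sketch, the input for a single write can be assembled with make_put_record_input above. This assumes Types.data and Types.partition_key are string aliases, which should be verified against the Types module; the payload and key are illustrative.

(* Sketch: one record keyed by a user id so that all of that user's
   records land on the same shard. Types.data is assumed to be a
   string alias. *)
let put_input =
  Smaws_Client_Kinesis.make_put_record_input
    ~stream_name:"example-stream"
    ~partition_key:"user-42"
    ~data:"{\"event\":\"signup\"}"
    ()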
module PutRecords : sig ... end
Writes multiple data records into a Kinesis data stream in a single call (also referred to as a PutRecords request). Use this operation to send data into the stream for data ingestion and processing.
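A sketch of batching with the two constructors above, assuming the Types *_list aliases are ordinary OCaml lists and the scalar aliases are plain strings; names and payloads are placeholders.

(* Sketch: two entries in one PutRecords call, each paired with its
   own partition key. List and string aliases are assumptions. *)
let batch_input =
  let entry pk payload =
    Smaws_Client_Kinesis.make_put_records_request_entry
      ~partition_key:pk ~data:payload ()
  in
  Smaws_Client_Kinesis.make_put_records_input
    ~stream_name:"example-stream"
    ~records:[ entry "user-42" "login"; entry "user-43" "logout" ]
    ()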
module PutResourcePolicy : sig ... end
Attaches a resource-based policy to a data stream or registered consumer. If you are using an identity other than the root user of the Amazon Web Services account that owns the resource, the calling identity must have the PutResourcePolicy permissions on the specified Kinesis Data Streams resource and belong to the owner's account in order to use this operation. If you don't have PutResourcePolicy permissions, Amazon Kinesis Data Streams returns a 403 Access Denied error. If you receive a ResourceNotFoundException, check to see if you passed a valid stream or consumer resource.
module RegisterStreamConsumer : sig ... end
Registers a consumer with a Kinesis data stream. When you use this operation, the consumer you register can then call SubscribeToShard to receive data from the stream using enhanced fan-out, at a rate of up to 2 MiB per second for every shard you subscribe to. This rate is unaffected by the total number of consumers that read from the same stream.
module RemoveTagsFromStream : sig ... end
Removes tags from the specified Kinesis data stream. Removed tags are deleted and cannot be recovered after this operation successfully completes.
module SplitShard : sig ... end
Splits a shard into two new shards in the Kinesis data stream, to increase the stream's capacity to ingest and transport data. SplitShard is called when there is a need to increase the overall capacity of a stream because of an expected increase in the volume of data records being ingested. This API is only supported for the data streams with the provisioned capacity mode.
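To make the hash-key mechanics concrete, a sketch of the input built with make_split_shard_input. The shard id is a placeholder and the hash key aliases are assumed to be decimal strings.

(* Sketch: split a parent shard at an explicit hash key. The new
   starting hash key must lie inside the parent's hash key range;
   the value below is the midpoint of the full 128-bit key space. *)
let split_input =
  Smaws_Client_Kinesis.make_split_shard_input
    ~stream_name:"example-stream"
    ~new_starting_hash_key:"170141183460469231731687303715884105728"
    ~shard_to_split:"shardId-000000000000"
    ()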
module StartStreamEncryption : sig ... end
Enables or updates server-side encryption using an Amazon Web Services KMS key for a specified stream.
module StopStreamEncryption : sig ... end
Disables server-side encryption for a specified stream.
module SubscribeToShard : sig ... end
This operation establishes an HTTP/2 connection between the consumer you specify in the ConsumerARN parameter and the shard you specify in the ShardId parameter. After the connection is successfully established, Kinesis Data Streams pushes records from the shard to the consumer over this connection. Before you call this operation, call RegisterStreamConsumer to register the consumer with Kinesis Data Streams.
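A sketch of the subscription input, combining make_starting_position with make_subscribe_to_shard_input. It assumes Types.shard_iterator_type has a LATEST constructor matching the AWS enum, which this page does not confirm; the consumer ARN and shard id are placeholders.

(* Sketch: subscribe starting at the newest records. LATEST is an
   assumed constructor of Types.shard_iterator_type; the ARN and
   shard id are illustrative. *)
let subscribe_input =
  Smaws_Client_Kinesis.make_subscribe_to_shard_input
    ~starting_position:
      (Smaws_Client_Kinesis.make_starting_position
         ~type_:Smaws_Client_Kinesis.Types.LATEST ())
    ~shard_id:"shardId-000000000000"
    ~consumer_ar_n:"arn:aws:kinesis:us-east-1:123456789012:stream/example-stream/consumer/example:1"
    ()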
module TagResource : sig ... end
Adds or updates tags for the specified Kinesis resource. Each tag is a label consisting of a user-defined key and value. Tags can help you manage, identify, organize, search for, and filter resources. You can assign up to 50 tags to a Kinesis resource.
module UntagResource : sig ... end
Removes tags from the specified Kinesis resource. Removed tags are deleted and can't be recovered after this operation completes successfully.
module UpdateShardCount : sig ... end
Updates the shard count of the specified stream to the specified number of shards. This API is only supported for the data streams with the provisioned capacity mode.
module UpdateStreamMode : sig ... end
Updates the capacity mode of the data stream. Currently, in Kinesis Data Streams, you can choose between an on-demand capacity mode and a provisioned capacity mode for your data stream.
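A sketch of switching a stream to on-demand capacity, assuming Types.stream_mode is a variant with an ON_DEMAND constructor mirroring the AWS enum and that Types.stream_ar_n is a string alias; the ARN is a placeholder.

(* Sketch: request on-demand capacity mode. ON_DEMAND is an assumed
   constructor of Types.stream_mode; the stream ARN is illustrative. *)
let mode_input =
  Smaws_Client_Kinesis.make_update_stream_mode_input
    ~stream_mode_details:
      (Smaws_Client_Kinesis.make_stream_mode_details
         ~stream_mode:Smaws_Client_Kinesis.Types.ON_DEMAND ())
    ~stream_ar_n:"arn:aws:kinesis:us-east-1:123456789012:stream/example-stream"
    ()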
module Json_serializers : sig ... end
module Json_deserializers : sig ... end