Module erlcloud_kinesis

Data Types

explicit_hash_key()

explicit_hash_key() = binary() | undefined

get_records_limit()

get_records_limit() = 1..10000

ordering()

ordering() = binary() | undefined

partition_key()

partition_key() = binary()

payload()

payload() = binary() | string()

put_records_item()

put_records_item() = {Data::string(), PartitionKey::string()} | {Data::string(), ExplicitHashKey::string(), PartitionKey::string()}

put_records_items()

put_records_items() = [put_records_item()]

Function Index

configure/2
configure/3
configure/4
create_stream/2 Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_CreateStream.html
create_stream/3
delete_stream/1 Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_DeleteStream.html
delete_stream/2
describe_stream/1 Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_DescribeStream.html
describe_stream/2
describe_stream/3
describe_stream/4
get_records/1 Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetRecords.html
get_records/2
get_records/3
get_records/4
get_shard_iterator/3 Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetShardIterator.html
get_shard_iterator/4
get_shard_iterator/5
list_streams/0 Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_ListStreams.html
list_streams/1
list_streams/2
list_streams/3
merge_shards/3 Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_MergeShards.html
merge_shards/4
new/2
new/3
put_record/3 Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecord.html
put_record/4
put_record/5
put_record/6
put_record/7
put_records/2 Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecords.html
put_records/3
split_shards/3 Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_SplitShard.html
split_shards/4

Function Details

configure/2

configure(AccessKeyID::string(), SecretAccessKey::string()) -> ok

configure/3

configure(AccessKeyID::string(), SecretAccessKey::string(), Host::string()) -> ok

configure/4

configure(AccessKeyID::string(), SecretAccessKey::string(), Host::string(), Port::non_neg_integer()) -> ok
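
configure/2-4 appear to set the default AWS configuration used by calls made without an explicit aws_config(). A minimal sketch (the credential values below are placeholders):

erlcloud_kinesis:configure("ACCESS_KEY_ID", "SECRET_ACCESS_KEY").
erlcloud_kinesis:list_streams().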

create_stream/2

create_stream(StreamName::string(), ShardCount::1..100000) -> proplist()

Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_CreateStream.html

Example

This operation adds a new Amazon Kinesis stream to your AWS account.

erlcloud_kinesis:create_stream(<<"test">>, 2).
{ok,{incomplete,#Fun<jsx_decoder.1.688044>}}

create_stream/3

create_stream(StreamName::string(), ShardCount::1..100000, Config::aws_config()) -> proplist()

delete_stream/1

delete_stream(StreamName::string()) -> proplist()

Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_DeleteStream.html

Example

This operation deletes a stream and all of its shards and data.

erlcloud_kinesis:delete_stream(<<"test">>).
{ok,{incomplete,#Fun<jsx_decoder.1.688044>}}

delete_stream/2

delete_stream(StreamName::string(), Config::aws_config()) -> proplist()

describe_stream/1

describe_stream(StreamName::string()) -> proplist()

Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_DescribeStream.html

Example

This operation returns the following information about the stream: the current status of the stream, the stream Amazon Resource Name (ARN), and an array of shard objects that comprise the stream.

erlcloud_kinesis:describe_stream(<<"staging">>).
{ok,[{<<"StreamDescription">>,
      [{<<"HasMoreShards">>,false},
       {<<"Shards">>,
        [[{<<"HashKeyRange">>,
           [{<<"EndingHashKey">>,
             <<"170141183460469231731687303715884105727">>},
            {<<"StartingHashKey">>,<<"0">>}]},
          {<<"SequenceNumberRange">>,
           [{<<"StartingSequenceNumber">>,
             <<"495372647485535624187345081927970814089871018992"...>>}]},
          {<<"ShardId">>,<<"shardId-000000000000">>}],
         [{<<"HashKeyRange">>,
           [{<<"EndingHashKey">>,
             <<"340282366920938463463374607431768211455">>},
            {<<"StartingHashKey">>,
             <<"170141183460469231731687303715884105728">>}]},
          {<<"SequenceNumberRange">>,
           [{<<"StartingSequenceNumber">>,
             <<"49537264748575863163933038815938617127259750"...>>}]},
          {<<"ShardId">>,<<"shardId-000000000001">>}]]},
       {<<"StreamARN">>,
        <<"arn:aws:kinesis:us-east-1:821148768124:stream/staging">>},
       {<<"StreamName">>,<<"staging">>},
       {<<"StreamStatus">>,<<"ACTIVE">>}]}]}

describe_stream/2

describe_stream(StreamName::string(), Config::get_records_limit() | aws_config()) -> proplist()

describe_stream/3

describe_stream(StreamName::string(), Limit::get_records_limit(), Config::string() | aws_config()) -> proplist()

describe_stream/4

describe_stream(StreamName::string(), Limit::get_records_limit(), ExcludeShard::string(), Config::aws_config()) -> proplist()
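
The Limit and ExcludeShard arguments can be used to page through streams with many shards. A sketch, assuming the extra argument is the shard id after which listing should resume (the DescribeStream ExclusiveStartShardId); field names are taken from the example above:

{ok, [{_, Desc}]} = erlcloud_kinesis:describe_stream(<<"staging">>, 1).
Shards = proplists:get_value(<<"Shards">>, Desc).
LastShardId = proplists:get_value(<<"ShardId">>, lists:last(Shards)).
erlcloud_kinesis:describe_stream(<<"staging">>, 1, LastShardId).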

get_records/1

get_records(ShardIterator::string()) -> {ok, [proplist()]} | {error, any()}

Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetRecords.html

Example

This operation returns one or more data records from a shard. A GetRecords operation request can retrieve up to 10 MB of data.

{ok, [{_, A2}]} = erlcloud_kinesis:get_shard_iterator(<<"test">>, <<"shardId-000000000000">>, <<"TRIM_HORIZON">>).
{ok,[{<<"ShardIterator">>,
      <<"AAAAAAAAAAEuncwaAk+GTC2TIdmdg5w6dIuZ4Scu6vaMGPtaPUfopvw9cBm2NM3Rlj9WyI5JFJr2ahuSh3Z187AdW4Lug86E"...>>}]}
erlcloud_kinesis:get_records(A2).
{ok,[{<<"NextShardIterator">>,
      <<"AAAAAAAAAAEkuCmrC+QDW1gUywyu7G8GxvRyM6GSMkcHQ9wrvCJBW87mjn9C8YEckkipaoJySwgKXMmn1BwSPjnjiUCsu6pc"...>>},
     {<<"Records">>,
      [[{<<"Data">>,<<"asdasd">>},
        {<<"PartitionKey">>,<<"key">>},
        {<<"SequenceNumber">>,
         <<"49537292605574028653758531131893428543501381406818304001">>}],
       [{<<"Data">>,<<"asdasd 213123123">>},
        {<<"PartitionKey">>,<<"key">>},
        {<<"SequenceNumber">>,
         <<"49537292605574028653758541428570459745183078607853977601">>}]]}]}

get_records/2

get_records(ShardIterator::string(), Config::get_records_limit() | aws_config()) -> {ok, [proplist()]} | {error, any()}

get_records/3

get_records(ShardIterator::binary(), Limit::get_records_limit(), Config::aws_config()) -> {ok, [proplist()]} | {error, any()}

get_records/4

get_records(ShardIterator::binary(), Limit::get_records_limit(), Options::proplist(), Config::aws_config()) -> {ok, [proplist()] | binary()} | {error, any()}
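
A typical way to read a shard sequentially is to feed the NextShardIterator from each response into the next call. A sketch using the field names shown in the example above:

{ok, [{_, Iterator}]} = erlcloud_kinesis:get_shard_iterator(<<"test">>, <<"shardId-000000000000">>, <<"TRIM_HORIZON">>).
{ok, Batch} = erlcloud_kinesis:get_records(Iterator, 100).
Records = proplists:get_value(<<"Records">>, Batch).
NextIterator = proplists:get_value(<<"NextShardIterator">>, Batch).
erlcloud_kinesis:get_records(NextIterator, 100).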

get_shard_iterator/3

get_shard_iterator(StreamName::string(), ShardId::string(), ShardIteratorType::string()) -> proplist()

Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_GetShardIterator.html

Example

This operation returns a shard iterator in ShardIterator. The shard iterator specifies the position in the shard from which you want to start reading data records sequentially.

erlcloud_kinesis:get_shard_iterator(<<"test">>, <<"shardId-000000000001">>, <<"TRIM_HORIZON">>).
{ok,[{<<"ShardIterator">>,
      <<"AAAAAAAAAAFHJejL6/AjDShV3pIXsxYZT7Xj2G6EHxokHqT2D1stIOVYUEyprlUGWUepKqUDaR0+hB6qTlKvZa+fsBRqgHi4"...>>}]}

get_shard_iterator/4

get_shard_iterator(StreamName::string(), ShardId::string(), ShardIteratorType::string(), Config::string() | aws_config()) -> proplist()

get_shard_iterator/5

get_shard_iterator(StreamName::string(), ShardId::string(), ShardIteratorType::string(), StartingSequenceNumber::string(), Config::aws_config()) -> proplist()
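
get_shard_iterator/5 takes a starting sequence number, which the Kinesis API requires for the AT_SEQUENCE_NUMBER and AFTER_SEQUENCE_NUMBER iterator types. A sketch reusing the sequence number from the get_records example above; erlcloud_aws:default_config() is assumed to supply the default configuration:

SeqNo = <<"49537292605574028653758531131893428543501381406818304001">>.
erlcloud_kinesis:get_shard_iterator(<<"test">>, <<"shardId-000000000000">>, <<"AFTER_SEQUENCE_NUMBER">>, SeqNo, erlcloud_aws:default_config()).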

list_streams/0

list_streams() -> proplist()

Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_ListStreams.html

Example

This operation returns an array of the names of all the streams that are associated with the AWS account making the ListStreams request.

erlcloud_kinesis:list_streams().
{ok,[{<<"HasMoreStreams">>,false},
     {<<"StreamNames">>,[<<"staging">>]}]}

list_streams/1

list_streams(Config::string() | aws_config()) -> proplist()

list_streams/2

list_streams(ExclusiveStartStreamName::string(), Config::1..100 | aws_config()) -> proplist()

list_streams/3

list_streams(ExclusiveStartStreamName::string(), Limit::1..100, Config::aws_config()) -> proplist()
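
ExclusiveStartStreamName and Limit allow paging through accounts with many streams. A sketch that fetches the next page when HasMoreStreams is true, resuming after the last name already returned (field names as in the example above):

{ok, Resp} = erlcloud_kinesis:list_streams().
case proplists:get_value(<<"HasMoreStreams">>, Resp) of
    true ->
        Names = proplists:get_value(<<"StreamNames">>, Resp),
        erlcloud_kinesis:list_streams(lists:last(Names), 100);
    false ->
        Resp
end.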

merge_shards/3

merge_shards(StreamName::string(), AdjacentShardToMerge::string(), ShardToMerge::string()) -> proplist()

Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_MergeShards.html

Example

This operation merges two adjacent shards in a stream and combines them into a single shard to reduce the stream's capacity to ingest and transport data. Two shards are considered adjacent if the union of the hash key ranges for the two shards forms a contiguous set with no gaps.

erlcloud_kinesis:merge_shards(<<"test">>, <<"shardId-000000000001">>, <<"shardId-000000000003">>).
{ok,{incomplete,#Fun<jsx_decoder.1.688044>}}

merge_shards/4

merge_shards(StreamName::string(), AdjacentShardToMerge::string(), ShardToMerge::string(), Config::aws_config()) -> proplist()

new/2

new(AccessKeyID::string(), SecretAccessKey::string()) -> aws_config()

new/3

new(AccessKeyID::string(), SecretAccessKey::string(), Host::string()) -> aws_config()

put_record/3

put_record(StreamName::binary(), PartitionKey::partition_key(), Data::payload()) -> {ok, proplist()} | {error, any()}

Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecord.html

Example

This operation puts a data record into an Amazon Kinesis stream from a producer.

erlcloud_kinesis:put_record(<<"test">>, <<"key">>, <<"asdasd">>).
{ok,[{<<"SequenceNumber">>,
      <<"49537292605574028653758531131893428543501381406818304001">>},
     {<<"ShardId">>,<<"shardId-000000000000">>}]}
erlcloud_kinesis:put_record(<<"test">>, <<"key">>, <<"asdasd 213123123">>).
{ok,[{<<"SequenceNumber">>,
      <<"49537292605574028653758541428570459745183078607853977601">>},
     {<<"ShardId">>,<<"shardId-000000000000">>}]}

put_record/4

put_record(StreamName::binary(), PartitionKey::partition_key(), Data::payload(), Config::explicit_hash_key() | aws_config()) -> {ok, proplist()} | {error, any()}

put_record/5

put_record(StreamName::binary(), PartitionKey::partition_key(), Data::payload(), ExplicitHashKey::explicit_hash_key(), Config::ordering() | aws_config()) -> {ok, proplist()} | {error, any()}

put_record/6

put_record(StreamName::binary(), PartitionKey::partition_key(), Data::payload(), ExplicitHashKey::explicit_hash_key(), Ordering::ordering(), Config::proplist() | aws_config()) -> {ok, proplist()} | {error, any()}

put_record/7

put_record(StreamName::binary(), PartitionKey::partition_key(), Data::payload(), ExplicitHashKey::explicit_hash_key(), Ordering::ordering(), Options::proplist(), Config::aws_config()) -> {ok, proplist()} | {error, any()}
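
The ExplicitHashKey and Ordering arguments correspond to the PutRecord parameters ExplicitHashKey (overrides the hash of the partition key when choosing a shard) and SequenceNumberForOrdering. A sketch that writes a second record ordered after the first, pinned to the starting hash key of shard 1 from the describe_stream example above:

{ok, First} = erlcloud_kinesis:put_record(<<"test">>, <<"key">>, <<"first record">>).
Seq = proplists:get_value(<<"SequenceNumber">>, First).
erlcloud_kinesis:put_record(<<"test">>, <<"key">>, <<"second record">>,
                            <<"170141183460469231731687303715884105728">>, Seq).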

put_records/2

put_records(StreamName::string(), Items::put_records_items()) -> proplist()
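
Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecords.html

Example

This operation puts multiple data records into an Amazon Kinesis stream in a single request. A minimal sketch using the put_records_item() forms defined above, assuming binary data and keys are accepted as in the other examples; the response is expected to follow the PutRecords API shape (FailedRecordCount plus a per-record Records list):

erlcloud_kinesis:put_records(<<"test">>, [{<<"first record">>, <<"key1">>},
                                          {<<"second record">>, <<"170141183460469231731687303715884105728">>, <<"key2">>}]).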

put_records/3

put_records(StreamName::string(), Items::put_records_items(), Config) -> proplist()

split_shards/3

split_shards(StreamName::string(), ShardToSplit::string(), NewStartingHashKey::string()) -> proplist()

Kinesis API: http://docs.aws.amazon.com/kinesis/latest/APIReference/API_SplitShard.html

Example

This operation splits a shard into two new shards in the stream, to increase the stream's capacity to ingest and transport data.

erlcloud_kinesis:split_shards(<<"test">>, <<"shardId-000000000000">>, <<"10">>).
{ok,{incomplete,#Fun<jsx_decoder.1.688044>}}

split_shards/4

split_shards(StreamName::string(), ShardToSplit::string(), NewStartingHashKey::string(), Config::aws_config()) -> proplist()


Generated by EDoc, Dec 23 2016, 12:25:53.