This article introduces how to implement Kafka Avro producers and consumers in C and Java. Many people run into trouble with this in practice, so the walkthrough below covers the test data format, the Avro schema, and the producer and consumer code step by step. I hope you read it carefully and come away with something useful!
Original data format

Each line of the test file holds four whitespace-separated fields: request IP, reply IP, domain name, and record type. For example:

3183375114 3729673322 "mx.hc.spdrb.com" A
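If you are wondering about those numeric IPs: they are IPv4 addresses stored as 32-bit integers, which is why the schema below uses the long type for them. A quick sanity check (my own sketch, not part of the original article) converts the first value back to dotted-quad form:

#include <stdio.h>
#include <stdint.h>
#include <arpa/inet.h>

int main(void)
{
    /* Assumption: the test file stores IPv4 addresses as host-order
       32-bit integers (e.g. 3183375114 == 189.190.115.10). */
    uint32_t qip = 3183375114u;
    struct in_addr addr;
    addr.s_addr = htonl(qip);                  /* convert to network byte order */
    printf("%u -> %s\n", qip, inet_ntoa(addr));
    return 0;
}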
The Avro schema is defined as follows:

{
    "type": "record",
    "name": "data",
    "fields":
    [
        {"name": "qip", "type": "long"},
        {"name": "aip", "type": "long"},
        {"name": "domain", "type": "string"},
        {"name": "type", "type": "string"}
    ]
}
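Note that the raw Avro binary encoding used below does not embed the schema, so the producer and consumer must both be built against this exact definition. Before wiring it into either side, it can be handy to verify that the JSON actually parses; here is a minimal standalone check with avro-c (my addition, not from the original):

#include <stdio.h>
#include <string.h>
#include <avro.h>

int main(void)
{
    const char *json =
        "{\"type\":\"record\",\"name\":\"data\",\"fields\":["
        "{\"name\":\"qip\",\"type\":\"long\"},"
        "{\"name\":\"aip\",\"type\":\"long\"},"
        "{\"name\":\"domain\",\"type\":\"string\"},"
        "{\"name\":\"type\",\"type\":\"string\"}]}";
    avro_schema_t schema;
    avro_schema_error_t error;

    /* avro_schema_from_json() returns non-zero on failure. */
    if (avro_schema_from_json(json, strlen(json), &schema, &error)) {
        fprintf(stderr, "invalid schema: %s\n", avro_strerror());
        return 1;
    }
    printf("schema \"%s\" parsed OK\n", avro_schema_name(schema));
    avro_schema_decref(schema);
    return 0;
}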
The C producer code is as follows:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "avro.h"
#include "producer.h"   /* declares kafka_putdata() */

const char PERSON_SCHEMA[] =
    "{\"type\": \"record\", \"name\": \"data\", \"fields\": ["
    "{\"name\": \"qip\", \"type\": \"long\"},"
    "{\"name\": \"aip\", \"type\": \"long\"},"
    "{\"name\": \"domain\", \"type\": \"string\"},"
    "{\"name\": \"type\", \"type\": \"string\"}]}";

const char *file = "avro_file.dat";
const char *brokers = "xxxxx:9092";
const char *topic = "topic1";

/* Debug helper: print an avro value as JSON. */
void print_avro_value(avro_value_t *value)
{
    char *json;
    if (!avro_value_to_json(value, 1, &json)) {
        printf("%s\n", json);
        free(json);
    }
}

avro_schema_t init_schema(void)
{
    avro_schema_t test_schema;
    avro_schema_error_t error;
    if (avro_schema_from_json(PERSON_SCHEMA, sizeof(PERSON_SCHEMA),
                              &test_schema, &error)) {
        fprintf(stderr, "schema error\n");
        exit(EXIT_FAILURE);
    }
    return test_schema;
}

/* Build one record and append its Avro binary encoding to the writer. */
void add_data(avro_writer_t writer, avro_schema_t schema,
              int64_t qip, int64_t aip,
              const char *domain, const char *type)
{
    avro_datum_t data = avro_record(schema);
    avro_datum_t dqip = avro_int64(qip);
    avro_datum_t daip = avro_int64(aip);
    avro_datum_t ddomain = avro_string(domain);
    avro_datum_t dtype = avro_string(type);

    avro_record_set(data, "qip", dqip);
    avro_record_set(data, "aip", daip);
    avro_record_set(data, "domain", ddomain);
    avro_record_set(data, "type", dtype);

    avro_write_data(writer, NULL, data);

    avro_datum_decref(dqip);
    avro_datum_decref(daip);
    avro_datum_decref(ddomain);
    avro_datum_decref(dtype);
    avro_datum_decref(data);
}

int main(int argc, char *argv[])
{
    int len = 0;
    avro_schema_t schema;
    avro_writer_t mem_writer;
    char line[1024];              /* raw text line from the test file */
    char buf[1024];               /* Avro-encoded output buffer */
    char tmp[4][500] = {{0x00}};

    FILE *fp = fopen("test", "r");
    if (!fp) {
        printf("open test file error!\n");
        return -1;
    }

    schema = init_schema();
    mem_writer = avro_writer_memory(buf, sizeof(buf));

    while (fgets(line, sizeof(line), fp) != NULL) {
        size_t n = strlen(line);
        if (n > 0 && line[n - 1] == '\n')
            line[n - 1] = '\0';
        if (sscanf(line, "%s%s%s%s", tmp[0], tmp[1], tmp[2], tmp[3]) != 4)
            continue;
        add_data(mem_writer, schema,
                 atol(tmp[0]), atol(tmp[1]), tmp[2], tmp[3]);
        len = avro_writer_tell(mem_writer);
        printf("data len = %d\n", len);
        /* Send the encoded record; kafka_putdata() is implemented with
           librdkafka and is not listed in the original article. */
        kafka_putdata(buf, len, brokers, topic);
        memset(tmp, 0x00, sizeof(tmp));
        memset(buf, 0x00, sizeof(buf));
        avro_writer_reset(mem_writer);
    }

    fclose(fp);
    avro_writer_free(mem_writer);
    return 0;
}
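The article leaves kafka_putdata() unlisted. For reference, here is a minimal sketch of what such a helper could look like on top of librdkafka's producer API. The function name and signature are taken from the call above; everything else is an assumption, with configuration and error handling trimmed:

#include <librdkafka/rdkafka.h>

/* Hypothetical reconstruction of kafka_putdata(): produce one
   Avro-encoded buffer to the given topic. Not the article's code. */
int kafka_putdata(const char *payload, int len,
                  const char *brokers, const char *topic)
{
    char errstr[512];
    rd_kafka_conf_t *conf = rd_kafka_conf_new();
    if (rd_kafka_conf_set(conf, "bootstrap.servers", brokers,
                          errstr, sizeof(errstr)) != RD_KAFKA_CONF_OK)
        return -1;

    rd_kafka_t *rk = rd_kafka_new(RD_KAFKA_PRODUCER, conf,
                                  errstr, sizeof(errstr));
    if (!rk)
        return -1;

    rd_kafka_topic_t *rkt = rd_kafka_topic_new(rk, topic, NULL);

    /* RD_KAFKA_MSG_F_COPY: librdkafka copies the payload, so the
       caller's buffer can be reused immediately after this returns. */
    rd_kafka_produce(rkt, RD_KAFKA_PARTITION_UA, RD_KAFKA_MSG_F_COPY,
                     (void *)payload, (size_t)len,
                     NULL, 0, NULL);
    rd_kafka_flush(rk, 5000);    /* wait up to 5s for delivery */

    rd_kafka_topic_destroy(rkt);
    rd_kafka_destroy(rk);
    return 0;
}

In a real program you would create the rd_kafka_t handle once and reuse it across calls, rather than rebuilding it for every record as this simplified sketch does.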
The C consumer code is as follows:

#include <stdio.h>
#include <stdlib.h>
#include <librdkafka/rdkafka.h>
#include "consumer.h"   /* declares initKafka() */
#include "avro.h"

const char *brokers = "xxxx:9092";
const char *topic = "topic1";
const char *group = "avrotest";

const char PERSON_SCHEMA[] =
    "{\"type\": \"record\", \"name\": \"data\", \"fields\": ["
    "{\"name\": \"qip\", \"type\": \"long\"},"
    "{\"name\": \"aip\", \"type\": \"long\"},"
    "{\"name\": \"domain\", \"type\": \"string\"},"
    "{\"name\": \"type\", \"type\": \"string\"}]}";

avro_schema_t init_schema(void)
{
    avro_schema_t test_schema;
    avro_schema_error_t error;
    if (avro_schema_from_json(PERSON_SCHEMA, sizeof(PERSON_SCHEMA),
                              &test_schema, &error)) {
        fprintf(stderr, "schema error\n");
        exit(EXIT_FAILURE);
    }
    return test_schema;
}

/* Decode one record from the reader and print its fields. */
void print_data(avro_reader_t reader, avro_schema_t schema)
{
    avro_datum_t data;
    if (avro_read_data(reader, schema, schema, &data) == 0) {
        int64_t qip;
        int64_t aip;
        char *domain;
        char *type;
        avro_datum_t q_datum, a_datum, d_datum, t_datum;

        avro_record_get(data, "qip", &q_datum);
        avro_int64_get(q_datum, &qip);
        avro_record_get(data, "aip", &a_datum);
        avro_int64_get(a_datum, &aip);
        avro_record_get(data, "domain", &d_datum);
        avro_string_get(d_datum, &domain);
        avro_record_get(data, "type", &t_datum);
        avro_string_get(t_datum, &type);

        printf("qip: %lld, aip: %lld, domain: %s, type: %s\n",
               (long long)qip, (long long)aip, domain, type);
        avro_datum_decref(data);
    }
}

int main(int argc, char *argv[])
{
    rd_kafka_t *rk;
    rd_kafka_topic_partition_list_t *topics;

    /* NOTE: the original listing breaks off at this point; the poll loop
       below is a minimal reconstruction of how each message payload would
       be decoded with the functions defined above. */
    if (initKafka(&rk, brokers, group, topic, &topics) != 0) {
        fprintf(stderr, "init kafka error!\n");
        return -1;
    }

    avro_schema_t schema = init_schema();
    while (1) {
        rd_kafka_message_t *msg = rd_kafka_consumer_poll(rk, 1000);
        if (!msg)
            continue;
        if (!msg->err) {
            avro_reader_t reader =
                avro_reader_memory((const char *)msg->payload, msg->len);
            print_data(reader, schema);
            avro_reader_free(reader);
        }
        rd_kafka_message_destroy(msg);
    }
    return 0;
}
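The original Java listing is not reproduced here, but since the producer writes plain Avro binary with no schema registry involved, a Java consumer only needs kafka-clients plus the Avro library: deserialize the message value as a byte array, then decode it with a GenericDatumReader built from the same schema. A minimal sketch (class name, broker address, and configuration are illustrative assumptions):

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class AvroConsumer {
    // Same schema as the C code; both sides must agree on it because
    // the raw Avro binary carries no schema.
    private static final String SCHEMA_JSON =
        "{\"type\":\"record\",\"name\":\"data\",\"fields\":["
        + "{\"name\":\"qip\",\"type\":\"long\"},"
        + "{\"name\":\"aip\",\"type\":\"long\"},"
        + "{\"name\":\"domain\",\"type\":\"string\"},"
        + "{\"name\":\"type\",\"type\":\"string\"}]}";

    public static void main(String[] args) {
        Schema schema = new Schema.Parser().parse(SCHEMA_JSON);
        GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);

        Properties props = new Properties();
        props.put("bootstrap.servers", "xxxx:9092");   // placeholder, as in the C code
        props.put("group.id", "avrotest");
        props.put("key.deserializer",
            "org.apache.kafka.common.serialization.ByteArrayDeserializer");
        props.put("value.deserializer",
            "org.apache.kafka.common.serialization.ByteArrayDeserializer");

        try (KafkaConsumer<byte[], byte[]> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("topic1"));
            while (true) {
                ConsumerRecords<byte[], byte[]> records =
                    consumer.poll(Duration.ofSeconds(1));
                for (ConsumerRecord<byte[], byte[]> rec : records) {
                    // Decode the raw Avro binary payload.
                    BinaryDecoder decoder =
                        DecoderFactory.get().binaryDecoder(rec.value(), null);
                    try {
                        GenericRecord datum = reader.read(null, decoder);
                        System.out.printf("qip: %d, aip: %d, domain: %s, type: %s%n",
                            datum.get("qip"), datum.get("aip"),
                            datum.get("domain"), datum.get("type"));
                    } catch (java.io.IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }
}

A Java producer would be the mirror image: encode each record with a GenericDatumWriter and BinaryEncoder, then send the resulting byte array with a KafkaProducer.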